blob: 63703a7871d60896773cfa60411f2c5b28531bb8 [file] [log] [blame]
Angel Pons32859fc2020-04-02 23:48:27 +02001/* SPDX-License-Identifier: GPL-2.0-only */
Stefan Reinauer8f2c6162010-04-06 21:50:21 +00002
Eric Biedermanc84c1902004-10-14 20:13:01 +00003#ifndef CPU_X86_CACHE
4#define CPU_X86_CACHE
5
Aaron Durbin029aaf62013-10-10 12:41:49 -05006#include <cpu/x86/cr.h>
7
8#define CR0_CacheDisable (CR0_CD)
9#define CR0_NoWriteThrough (CR0_NW)
Patrick Georgi05e740f2012-03-31 12:52:21 +020010
Arthur Heymansa6a2f932019-11-25 19:58:36 +010011#define CPUID_FEATURE_CLFLUSH_BIT 19
12
Patrick Georgi05e740f2012-03-31 12:52:21 +020013#if !defined(__ASSEMBLER__)
14
Arthur Heymans3134a812019-11-25 12:20:01 +010015#include <stdbool.h>
16
/* Write every modified cache line back to memory, then invalidate all
 * caches.  The "memory" clobber forces the compiler to complete pending
 * stores before the flush and to reload memory afterwards. */
static inline void wbinvd(void)
{
	__asm__ __volatile__("wbinvd" : : : "memory");
}
21
/* Invalidate all caches WITHOUT writing modified lines back — dirty data
 * is discarded.  The "memory" clobber keeps the compiler from caching
 * memory values across the invalidation. */
static inline void invd(void)
{
	__asm__ __volatile__("invd" : : : "memory");
}
Stefan Reinauer8f2c6162010-04-06 21:50:21 +000026
/*
 * Flush the cache line containing @addr from the entire cache hierarchy.
 *
 * The target line is expressed as a "+m" memory operand instead of
 * passing the pointer in a register with no clobber (the previous form):
 * without telling the compiler that the asm reads and writes that
 * memory, pending stores to the line could still sit in registers or be
 * reordered past the flush, so the flush could act on stale data.
 */
static inline void clflush(void *addr)
{
	__asm__ __volatile__("clflush %0" : "+m" (*(volatile char *)addr));
}
31
Arthur Heymans3134a812019-11-25 12:20:01 +010032bool clflush_supported(void);
33
/* The following functions require the __always_inline qualifier because
 * of the AMD function STOP_CAR_AND_CPU, which disables cache-as-RAM.
 * Once cache-as-RAM is disabled, the cache-as-RAM stack can no longer
 * be used, so called functions must be inlined to avoid stack usage.
 * The compiler must also keep local variables register-based and not
 * allocate them from the stack.  With gcc 4.5.0, some functions
 * declared as inline are not being inlined, so the qualifier
 * __always_inline is added to their declaration to force inlining.
 */
Aaron Durbin75a62e72018-09-13 02:10:45 -060044static __always_inline void enable_cache(void)
Eric Biedermanc84c1902004-10-14 20:13:01 +000045{
Elyes HAOUAS63f98f22019-06-26 20:17:50 +020046 CRx_TYPE cr0;
Eric Biedermanc84c1902004-10-14 20:13:01 +000047 cr0 = read_cr0();
Aaron Durbin029aaf62013-10-10 12:41:49 -050048 cr0 &= ~(CR0_CD | CR0_NW);
Eric Biedermanc84c1902004-10-14 20:13:01 +000049 write_cr0(cr0);
50}
51
/*
 * Disable caching and write all dirty lines back to memory.
 *
 * CR0.CD is set, with a WBINVD both before and after the CR0 write: the
 * first wbinvd flushes dirty lines while caching is still enabled; the
 * second catches any line modified in the window before the CR0 update
 * takes effect.  The statement order is deliberate — do not reorder.
 *
 * NOTE(review): CR0.NW is left untouched here even though enable_cache()
 * clears both CD and NW — presumably intentional; confirm with callers.
 */
static __always_inline void disable_cache(void)
{
	/* Disable and write back the cache */
	CRx_TYPE cr0;
	cr0 = read_cr0();
	cr0 |= CR0_CD;
	wbinvd();
	write_cr0(cr0);
	wbinvd();
}
62
Patrick Georgi05e740f2012-03-31 12:52:21 +020063#endif /* !__ASSEMBLER__ */
Eric Biedermanc84c1902004-10-14 20:13:01 +000064#endif /* CPU_X86_CACHE */