blob: db35ef14ebfce7a95364720b50d5fd0981934356 [file] [log] [blame]
Eric Biedermanc84c1902004-10-14 20:13:01 +00001#ifndef CPU_X86_MSR_H
2#define CPU_X86_MSR_H
3
Stefan Reinauer35b6bbb2010-03-28 21:26:54 +00004#if defined(__ROMCC__)
Eric Biedermanc84c1902004-10-14 20:13:01 +00005
6typedef __builtin_msr_t msr_t;
7
8static msr_t rdmsr(unsigned long index)
9{
10 return __builtin_rdmsr(index);
11}
12
13static void wrmsr(unsigned long index, msr_t msr)
14{
15 __builtin_wrmsr(index, msr.lo, msr.hi);
16}
17
arch import user (historical)6ca76362005-07-06 17:17:25 +000018#else
Eric Biedermanc84c1902004-10-14 20:13:01 +000019
/*
 * A 64-bit MSR value, split into the low and high 32-bit halves
 * exactly as the RDMSR/WRMSR instructions deliver them in EAX/EDX.
 */
typedef struct msr_struct {
	unsigned lo;	/* bits 31:0  (EAX) */
	unsigned hi;	/* bits 63:32 (EDX) */
} msr_t;

/*
 * Pairs an MSR address with the value to program into it; intended
 * for table-driven MSR initialization.
 */
typedef struct msrinit_struct {
	unsigned index;	/* MSR address */
	msr_t msr;	/* value to write to that MSR */
} msrinit_t;
31
Lee Leahyae738ac2016-07-24 08:03:37 -070032#if IS_ENABLED(CONFIG_SOC_SETS_MSRS)
33msr_t soc_msr_read(unsigned index);
34void soc_msr_write(unsigned index, msr_t msr);
35
36/* Handle MSR references in the other source code */
37static inline __attribute__((always_inline)) msr_t rdmsr(unsigned index)
38{
39 return soc_msr_read(index);
40}
41
42static inline __attribute__((always_inline)) void wrmsr(unsigned index,
43 msr_t msr)
44{
45 soc_msr_write(index, msr);
46}
47#else /* CONFIG_SOC_SETS_MSRS */
48
Scott Duplichan78301d02010-09-17 21:38:40 +000049/* The following functions require the always_inline due to AMD
50 * function STOP_CAR_AND_CPU that disables cache as
Elyes HAOUAS918535a2016-07-28 21:25:21 +020051 * RAM, the cache as RAM stack can no longer be used. Called
Scott Duplichan78301d02010-09-17 21:38:40 +000052 * functions must be inlined to avoid stack usage. Also, the
53 * compiler must keep local variables register based and not
54 * allocated them from the stack. With gcc 4.5.0, some functions
55 * declared as inline are not being inlined. This patch forces
56 * these functions to always be inlined by adding the qualifier
57 * __attribute__((always_inline)) to their declaration.
58 */
59static inline __attribute__((always_inline)) msr_t rdmsr(unsigned index)
Eric Biedermanc84c1902004-10-14 20:13:01 +000060{
61 msr_t result;
62 __asm__ __volatile__ (
63 "rdmsr"
64 : "=a" (result.lo), "=d" (result.hi)
65 : "c" (index)
66 );
67 return result;
68}
69
Lee Leahyae738ac2016-07-24 08:03:37 -070070static inline __attribute__((always_inline)) void wrmsr(unsigned index,
71 msr_t msr)
Eric Biedermanc84c1902004-10-14 20:13:01 +000072{
73 __asm__ __volatile__ (
74 "wrmsr"
75 : /* No outputs */
76 : "c" (index), "a" (msr.lo), "d" (msr.hi)
77 );
78}
79
Lee Leahyae738ac2016-07-24 08:03:37 -070080#endif /* CONFIG_SOC_SETS_MSRS */
Myles Watson1d6d45e2009-11-06 17:02:51 +000081#endif /* __ROMCC__ */
Eric Biedermanc84c1902004-10-14 20:13:01 +000082
Eric Biedermanc84c1902004-10-14 20:13:01 +000083#endif /* CPU_X86_MSR_H */