/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
#ifndef __ASM_ARM_BARRIER_H
#define __ASM_ARM_BARRIER_H

#ifndef __ASSEMBLY__

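/*
 * Event and low-power wait instructions: sev signals an event to all
 * cores, sevl sets only the local core's event register (typically used
 * just before a wfe loop), wfe waits for an event, and wfi waits for an
 * interrupt.
 */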
#define sevl() asm volatile("sevl" : : : "memory")
#define sev() asm volatile("sev" : : : "memory")
#define wfe() asm volatile("wfe" : : : "memory")
#define wfi() asm volatile("wfi" : : : "memory")

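/*
 * Architectural barriers: isb flushes the pipeline so later instructions
 * are refetched, dsb stalls until all prior memory accesses complete
 * system-wide, and dmb only orders memory accesses around it without
 * waiting for them to complete.
 */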
#define isb() asm volatile("isb" : : : "memory")
#define dsb() asm volatile("dsb sy" : : : "memory")
#define dmb() asm volatile("dmb sy" : : : "memory")

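/*
 * Linux-style barrier names: a full barrier plus read/write variants.
 * "dsb ld" waits only for prior loads, "dsb st" only for prior stores.
 */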
#define mb() dsb()
#define rmb() asm volatile("dsb ld" : : : "memory")
#define wmb() asm volatile("dsb st" : : : "memory")

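/*
 * barrier() is a pure compiler barrier: it stops the compiler from
 * reordering memory accesses across it but emits no instruction. It is
 * only provided on SMP builds.
 */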
#if IS_ENABLED(CONFIG_SMP)
#define barrier() __asm__ __volatile__("": : :"memory")
#endif

#define nop() asm volatile("nop")

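/*
 * force_read(x) forces a real load of x through a volatile cast,
 * preventing the compiler from reusing a previously read value.
 */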
#define force_read(x) (*(volatile typeof(x) *)&(x))

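/*
 * load_acquire(p): single-copy-atomic load with acquire semantics (ldar).
 * No memory access that follows the load in program order can be
 * observed before it. Only 32- and 64-bit objects are handled; other
 * sizes leave the result uninitialized.
 */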
#define load_acquire(p)						\
({								\
	typeof(*p) ___p1;					\
	switch (sizeof(*p)) {					\
	case 4:							\
		asm volatile ("ldar %w0, %1"			\
			: "=r" (___p1) : "Q" (*p) : "memory");	\
		break;						\
	case 8:							\
		asm volatile ("ldar %0, %1"			\
			: "=r" (___p1) : "Q" (*p) : "memory");	\
		break;						\
	}							\
	___p1;							\
})

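/*
 * store_release(p, v): single-copy-atomic store with release semantics
 * (stlr). All memory accesses that precede the store in program order
 * are observable before it, which makes it suitable for publishing data
 * behind a flag. As above, only 32- and 64-bit objects are handled.
 */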
#define store_release(p, v)					\
do {								\
	switch (sizeof(*p)) {					\
	case 4:							\
		asm volatile ("stlr %w1, %0"			\
			: "=Q" (*p) : "r" (v) : "memory");	\
		break;						\
	case 8:							\
		asm volatile ("stlr %1, %0"			\
			: "=Q" (*p) : "r" (v) : "memory");	\
		break;						\
	}							\
} while (0)

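/*
 * load_acquire_exclusive(p): ldaxr, an acquire load that also arms the
 * exclusive monitor for the location. Pair it with
 * store_release_exclusive() below to build atomic read-modify-write
 * sequences.
 */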
#define load_acquire_exclusive(p)				\
({								\
	typeof(*p) ___p1;					\
	switch (sizeof(*p)) {					\
	case 4:							\
		asm volatile ("ldaxr %w0, %1"			\
			: "=r" (___p1) : "Q" (*p) : "memory");	\
		break;						\
	case 8:							\
		asm volatile ("ldaxr %0, %1"			\
			: "=r" (___p1) : "Q" (*p) : "memory");	\
		break;						\
	}							\
	___p1;							\
})

/*
 * store_release_exclusive(p, v): stlxr, the release-store counterpart of
 * load_acquire_exclusive(). The store only takes effect if the exclusive
 * monitor armed by the matching ldaxr is still held; stlxr writes 0 to
 * its status register on success, hence the inversion below.
 * Returns 1 on success, 0 if the reservation was lost.
 */
#define store_release_exclusive(p, v)				\
({								\
	int ret;						\
	switch (sizeof(*p)) {					\
	case 4:							\
		asm volatile ("stlxr %w0, %w2, %1"		\
			: "=&r" (ret), "=Q" (*p) : "r" (v)	\
			: "memory");				\
		break;						\
	case 8:							\
		asm volatile ("stlxr %w0, %2, %1"		\
			: "=&r" (ret), "=Q" (*p) : "r" (v)	\
			: "memory");				\
		break;						\
	}							\
	!ret;							\
})

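/*
 * Usage sketch (illustrative only, not part of this interface): the
 * exclusive pair above is the usual building block for a simple spin
 * lock. Assuming a hypothetical uint32_t lock word where 0 means free,
 * an acquire loop might look like:
 *
 *	while (1) {
 *		if (load_acquire_exclusive(&lock) != 0) {
 *			wfe();	// doze until the owner's store wakes us
 *			continue;
 *		}
 *		if (store_release_exclusive(&lock, 1))
 *			break;	// monitor still held; lock acquired
 *	}
 *
 * with the matching release being store_release(&lock, 0) followed by
 * sev() to wake any waiters.
 */
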
#endif /* __ASSEMBLY__ */

#endif /* __ASM_ARM_BARRIER_H */