tools/virtio/asm/barrier.h
/* SPDX-License-Identifier: GPL-2.0 */
#include <stdlib.h>
#if defined(__i386__) || defined(__x86_64__)
#define barrier() asm volatile("" ::: "memory")
#define virt_mb() __sync_synchronize()
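/*
 * x86 is TSO: the CPU already keeps loads ordered against loads and
 * stores ordered against stores, so the SMP read/write barriers only
 * need to stop the compiler from reordering.
 */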
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value)  do { \
	typeof(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)
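/*
 * For reference only, kept out of the build: a minimal sketch of the
 * plain atomic-store variant the comment above alludes to, using the
 * gcc/clang __atomic_store_n() builtin.  The exchange above is what
 * upstream settled on because of the code gcc emitted for the store.
 */
#if 0
#define virt_store_mb(var, value) do { \
	__atomic_store_n(&(var), (value), __ATOMIC_SEQ_CST); \
} while (0)
#endif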
/* Only the weak virt_* barriers should be used; calling one of these is a bug. */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#elif defined(__aarch64__)
#define dmb(opt) asm volatile("dmb " #opt : : : "memory")
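/*
 * dmb ishld orders prior loads against later loads and stores, dmb ishst
 * orders prior stores against later stores, and dmb ish orders everything;
 * all three are scoped to the inner shareable domain.
 */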
#define virt_mb() __sync_synchronize()
#define virt_rmb() dmb(ishld)
#define virt_wmb() dmb(ishst)
#define virt_store_mb(var, value)  do { WRITE_ONCE(var, value); dmb(ish); } while (0)
/* Only the weak virt_* barriers should be used; calling one of these is a bug. */
# define mb() abort()
# define dma_rmb() abort()
# define dma_wmb() abort()
#else
#error Please fill in barrier macros
#endif
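
/*
 * Usage sketch (hypothetical, kept out of the build): the pattern these
 * barriers exist for is "fill the slot, then publish the index" on the
 * producer side and "read the index, then the slot" on the consumer side.
 * The ring layout and helper names below are made up for illustration;
 * real code would also use READ_ONCE()/WRITE_ONCE() on the index.
 */
#if 0
struct demo_ring {
	unsigned short idx;
	unsigned int slot[16];
};

static inline void demo_publish(struct demo_ring *r, unsigned int v)
{
	r->slot[r->idx % 16] = v;	/* fill the slot first */
	virt_wmb();			/* slot store ordered before index store */
	r->idx++;			/* publishing the entry */
}

static inline int demo_poll(const struct demo_ring *r, unsigned short seen,
			    unsigned int *out)
{
	if (r->idx == seen)		/* nothing new yet */
		return 0;
	virt_rmb();			/* index load ordered before slot load */
	*out = r->slot[seen % 16];
	return 1;
}
#endif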