1 #ifndef _ASM_METAG_BARRIER_H
2 #define _ASM_METAG_BARRIER_H
4 #include <asm/metag_mem.h>
#define nop()		asm volatile ("NOP")

/*
 * Full barrier: on Meta a write fence is the strongest ordering primitive
 * needed, so mb() is implemented via wmb() (defined below); rmb() needs only
 * a compiler barrier.
 */
#define mb()		wmb()
#define rmb()		barrier()
10 #ifdef CONFIG_METAG_META21
12 /* HTP and above have a system event to fence writes */
13 static inline void wr_fence(void)
15 volatile int *flushptr = (volatile int *) LINSYSEVENT_WR_FENCE;
20 #else /* CONFIG_METAG_META21 */
23 * ATP doesn't have system event to fence writes, so it is necessary to flush
24 * the processor write queues as well as possibly the write combiner (depending
25 * on the page being written).
26 * To ensure the write queues are flushed we do 4 writes to a system event
27 * register (in this case write combiner flush) which will also flush the write
30 static inline void wr_fence(void)
32 volatile int *flushptr = (volatile int *) LINSYSEVENT_WR_COMBINE_FLUSH;
40 #endif /* !CONFIG_METAG_META21 */
static inline void wmb(void)
{
	/* flush writes through the write combiner */
	wr_fence();
}
/* Dependent reads need no explicit ordering here: no-op. */
#define read_barrier_depends()	do { } while (0)
#ifndef CONFIG_SMP
/* UP: compiler barriers are sufficient; fence() is a no-op. */
#define fence()		do { } while (0)
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#else

#ifdef CONFIG_METAG_SMP_WRITE_REORDERING
/*
 * Write to the atomic memory unlock system event register (command 0). This is
 * needed before a write to shared memory in a critical section, to prevent
 * external reordering of writes before the fence on other threads with writes
 * after the fence on this thread (and to prevent the ensuing cache-memory
 * incoherence). It is therefore ineffective if used after and on the same
 * thread as a write.
 */
static inline void fence(void)
{
	volatile int *flushptr = (volatile int *) LINSYSEVENT_WR_ATOMIC_UNLOCK;

	barrier();
	*flushptr = 0;
	barrier();
}
#define smp_mb()	fence()
#define smp_rmb()	fence()
#define smp_wmb()	barrier()
#else
/* SMP without write reordering: compiler barriers suffice. */
#define fence()		do { } while (0)
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#endif /* CONFIG_METAG_SMP_WRITE_REORDERING */
#endif /* CONFIG_SMP */
#define smp_read_barrier_depends()	do { } while (0)

/* Store value into var, then issue a full SMP memory barrier. */
#define set_mb(var, value) do { var = value; smp_mb(); } while (0)
/*
 * Release store: the barrier orders all memory accesses issued before the
 * macro against the store to *p. Pairs with smp_load_acquire().
 */
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	ACCESS_ONCE(*p) = (v);						\
} while (0)
/*
 * Acquire load: reads *p once, then orders that load against all memory
 * accesses issued after the macro. Pairs with smp_store_release().
 */
#define smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = ACCESS_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	smp_mb();							\
	___p1;								\
})
100 #endif /* _ASM_METAG_BARRIER_H */