/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

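/*
 * "bcr" with a second operand of zero is a no-op branch that also acts
 * as a serializing instruction. "bcr 15,0" performs full checkpoint
 * synchronization; the z196 fast-BCR form "bcr 14,0" provides the same
 * memory ordering without the checkpoint-synchronization overhead.
 */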
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define __ASM_BARRIER "bcr 14,0\n"
#else
#define __ASM_BARRIER "bcr 15,0\n"
#endif

#define mb() do { asm volatile(__ASM_BARRIER : : : "memory"); } while (0)

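/*
 * The z/Architecture memory model is strongly ordered: loads are not
 * reordered with other loads and stores are not reordered with other
 * stores, so rmb() and wmb() only need to stop the compiler from
 * reordering. The dma_*() variants keep the full barrier for ordering
 * against device accesses.
 */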
#define rmb()				barrier()
#define wmb()				barrier()
#define dma_rmb()			mb()
#define dma_wmb()			mb()
#define __smp_mb()			mb()
#define __smp_rmb()			rmb()
#define __smp_wmb()			wmb()

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	WRITE_ONCE(*p, v);						\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	typeof(*p) ___p1 = READ_ONCE(*p);				\
	compiletime_assert_atomic_type(*p);				\
	barrier();							\
	___p1;								\
})
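
/*
 * A minimal usage sketch, assuming the smp_store_release()/
 * smp_load_acquire() wrappers that <asm-generic/barrier.h> builds from
 * the primitives above; the names data, ready, producer and consumer
 * are hypothetical:
 *
 *	static int data;
 *	static int ready;
 *
 *	void producer(void)
 *	{
 *		data = 42;
 *		smp_store_release(&ready, 1);
 *	}
 *
 *	void consumer(void)
 *	{
 *		if (smp_load_acquire(&ready))
 *			BUG_ON(data != 42);
 *	}
 *
 * The release store orders the write to data before the write to ready,
 * and the acquire load orders the read of ready before the read of data,
 * so a consumer that observes ready == 1 is guaranteed to see data == 42.
 * On s390 both sides reduce to a plain access plus a compiler barrier.
 */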

#define __smp_mb__before_atomic()	barrier()
#define __smp_mb__after_atomic()	barrier()
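/*
 * The hooks above only need compiler barriers: s390 atomics are built
 * from interlocked-update instructions such as COMPARE AND SWAP, which
 * are themselves serializing.
 */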

#include <asm-generic/barrier.h>
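/*
 * The generic header above derives the remaining smp_*() and virt_*()
 * wrappers from the __smp_*() primitives defined in this file.
 */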

#endif /* __ASM_BARRIER_H */