#if defined(__i386__) || defined(__x86_64__)
#define barrier() asm volatile("" ::: "memory")
#define mb() __sync_synchronize()
#define smp_mb() mb()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
/* Weak (smp_*) barriers should be used; reaching rmb()/wmb() here is a bug */
#define rmb() abort()
#define wmb() abort()
#else
#error Please fill in barrier macros
#endif
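
As an illustration (not part of the header above), the sketch below pairs smp_wmb()/smp_rmb() the way the comment intends: a producer stores a payload and then a ready flag, and a consumer reads the flag before reading the payload. The barrier bodies are copied from the x86 branch so the example compiles on its own with GCC or Clang; the names publish(), consume(), payload and ready are hypothetical.

/*
 * Hypothetical usage sketch, assuming the x86 barrier macros above.
 * On x86, stores are not reordered with earlier stores and loads are
 * not reordered with earlier loads, so a compiler barrier is enough
 * for smp_wmb()/smp_rmb().
 */
#define barrier()  asm volatile("" ::: "memory")
#define smp_wmb()  barrier()
#define smp_rmb()  barrier()

static int payload;
static int ready;

/* Producer: write the data, then publish the flag. */
static void publish(int value)
{
	payload = value;
	smp_wmb();              /* payload store ordered before ready store */
	ready = 1;
}

/* Consumer: spin on the flag, then read the data. */
static int consume(void)
{
	while (!ready)
		barrier();      /* force a reload of ready on each iteration */
	smp_rmb();              /* ready load ordered before payload load */
	return payload;
}

int main(void)
{
	publish(42);
	return consume() == 42 ? 0 : 1;
}

In a real two-thread setup the same pairing applies: the consumer's smp_rmb() matches the producer's smp_wmb(), which is exactly why the strong rmb()/wmb() variants are defined to abort() in this header.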