diff options
Diffstat (limited to 'sys/linux/amd64/arch/atomic.h')
-rw-r--r-- | sys/linux/amd64/arch/atomic.h | 123 |
1 file changed, 123 insertions, 0 deletions
diff --git a/sys/linux/amd64/arch/atomic.h b/sys/linux/amd64/arch/atomic.h new file mode 100644 index 0000000..b3aeed1 --- /dev/null +++ b/sys/linux/amd64/arch/atomic.h @@ -0,0 +1,123 @@ +static inline int +atomic·cas(volatile int *p, int t, int s) +{ + __asm__ __volatile__ ( + "lock ; cmpxchg %3, %1" + : "=a"(t), "=m"(*p) : "a"(t), "r"(s) : "memory" ); + return t; +} + +static inline void +*atomic·casp(volatile void *p, void *t, void *s) +{ + __asm__( "lock ; cmpxchg %3, %1" + : "=a"(t), "=m"(*(void *volatile *)p) + : "a"(t), "r"(s) : "memory" ); + return t; +} + +static inline int +atomic·swap(volatile int *p, int v) +{ + __asm__ __volatile__( + "xchg %0, %1" + : "=r"(v), "=m"(*p) : "0"(v) : "memory" ); + return v; +} + +static inline int +atomic·fetchadd(volatile int *p, int v) +{ + __asm__ __volatile__( + "lock ; xadd %0, %1" + : "=r"(v), "=m"(*p) : "0"(v) : "memory" ); + return v; +} + +static inline void +atomic·and(volatile int *p, int v) +{ + __asm__ __volatile__( + "lock ; and %1, %0" + : "=m"(*p) : "r"(v) : "memory" ); +} + +static inline void +atomic·or(volatile int *p, int v) +{ + __asm__ __volatile__( + "lock ; or %1, %0" + : "=m"(*p) : "r"(v) : "memory" ); +} + +static inline void +atomic·and64(volatile uint64 *p, uint64 v) +{ + __asm__ __volatile( + "lock ; and %1, %0" + : "=m"(*p) : "r"(v) : "memory" ); +} + +static inline void +atomic·or64(volatile uint64 *p, uint64 v) +{ + __asm__ __volatile__( + "lock ; or %1, %0" + : "=m"(*p) : "r"(v) : "memory" ); +} + +static inline void +atomic·inc(volatile int *p) +{ + __asm__ __volatile__( + "lock ; incl %0" + : "=m"(*p) : "m"(*p) : "memory" ); +} + +static inline void +atomic·dec(volatile int *p) +{ + __asm__ __volatile__( + "lock ; decl %0" + : "=m"(*p) : "m"(*p) : "memory" ); +} + +static inline void +atomic·store(volatile int *p, int x) +{ + __asm__ __volatile__( + "mov %1, %0 ; lock ; orl $0,(%%rsp)" + : "=m"(*p) : "r"(x) : "memory" ); +} + +static inline void +atomic·barrier() +{ + __asm__ 
__volatile__( "" : : : "memory" ); +} + +static inline void +atomic·spin() +{ + __asm__ __volatile__( "pause" : : : "memory" ); +} + +static inline void +atomic·crash() +{ + __asm__ __volatile__( "hlt" : : : "memory" ); +} + +static inline int +atomic·ctz64(uint64 x) +{ + __asm__( "bsf %1,%0" : "=r"(x) : "r"(x) ); + return x; +} + +static inline int +atomic·clz64(uint64 x) +{ + __asm__( "bsr %1,%0 ; xor $63,%0" : "=r"(x) : "r"(x) ); + return x; +} |