25 #ifndef __MOTR_LIB_USER_X86_64_ATOMIC_H__ 26 #define __MOTR_LIB_USER_X86_64_ATOMIC_H__ 68 asm volatile(
"lock incq %0" 82 asm volatile(
"lock decq %0" 92 asm volatile(
"lock addq %1,%0" 102 asm volatile(
"lock subq %1,%0" 121 asm volatile(
"lock xaddq %0, %1;" 122 :
"+r" (delta),
"+m" (a->
a_value)
124 return delta + result;
142 unsigned char result;
144 asm volatile(
"lock incq %0; sete %1" 145 :
"=m" (a->
a_value),
"=qm" (result)
146 :
"m" (a->
a_value) :
"memory");
152 unsigned char result;
154 asm volatile(
"lock decq %0; sete %1" 155 :
"=m" (a->
a_value),
"=qm" (result)
156 :
"m" (a->
a_value) :
"memory");
/**
 * Atomic compare-and-swap on a 64-bit location.
 *
 * If *loc equals @oldval, stores @newval into *loc; otherwise leaves
 * *loc unchanged. Returns true iff the swap happened.
 *
 * "lock cmpxchgq" compares RAX ("0" ties it to @oldval) with the memory
 * operand; on equality it writes @newval, and in either case RAX ends
 * up holding the value that was in memory ("=a" (val)), so success is
 * exactly val == oldval. The cast uses int64_t (not long) to match the
 * declared type of @loc.
 */
static inline bool m0_atomic64_cas(int64_t *loc, int64_t oldval, int64_t newval)
{
	int64_t val;

	__asm__ __volatile__(
		"lock cmpxchgq %2,%1"
		: "=a" (val),
		  "+m" (*(volatile int64_t *)loc)
		: "r" (newval),
		  "0" (oldval)
		: "memory");
	return val == oldval;
}
/**
 * Full hardware memory barrier.
 *
 * "mfence" serializes all prior loads and stores before any subsequent
 * ones on this CPU; the "memory" clobber additionally prevents the
 * compiler from reordering memory accesses across the call.
 *
 * NOTE(review): the function name is not visible in this chunk; "m0_mb"
 * is reconstructed from the file's naming convention — confirm against
 * the pristine file.
 */
static inline void m0_mb(void)
{
	__asm__ __volatile__(
		"mfence" ::: "memory");
}
/*
 * Forward declarations of the atomic64 operations defined inline above.
 * (Each declaration was missing its terminating semicolon.)
 */

/** Sets the counter to @num (non-atomic store semantics not implied here). */
static void m0_atomic64_set(struct m0_atomic64 *a, int64_t num);
/** Returns the current counter value. */
static int64_t m0_atomic64_get(const struct m0_atomic64 *a);
/** Atomically increments the counter by one. */
static void m0_atomic64_inc(struct m0_atomic64 *a);
/** Atomically decrements the counter by one. */
static void m0_atomic64_dec(struct m0_atomic64 *a);
/** Atomically adds @num to the counter. */
static void m0_atomic64_add(struct m0_atomic64 *a, int64_t num);
/** Atomically subtracts @num from the counter. */
static void m0_atomic64_sub(struct m0_atomic64 *a, int64_t num);
/** Atomically adds @delta and returns the new counter value. */
static int64_t m0_atomic64_add_return(struct m0_atomic64 *a, int64_t delta);
/** Atomically subtracts @delta and returns the new counter value. */
static int64_t m0_atomic64_sub_return(struct m0_atomic64 *a, int64_t delta);
/** Atomically increments; returns true iff the result is zero. */
static bool m0_atomic64_inc_and_test(struct m0_atomic64 *a);
/** Atomically decrements; returns true iff the result is zero. */
static bool m0_atomic64_dec_and_test(struct m0_atomic64 *a);
/** Compare-and-swap: if *loc == oldval, store newval; true on success. */
static bool m0_atomic64_cas(int64_t *loc, int64_t oldval, int64_t newval);