#ifndef __MOTR_LIB_USER_AARCH64_ATOMIC_H__
#define __MOTR_LIB_USER_AARCH64_ATOMIC_H__

#include <stdint.h>
#include <stdbool.h>

/*
 * 64-bit atomics for user-space AArch64, implemented as load/store-exclusive
 * (LL/SC) retry loops in the style of the Linux arm64 atomics. The structure
 * definition is included here for self-containment; in Motr it lives in the
 * generic lib headers.
 */
struct m0_atomic64 {
	int64_t a_value;
};

static inline void m0_atomic64_add(struct m0_atomic64 *a, int64_t num)
{
	int64_t       result;
	unsigned long tmp;

	asm volatile("// atomic64_add\n"
	"	prfm	pstl1strm, %2\n"	/* prefetch for store            */
	"1:	ldxr	%0, %2\n"		/* load-exclusive a_value        */
	"	add	%0, %0, %3\n"
	"	stxr	%w1, %0, %2\n"		/* store-exclusive, %w1 = status */
	"	cbnz	%w1, 1b"		/* retry if the store failed     */
	: "=&r" (result), "=&r" (tmp), "+Q" (a->a_value)
	: "r" (num));
}
static inline void m0_atomic64_sub(struct m0_atomic64 *a, int64_t num)
{
	int64_t       result;
	unsigned long tmp;

	asm volatile("// atomic64_sub\n"
	"	prfm	pstl1strm, %2\n"
	"1:	ldxr	%0, %2\n"
	"	sub	%0, %0, %3\n"
	"	stxr	%w1, %0, %2\n"
	"	cbnz	%w1, 1b"
	: "=&r" (result), "=&r" (tmp), "+Q" (a->a_value)
	: "r" (num));
}
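/*
 * Editorial aside, not in the original header: the two LL/SC loops above have
 * the same relaxed, no-barrier semantics as the GCC builtin shown below, which
 * on AArch64 typically compiles back to an ldxr/stxr loop (or a single LSE
 * instruction when built with +lse). Illustration only; the helper name
 * atomic64_add_builtin_example is hypothetical.
 */
#if 0
static inline void atomic64_add_builtin_example(struct m0_atomic64 *a,
						int64_t num)
{
	(void)__atomic_fetch_add(&a->a_value, num, __ATOMIC_RELAXED);
}
#endif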
static inline int64_t m0_atomic64_add_return(struct m0_atomic64 *a,
					     int64_t delta)
{
	int64_t       result;
	unsigned long tmp;

	asm volatile("// atomic64_add_return\n"
	"	prfm	pstl1strm, %2\n"
	"1:	ldxr	%0, %2\n"
	"	add	%0, %0, %3\n"
	"	stlxr	%w1, %0, %2\n"	/* store-release: orders prior accesses */
	"	cbnz	%w1, 1b\n"
	"	dmb	ish"		/* full barrier: *_return ops are ordered */
	: "=&r" (result), "=&r" (tmp), "+Q" (a->a_value)
	: "r" (delta)
	: "memory");

	return result;
}
static inline int64_t m0_atomic64_sub_return(struct m0_atomic64 *a,
					     int64_t delta)
{
	int64_t       result;
	unsigned long tmp;

	asm volatile("// atomic64_sub_return\n"
	"	prfm	pstl1strm, %2\n"
	"1:	ldxr	%0, %2\n"
	"	sub	%0, %0, %3\n"
	"	stlxr	%w1, %0, %2\n"
	"	cbnz	%w1, 1b\n"
	"	dmb	ish"
	: "=&r" (result), "=&r" (tmp), "+Q" (a->a_value)
	: "r" (delta)
	: "memory");

	return result;
}
static inline bool m0_atomic64_cas(int64_t *loc, int64_t oldval, int64_t newval)
{
	/* Sequentially-consistent compare-and-swap via the GCC builtin:
	 * returns true iff *loc equalled oldval and was replaced by newval. */
	return __atomic_compare_exchange_n(loc, &oldval, newval, 0,
					   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}
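/*
 * Illustrative only, not in the original header: the usual retry-loop pattern
 * around m0_atomic64_cas(). This sketch stores the maximum of *loc and val;
 * the helper name atomic64_max_example is hypothetical.
 */
#if 0
static inline void atomic64_max_example(int64_t *loc, int64_t val)
{
	int64_t old;

	do {
		old = *(volatile int64_t *)loc;	/* snapshot current value */
		if (old >= val)
			break;			/* already >= val: done   */
	} while (!m0_atomic64_cas(loc, old, val)); /* retry if *loc changed */
}
#endif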
/* Full-system memory barrier (m0_mb() in Motr's generic atomic API). */
static inline void m0_mb(void)
{
	asm volatile("dsb sy" ::: "memory");
}
/* The remaining operations are thin wrappers over the primitives above;
 * the bodies follow the conventional kernel-style semantics. */

static inline void m0_atomic64_set(struct m0_atomic64 *a, int64_t num)
{
	a->a_value = num;
}

static inline int64_t m0_atomic64_get(const struct m0_atomic64 *a)
{
	return a->a_value;
}

static inline void m0_atomic64_inc(struct m0_atomic64 *a)
{
	m0_atomic64_add(a, 1);
}

static inline void m0_atomic64_dec(struct m0_atomic64 *a)
{
	m0_atomic64_sub(a, 1);
}

/* True iff the increment brought the counter to zero. */
static inline bool m0_atomic64_inc_and_test(struct m0_atomic64 *a)
{
	return m0_atomic64_add_return(a, 1) == 0;
}

/* True iff the decrement brought the counter to zero. */
static inline bool m0_atomic64_dec_and_test(struct m0_atomic64 *a)
{
	return m0_atomic64_sub_return(a, 1) == 0;
}

#endif /* __MOTR_LIB_USER_AARCH64_ATOMIC_H__ */
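/*
 * Usage sketch, not part of the original header: exercises the API above from
 * a single thread; the comments track a_value. The function name
 * atomic64_usage_example is hypothetical.
 */
#if 0
#include <assert.h>

static void atomic64_usage_example(void)
{
	struct m0_atomic64 cnt;

	m0_atomic64_set(&cnt, 0);
	m0_atomic64_inc(&cnt);				/* 0 -> 1           */
	m0_atomic64_add(&cnt, 3);			/* 1 -> 4           */
	assert(m0_atomic64_add_return(&cnt, -1) == 3);	/* 4 -> 3           */
	assert(!m0_atomic64_dec_and_test(&cnt));	/* 3 -> 2, not zero */
	m0_atomic64_sub(&cnt, 2);			/* 2 -> 0           */
	assert(m0_atomic64_get(&cnt) == 0);
}
#endif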