#endif /* __linux__ */
#if !defined (__i386__) && !defined (__ia64__) && !defined(__x86_64__) && \
- !defined(__PPC__) && !defined(__PPC64__) && !defined(__s390x__)
+ !defined(__PPC__) && !defined(__PPC64__) && !defined(__s390x__) && \
+ !defined(__aarch64__)
#error UNDEFINED ARCH
#endif
: "=&r" (tmp), "+m" (v)
: "b" (v)
: "cc");
+#elif defined(__aarch64__)
+ __atomic_fetch_add(v, 1, __ATOMIC_ACQ_REL);
#else /* !__ia64__ */
__asm__ __volatile__ (
"lock;" "incl %0"
: "=&r" (tmp), "+m" (v)
: "b" (v)
: "cc");
+#elif defined(__aarch64__)
+ __atomic_fetch_add(v, -1, __ATOMIC_ACQ_REL);
#else /* !__ia64__ */
__asm__ __volatile__ (
"lock;" "decl %0"
: "=&r" (current_value), "=m" (*v)
: "r" (v), "r" (match_value), "r" (new_value), "m" (*v)
: "cc", "memory");
+#elif defined(__aarch64__)
+ current_value = match_value;
+ __atomic_compare_exchange_n(v, &current_value, new_value, 1,
+ __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
#else
__asm__ __volatile__ (
"lock; cmpxchgl %1, %2"