/*
 * NOTE(review): fragment of the MCS queued-lock acquire path (appears to be
 * DPDK rte_mcslock_lock(msl, me)). The function signature and several
 * statements are outside this view; the leading "56"/"57"/... tokens are
 * extraction artifacts (original file line numbers), not code.
 */
/*
 * Initialize our queue node before publishing it: we start "locked" (we do
 * not own the lock yet) with no successor. RELAXED is sufficient here —
 * the ACQ_REL exchange below is what publishes these stores to others.
 */
56 __atomic_store_n(&me->locked, 1, __ATOMIC_RELAXED);
57 __atomic_store_n(&me->next, NULL, __ATOMIC_RELAXED);
/*
 * Atomically install ourselves as the new tail of the waiter queue,
 * obtaining the previous tail. ACQ_REL orders our node initialization
 * above before other threads can observe us via *msl.
 */
65 prev = __atomic_exchange_n(msl, me, __ATOMIC_ACQ_REL);
/* Queue was empty (no previous tail): the lock is free and now ours. */
66 if (
likely(prev == NULL)) {
/*
 * Slow path: link ourselves behind the old tail. RELEASE publishes our
 * fully-initialized node so the current holder can hand the lock over.
 */
79 __atomic_store_n(&prev->next, me, __ATOMIC_RELEASE);
/*
 * Full fence — presumably orders the link store above against the
 * spin-wait on me->locked that follows outside this view; TODO confirm
 * against the complete function body.
 */
85 __atomic_thread_fence(__ATOMIC_ACQ_REL);
/*
 * NOTE(review): fragment of the MCS queued-lock release path (appears to be
 * DPDK rte_mcslock_unlock(msl, me)). Signature, the return after the
 * successful CAS, and the tail of the RTE_WAIT_UNTIL_MASKED() call are
 * outside this view; leading "106"/"108"/... tokens are extraction
 * artifacts (original file line numbers), not code.
 */
/* Fast path: no successor has linked itself behind us yet. */
106 if (
likely(__atomic_load_n(&me->next, __ATOMIC_RELAXED) == NULL)) {
/*
 * Take a private copy of `me` for the CAS below, so a failed CAS writes
 * its "expected" update into the copy rather than clobbering `me`.
 * NOTE(review): `__atomic_load_n(&me, ...)` atomically loads a LOCAL
 * pointer variable, which looks redundant — `save_me = me` would seem
 * equivalent; verify against upstream history before changing.
 */
108 rte_mcslock_t *save_me = __atomic_load_n(&me, __ATOMIC_RELAXED);
/*
 * If we are still the queue tail, swing *msl from `me` to NULL, releasing
 * the lock with no handoff. RELEASE publishes the critical section;
 * the success branch (presumably a return) is outside this view.
 */
111 if (
likely(__atomic_compare_exchange_n(msl, &save_me, NULL, 0,
112 __ATOMIC_RELEASE, __ATOMIC_RELAXED)))
/*
 * CAS failed: another thread swapped itself in as tail and is about to
 * (or already did) link into me->next. ACQUIRE — presumably to order this
 * point before the re-read of me->next below; TODO confirm.
 */
119 __atomic_thread_fence(__ATOMIC_ACQUIRE);
/*
 * Spin until the successor finishes storing itself into me->next
 * (becomes non-NULL). Remaining macro arguments (memory order) are
 * outside this view.
 */
124 next = (uintptr_t *)&me->next;
125 RTE_WAIT_UNTIL_MASKED(next, UINTPTR_MAX, !=, 0,
/* Hand the lock to the successor: clearing its `locked` flag with RELEASE
 * publishes our critical section to the new owner. */
130 __atomic_store_n(&me->next->locked, 0, __ATOMIC_RELEASE);