57 rte_atomic_store_explicit(&me->locked, 1, rte_memory_order_relaxed);
58 rte_atomic_store_explicit(&me->next, NULL, rte_memory_order_relaxed);
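/* Swap this node in as the new queue tail. Acquire semantics take the lock
 * outright when the queue was empty; release semantics make the node
 * initialization above visible before other threads can reach the node.
 */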
66 prev = rte_atomic_exchange_explicit(msl, me, rte_memory_order_acq_rel);
67 if (likely(prev == NULL)) {
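/* A predecessor exists: link this node behind it. The release store keeps
 * the node initialization visible before the predecessor can follow
 * prev->next in its unlock path.
 */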
80 rte_atomic_store_explicit(&prev->next, me, rte_memory_order_release);
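/* Fence so that the spin on me->locked later in rte_mcslock_lock() cannot
 * be reordered before the store to prev->next above; without this
 * store-load ordering the lock handoff could deadlock.
 */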
86 rte_atomic_thread_fence(rte_memory_order_acq_rel);
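/* rte_mcslock_unlock(): a NULL me->next means no successor has queued
 * itself behind this node yet.
 */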
107 if (likely(rte_atomic_load_explicit(&me->next, rte_memory_order_relaxed) == NULL)) {
109 rte_mcslock_t *save_me = rte_atomic_load_explicit(&me, rte_memory_order_relaxed);
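/* Try to release by swinging the tail from this node back to NULL.
 * Release ordering publishes the critical section to the next lock taker;
 * failure means another thread has already swapped itself in as the tail.
 */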
112 if (likely(rte_atomic_compare_exchange_strong_explicit(msl, &save_me, NULL,
113 rte_memory_order_release, rte_memory_order_relaxed)))
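/* The CAS failed: a successor is queueing itself right now. The acquire
 * fence keeps the wait on me->next below from being read speculatively
 * before this point.
 */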
120 rte_atomic_thread_fence(rte_memory_order_acquire);
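/* A successor exists but has not linked itself yet; wait until its store
 * to prev->next (line 80) becomes visible in me->next.
 */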
124 RTE_ATOMIC(uintptr_t) *next;
125 next = (__rte_atomic uintptr_t *)&me->next;
126 RTE_WAIT_UNTIL_MASKED(next, UINTPTR_MAX, !=, 0, rte_memory_order_relaxed);
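/* Pass the lock on: the release store lets the successor, spinning on its
 * own locked flag, observe everything done inside this critical section.
 */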
130 rte_atomic_store_explicit(&me->next->locked, 0, rte_memory_order_release);
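For reference, a minimal usage sketch of the API these lines implement (the names p_ml, ml_me and update_shared_state are illustrative, not taken from the lock implementation): each thread supplies its own rte_mcslock_t node and must pass the same node to the matching unlock.

#include <rte_mcslock.h>

/* Shared lock: a pointer to the tail of the waiter queue, NULL when free. */
static RTE_ATOMIC(rte_mcslock_t *) p_ml;

static void
update_shared_state(void)
{
	rte_mcslock_t ml_me;	/* per-acquisition queue node, lives on this stack */

	rte_mcslock_lock(&p_ml, &ml_me);
	/* ... critical section ... */
	rte_mcslock_unlock(&p_ml, &ml_me);
}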