/*
 * Try to advance the dispatcher's memory-reclamation epoch by one.
 *
 * Loads the current global reclaim epoch and asks
 * mm_event_dispatch_check_epoch() whether it may be advanced; only on a
 * positive answer is the epoch counter bumped.  Returns the check result,
 * i.e. true iff the epoch was actually advanced.
 *
 * NOTE(review): the return type is declared on a preceding line outside
 * this chunk (the function returns `rc`, a bool).
 */
mm_event_dispatch_advance_epoch(struct mm_event_dispatch *dispatch)
{
	ENTER();

	/* Snapshot the current global epoch. */
	uint32_t epoch = mm_memory_load(dispatch->reclaim_epoch);
	/* May the epoch be advanced past this value? */
	bool rc = mm_event_dispatch_check_epoch(dispatch, epoch);
	if (rc) {
		/* Order the epoch check above before publishing the new
		 * epoch value below.  A full fence is stronger than needed
		 * here.  TODO: load_store fence */
		mm_memory_fence();
		mm_memory_store(dispatch->reclaim_epoch, epoch + 1);
		DEBUG("advance epoch %u", epoch + 1);
	}

	LEAVE();
	return rc;
}
/*
 * Wait on a sense-reversing spin barrier.
 *
 * Each caller flips its private sense value and atomically decrements the
 * shared arrival counter.  One branch (the "last arrival") resets the
 * counter for the next round and then publishes the new sense, releasing
 * every other thread; all other threads spin until the shared sense
 * matches their flipped private sense.
 *
 * NOTE(review): the `== 0` test assumes mm_atomic_uint32_dec_and_test()
 * yields zero exactly for the thread whose decrement exhausts the counter
 * (the last arrival) — confirm against the atomic primitive's definition,
 * as Linux-style dec_and_test uses the opposite convention.
 */
mm_thread_barrier_wait(struct mm_thread_barrier *const barrier, struct mm_thread_barrier_local *local)
{
	/* Flip this thread's private sense for the new barrier round. */
	uint32_t sense = ~local->sense;

	if (mm_atomic_uint32_dec_and_test(&barrier->value) == 0) {
		/* Last arrival: re-arm the counter for the next round... */
		mm_memory_store(barrier->value, barrier->count);
		/* ...and make sure the reset is visible before the sense
		 * flip releases the waiters (store/store ordering). */
		mm_memory_store_fence();
		mm_memory_store(barrier->sense, sense);
	} else {
		/* Order the decrement before the spin loads below.
		 * TODO: atomic_load fence */
		mm_memory_fence();
		/* Spin until the last arrival publishes the new sense. */
		while (mm_memory_load(barrier->sense) != sense)
			mm_cpu_backoff();
	}

	/* Remember this round's sense for the next barrier round. */
	local->sense = sense;
}