// Full (hardware) memory fence honoring the requested ordering.
// Non-specialized architecture: conservatively brackets a hardware sync
// (AeLiteSync / AeFullSync) with compiler barriers on both sides, so
// neither the CPU nor the compiler can reorder across the fence.
AE_FORCEINLINE void fence(memory_order order)
{
    if (order == memory_order_relaxed) {
        // Relaxed ordering requires no fence at all.
        return;
    }

    if (order == memory_order_acquire) {
        _ReadBarrier();
        AeLiteSync();
        _ReadBarrier();
    }
    else if (order == memory_order_release) {
        _WriteBarrier();
        AeLiteSync();
        _WriteBarrier();
    }
    else if (order == memory_order_acq_rel) {
        _ReadWriteBarrier();
        AeLiteSync();
        _ReadWriteBarrier();
    }
    else if (order == memory_order_seq_cst) {
        // Sequential consistency needs the full hardware sync.
        _ReadWriteBarrier();
        AeFullSync();
        _ReadWriteBarrier();
    }
    else {
        assert(false);  // unrecognized memory_order value
    }
}
// Returns TRUE when the synchronization event is currently signalled
// (non-zero state), FALSE otherwise.
//
// The state is sampled exactly once into a local; the _ReadBarrier()
// compiler fence keeps later reads from being hoisted above the sample.
BOOL WINAPI StSynchronizationEvent_IsSet (
    __in PST_SYNCHRONIZATION_EVENT Event
    )
{
    LONG SampledState;

    SampledState = Event->State;
    _ReadBarrier();
    return (SampledState != 0) ? TRUE : FALSE;
}
// Returns how many partners have not yet arrived at the current barrier
// phase: total partners (high bits, above PARTNERS_SHIFT) minus the
// arrived count (low bits, under ARRIVED_MASK).
//
// The packed phase-state word is sampled once so both fields come from
// a single consistent value; _ReadBarrier() is a compiler-only fence
// preventing reordering of subsequent reads before the sample.
ULONG WINAPI StBarrier_GetPartnersRemaining (
    __in PST_BARRIER Barrier
    )
{
    ULONG Snapshot;

    Snapshot = Barrier->PhaseState->State;
    _ReadBarrier();
    return (Snapshot >> PARTNERS_SHIFT) - (Snapshot & ARRIVED_MASK);
}
// Returns the total number of partners configured for the barrier.
//
// The phase-state word packs the partner count in its high bits (above
// PARTNERS_SHIFT). The word is sampled once into a local; _ReadBarrier()
// is a compiler-only fence preventing later reads from being reordered
// before the sample.
//
// FIX: the snapshot is held in a ULONG (matching the sibling
// StBarrier_GetPartnersRemaining) so the right shift is a logical
// shift. The previous signed LONG would sign-extend on the shift if the
// top bit of the state word were ever set, yielding a bogus huge count.
ULONG WINAPI StBarrier_GetPartners (
    __in PST_BARRIER Barrier
    )
{
    ULONG State;

    State = Barrier->PhaseState->State;
    _ReadBarrier();
    return (State >> PARTNERS_SHIFT);
}
// Returns the barrier's current phase number.
//
// The 64-bit phase counter is copied into a local before the
// _ReadBarrier() compiler fence, which stops the compiler from moving
// subsequent reads ahead of the copy.
ULONGLONG WINAPI StBarrier_GetPhaseNumber (
    __in PST_BARRIER Barrier
    )
{
    ULONGLONG CurrentPhase;

    CurrentPhase = Barrier->PhaseNumber;
    _ReadBarrier();
    return CurrentPhase;
}
// clang -verify test: each deprecated MSVC barrier intrinsic below must
// produce a deprecation diagnostic recommending C++11 atomics. The
// "expected-warning" / "expected-note" comments are directives consumed
// by the -verify harness — they are part of the test, not commentary,
// and must match the compiler's output (including the hardcoded
// intrin.h line numbers) exactly.
void bar() {
  _ReadWriteBarrier(); // expected-warning {{is deprecated: use other intrinsics or C++11 atomics instead}}
  _ReadBarrier(); // expected-warning {{is deprecated: use other intrinsics or C++11 atomics instead}}
  _WriteBarrier(); // expected-warning {{is deprecated: use other intrinsics or C++11 atomics instead}}
  // FIXME: It'd be handy if we didn't have to hardcode the line number in
  // intrin.h.
  // [email protected]:754 {{declared here}}
  // [email protected]:759 {{declared here}}
  // [email protected]:764 {{declared here}}
}
// Compiler-only fence for the requested ordering: constrains
// compile-time reordering without emitting any hardware barrier.
AE_FORCEINLINE void compiler_fence(memory_order order)
{
    switch (order) {
    case memory_order_relaxed:
        // Nothing to do for relaxed ordering.
        break;
    case memory_order_acquire:
        _ReadBarrier();
        break;
    case memory_order_release:
        _WriteBarrier();
        break;
    case memory_order_acq_rel:
    case memory_order_seq_cst:
        // Both require the full compiler read/write barrier.
        _ReadWriteBarrier();
        break;
    default:
        assert(false);  // unrecognized memory_order value
    }
}
// Invalidates an instruction cache for the specified region. _Use_decl_annotations_ static void MmonpInvalidateInstructionCache(void *base_address, SIZE_T length) { #if defined(_AMD64_) UNREFERENCED_PARAMETER(base_address); UNREFERENCED_PARAMETER(length); __faststorefence(); #elif defined(_X86_) UNREFERENCED_PARAMETER(base_address); UNREFERENCED_PARAMETER(length); _ReadBarrier(); #endif }