RTDECL(int) RTSemRWDestroy(RTSEMRW hRWSem)
{
    /*
     * Validate input.
     */
    RTSEMRWINTERNAL *pThis = hRWSem;
    if (pThis == NIL_RTSEMRW)
        return VINF_SUCCESS;
    AssertPtrReturn(pThis, VERR_INVALID_HANDLE);
    AssertReturn(pThis->u32Magic == RTSEMRW_MAGIC, VERR_INVALID_HANDLE);
    Assert(!(ASMAtomicReadU64(&pThis->u64State) & (RTSEMRW_CNT_RD_MASK | RTSEMRW_CNT_WR_MASK)));

    /*
     * Invalidate the object and free up the resources.
     */
    AssertReturn(ASMAtomicCmpXchgU32(&pThis->u32Magic, ~RTSEMRW_MAGIC, RTSEMRW_MAGIC), VERR_INVALID_HANDLE);

    RTSEMEVENTMULTI hEvtRead;
    ASMAtomicXchgHandle(&pThis->hEvtRead, NIL_RTSEMEVENTMULTI, &hEvtRead);
    int rc = RTSemEventMultiDestroy(hEvtRead);
    AssertRC(rc);

    RTSEMEVENT hEvtWrite;
    ASMAtomicXchgHandle(&pThis->hEvtWrite, NIL_RTSEMEVENT, &hEvtWrite);
    rc = RTSemEventDestroy(hEvtWrite);
    AssertRC(rc);

#ifdef RTSEMRW_STRICT
    RTLockValidatorRecSharedDelete(&pThis->ValidatorRead);
    RTLockValidatorRecExclDelete(&pThis->ValidatorWrite);
#endif
    RTMemFree(pThis);
    return VINF_SUCCESS;
}
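/*
 * A minimal usage sketch for the public RTSemRW API that this destructor
 * belongs to: create the semaphore, take it shared and exclusive, then
 * destroy it.  It assumes the declarations in iprt/semaphore.h and
 * iprt/err.h; the caller function below is hypothetical.
 */
#include <iprt/semaphore.h>
#include <iprt/err.h>

static int exampleUseSemRW(void)
{
    RTSEMRW hRWSem;
    int rc = RTSemRWCreate(&hRWSem);
    if (RT_FAILURE(rc))
        return rc;

    /* Shared (read) side: several threads may hold this concurrently. */
    rc = RTSemRWRequestRead(hRWSem, RT_INDEFINITE_WAIT);
    if (RT_SUCCESS(rc))
    {
        /* ... read the protected data ... */
        RTSemRWReleaseRead(hRWSem);
    }

    /* Exclusive (write) side: only one thread at a time. */
    rc = RTSemRWRequestWrite(hRWSem, RT_INDEFINITE_WAIT);
    if (RT_SUCCESS(rc))
    {
        /* ... modify the protected data ... */
        RTSemRWReleaseWrite(hRWSem);
    }

    /* All readers and writers must be gone before destruction (see the
       assertion on u64State in RTSemRWDestroy above). */
    return RTSemRWDestroy(hRWSem);
}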
/**
 * Internal worker for RTSemXRoadsNSEnter and RTSemXRoadsEWEnter.
 *
 * @returns IPRT status code.
 * @param   pThis           The semaphore instance.
 * @param   fDir            The direction.
 * @param   uCountShift     The shift count for getting the count.
 * @param   fCountMask      The mask for getting the count.
 * @param   uWaitCountShift The shift count for getting the wait count.
 * @param   fWaitCountMask  The mask for getting the wait count.
 */
DECL_FORCE_INLINE(int) rtSemXRoadsEnter(RTSEMXROADSINTERNAL *pThis, uint64_t fDir,
                                        uint64_t uCountShift, uint64_t fCountMask,
                                        uint64_t uWaitCountShift, uint64_t fWaitCountMask)
{
    uint64_t u64OldState;
    uint64_t u64State;

    u64State = ASMAtomicReadU64(&pThis->u64State);
    u64OldState = u64State;
    add_hist(u64State, u64OldState, fDir, "enter");

    for (;;)
    {
        if ((u64State & RTSEMXROADS_DIR_MASK) == (fDir << RTSEMXROADS_DIR_SHIFT))
        {
            /* It flows in the right direction, try follow it before it changes. */
            uint64_t c = (u64State & fCountMask) >> uCountShift;
            c++;
            Assert(c < 8*_1K);
            u64State &= ~fCountMask;
            u64State |= c << uCountShift;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
                add_hist(u64State, u64OldState, fDir, "enter-simple");
                break;
            }
        }
        else if ((u64State & (RTSEMXROADS_CNT_NS_MASK | RTSEMXROADS_CNT_EW_MASK)) == 0)
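/*
 * Standalone illustration (not IPRT code) of the lock-free pattern used by
 * rtSemXRoadsEnter above: a direction flag and a traveller count are packed
 * into one 64-bit state word and advanced with a compare-and-swap retry loop.
 * The sketch uses C11 atomics, and the field layout below is made up for the
 * example.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define EX_DIR_MASK   UINT64_C(0x0000000000000001)  /* bit 0: current direction */
#define EX_CNT_MASK   UINT64_C(0x00000000ffff0000)  /* bits 16..31: traveller count */
#define EX_CNT_SHIFT  16

/* Try to join the flow if it already runs in direction fDir; true on success. */
static bool exampleTryEnter(_Atomic uint64_t *pState, uint64_t fDir)
{
    uint64_t uOld = atomic_load(pState);
    for (;;)
    {
        if ((uOld & EX_DIR_MASK) != fDir)
            return false;                      /* wrong direction, caller must wait */

        uint64_t cTravellers = (uOld & EX_CNT_MASK) >> EX_CNT_SHIFT;
        uint64_t uNew        = (uOld & ~EX_CNT_MASK) | ((cTravellers + 1) << EX_CNT_SHIFT);

        /* CAS: on failure uOld is refreshed with the current value and we retry. */
        if (atomic_compare_exchange_weak(pState, &uOld, uNew))
            return true;
    }
}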
RTDECL(int) RTSemXRoadsDestroy(RTSEMXROADS hXRoads)
{
    /*
     * Validate input.
     */
    RTSEMXROADSINTERNAL *pThis = hXRoads;
    if (pThis == NIL_RTSEMXROADS)
        return VINF_SUCCESS;
    AssertPtrReturn(pThis, VERR_INVALID_HANDLE);
    AssertReturn(pThis->u32Magic == RTSEMXROADS_MAGIC, VERR_INVALID_HANDLE);
    Assert(!(ASMAtomicReadU64(&pThis->u64State) & (RTSEMXROADS_CNT_NS_MASK | RTSEMXROADS_CNT_EW_MASK)));

    /*
     * Invalidate the object and free up the resources.
     */
    AssertReturn(ASMAtomicCmpXchgU32(&pThis->u32Magic, RTSEMXROADS_MAGIC_DEAD, RTSEMXROADS_MAGIC), VERR_INVALID_HANDLE);

    RTSEMEVENTMULTI hEvt;
    ASMAtomicXchgHandle(&pThis->aDirs[0].hEvt, NIL_RTSEMEVENTMULTI, &hEvt);
    int rc = RTSemEventMultiDestroy(hEvt);
    AssertRC(rc);

    ASMAtomicXchgHandle(&pThis->aDirs[1].hEvt, NIL_RTSEMEVENTMULTI, &hEvt);
    rc = RTSemEventMultiDestroy(hEvt);
    AssertRC(rc);

    RTMemFree(pThis);
    return VINF_SUCCESS;
}
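/*
 * A minimal usage sketch for the RTSemXRoads ("crossroads") API whose
 * destructor is shown above: north/south travellers may run concurrently
 * with each other but not with east/west travellers, and vice versa.
 * Assumes the declarations in iprt/semaphore.h; the caller function is
 * hypothetical.
 */
#include <iprt/semaphore.h>
#include <iprt/err.h>

static int exampleUseXRoads(void)
{
    RTSEMXROADS hXRoads;
    int rc = RTSemXRoadsCreate(&hXRoads);
    if (RT_FAILURE(rc))
        return rc;

    /* A north/south traveller... */
    rc = RTSemXRoadsNSEnter(hXRoads);
    if (RT_SUCCESS(rc))
    {
        /* ... does work that may run concurrently with other NS travellers ... */
        RTSemXRoadsNSLeave(hXRoads);
    }

    /* ... while east/west travellers take the other direction. */
    rc = RTSemXRoadsEWEnter(hXRoads);
    if (RT_SUCCESS(rc))
    {
        /* ... work that never overlaps with any NS traveller ... */
        RTSemXRoadsEWLeave(hXRoads);
    }

    /* Both directions must be idle before destruction (see the assertion above). */
    return RTSemXRoadsDestroy(hXRoads);
}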
static int pdmacFileEpGetSize(PPDMASYNCCOMPLETIONENDPOINT pEndpoint, uint64_t *pcbSize)
{
    PPDMASYNCCOMPLETIONENDPOINTFILE pEpFile = (PPDMASYNCCOMPLETIONENDPOINTFILE)pEndpoint;

    *pcbSize = ASMAtomicReadU64(&pEpFile->cbFile);

    return VINF_SUCCESS;
}
static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
{
    /*
     * Validate input.
     */
    AssertPtr(pThis);
    AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
#ifdef IN_RING0
    Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
#else
    Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
#endif

#ifdef RTCRITSECTRW_STRICT
    RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
    if (!fTryOnly)
    {
        int            rc9;
        RTNATIVETHREAD hNativeWriter;
        ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
        if (hNativeWriter != NIL_RTTHREAD && hNativeWriter == RTThreadNativeSelf())
            rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        else
            rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Get cracking...
     */
    uint64_t u64State    = ASMAtomicReadU64(&pThis->u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
        {
            /* It flows in the right direction, try follow it before it changes. */
            uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTCSRW_CNT_MASK / 2);
            u64State &= ~RTCSRW_CNT_RD_MASK;
            u64State |= c << RTCSRW_CNT_RD_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
#ifdef RTCRITSECTRW_STRICT
                RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
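/*
 * A minimal usage sketch for the public read/write critical section API that
 * the worker above backs.  Assumes the declarations in iprt/critsectrw.h;
 * the caller function is hypothetical.
 */
#include <iprt/critsectrw.h>
#include <iprt/err.h>

static int exampleUseCritSectRw(void)
{
    RTCRITSECTRW CritSect;
    int rc = RTCritSectRwInit(&CritSect);
    if (RT_FAILURE(rc))
        return rc;

    /* Shared entry: multiple readers may be inside at once. */
    rc = RTCritSectRwEnterShared(&CritSect);
    if (RT_SUCCESS(rc))
    {
        /* ... read-only access to the protected state ... */
        RTCritSectRwLeaveShared(&CritSect);
    }

    /* Exclusive entry: waits until all readers have left. */
    rc = RTCritSectRwEnterExcl(&CritSect);
    if (RT_SUCCESS(rc))
    {
        /* ... modify the protected state ... */
        RTCritSectRwLeaveExcl(&CritSect);
    }

    return RTCritSectRwDelete(&CritSect);
}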
static int rtSemRWRequestRead(RTSEMRW hRWSem, RTMSINTERVAL cMillies, bool fInterruptible, PCRTLOCKVALSRCPOS pSrcPos)
{
    /*
     * Validate input.
     */
    RTSEMRWINTERNAL *pThis = hRWSem;
    if (pThis == NIL_RTSEMRW)
        return VINF_SUCCESS;
    AssertPtrReturn(pThis, VERR_INVALID_HANDLE);
    AssertReturn(pThis->u32Magic == RTSEMRW_MAGIC, VERR_INVALID_HANDLE);

#ifdef RTSEMRW_STRICT
    RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
    if (cMillies > 0)
    {
        int            rc9;
        RTNATIVETHREAD hNativeWriter;
        ASMAtomicUoReadHandle(&pThis->hNativeWriter, &hNativeWriter);
        if (hNativeWriter != NIL_RTTHREAD && hNativeWriter == RTThreadNativeSelf())
            rc9 = RTLockValidatorRecExclCheckOrder(&pThis->ValidatorWrite, hThreadSelf, pSrcPos, cMillies);
        else
            rc9 = RTLockValidatorRecSharedCheckOrder(&pThis->ValidatorRead, hThreadSelf, pSrcPos, cMillies);
        if (RT_FAILURE(rc9))
            return rc9;
    }
#endif

    /*
     * Get cracking...
     */
    uint64_t u64State    = ASMAtomicReadU64(&pThis->u64State);
    uint64_t u64OldState = u64State;

    for (;;)
    {
        if ((u64State & RTSEMRW_DIR_MASK) == (RTSEMRW_DIR_READ << RTSEMRW_DIR_SHIFT))
        {
            /* It flows in the right direction, try follow it before it changes. */
            uint64_t c = (u64State & RTSEMRW_CNT_RD_MASK) >> RTSEMRW_CNT_RD_SHIFT;
            c++;
            Assert(c < RTSEMRW_CNT_MASK / 2);
            u64State &= ~RTSEMRW_CNT_RD_MASK;
            u64State |= c << RTSEMRW_CNT_RD_SHIFT;
            if (ASMAtomicCmpXchgU64(&pThis->u64State, u64State, u64OldState))
            {
#ifdef RTSEMRW_STRICT
                RTLockValidatorRecSharedAddOwner(&pThis->ValidatorRead, hThreadSelf, pSrcPos);
#endif
                break;
            }
        }
        else if ((u64State & (RTSEMRW_CNT_RD_MASK | RTSEMRW_CNT_WR_MASK)) == 0)
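/*
 * The worker above is reached through the public request functions; the
 * sketch below shows a timed shared request and the interruptible
 * "NoResume" variant.  Assumes iprt/semaphore.h; the status-code handling
 * reflects my reading of the API and the caller function is made up.
 */
#include <iprt/semaphore.h>
#include <iprt/err.h>

static int exampleTimedRead(RTSEMRW hRWSem)
{
    /* Wait up to 100 ms for shared ownership. */
    int rc = RTSemRWRequestRead(hRWSem, 100 /* ms */);
    if (rc == VERR_TIMEOUT)
        return rc;                              /* a writer held the lock too long */
    if (RT_FAILURE(rc))
        return rc;

    /* ... read the protected data ... */
    RTSemRWReleaseRead(hRWSem);

    /* The NoResume variant may also return VERR_INTERRUPTED if the wait is
       interrupted by a signal rather than transparently resuming it. */
    rc = RTSemRWRequestReadNoResume(hRWSem, RT_INDEFINITE_WAIT);
    if (RT_SUCCESS(rc))
        RTSemRWReleaseRead(hRWSem);
    return rc;
}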