/* Debug-checked wrapper: atomically decrement *var by one. */
void
ethr_atomic_dec(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_atomic_dec__(var);
}
/* Debug-checked wrapper: acquire the spinlock. */
void
ethr_spin_lock(ethr_spinlock_t *lock)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(lock);
    ethr_spin_lock__(lock);
}
/* Debug-checked wrapper: atomically store i into *var. */
void
ethr_atomic_set(ethr_atomic_t *var, long i)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_atomic_set__(var, i);
}
/* Debug-checked wrapper: atomically decrement *var and return the new value. */
long
ethr_atomic_dec_read(ethr_atomic_t *var)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_atomic_dec_read__(var);
}
/* Debug-checked wrapper: atomically add incr to *var. */
void
ethr_atomic_add(ethr_atomic_t *var, long incr)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    ethr_atomic_add__(var, incr);
}
/* Debug-checked wrapper: acquire the rwlock for writing. */
void
ethr_write_lock(ethr_rwlock_t *lock)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(lock);
    ethr_write_lock__(lock);
}
/* Debug-checked wrapper: atomically add incr to *var and return the new value. */
long
ethr_atomic_add_read(ethr_atomic_t *var, long incr)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_atomic_add_read__(var, incr);
}
/* Debug-checked wrapper: atomically OR mask into *var; returns the old value. */
long
ethr_atomic_read_bor(ethr_atomic_t *var, long mask)
{
    ETHR_ASSERT(!ethr_not_inited__);
    ETHR_ASSERT(var);
    return ethr_atomic_read_bor__(var, mask);
}
/*
 * Destroy an rwlock. Returns 0 on success, or an errno value
 * (EACCES/EINVAL under extra-check builds) on argument/state errors.
 */
int
ethr_rwlock_destroy(ethr_rwlock_t *lock)
{
#if ETHR_XCHK
    if (ethr_not_inited__) {
        ETHR_ASSERT(0);
        return EACCES;
    }
    if (!lock) {
        ETHR_ASSERT(0);
        return EINVAL;
    }
#endif
    return ethr_rwlock_destroy__(lock);
}
int ethr_install_exit_handler(void (*funcp)(void)) { ethr_xhndl_list *xhp; #if ETHR_XCHK if (ethr_not_completely_inited__) { ETHR_ASSERT(0); return EACCES; } #endif if (!funcp) return EINVAL; xhp = (ethr_xhndl_list *) ethr_mem__.std.alloc(sizeof(ethr_xhndl_list)); if (!xhp) return ENOMEM; ethr_rwmutex_rwlock(&xhndl_rwmtx); xhp->funcp = funcp; xhp->next = xhndl_list; xhndl_list = xhp; ethr_rwmutex_rwunlock(&xhndl_rwmtx); return 0; }
/* Return a thread-specific event to the global free list (spinlock protected). */
static void
ts_event_free(ethr_ts_event *ts_ev)
{
    ETHR_ASSERT(!ts_ev->udata);

    ethr_spin_lock(&ts_ev_alloc_lock);
    ts_ev->next = free_ts_ev;
    free_ts_ev = ts_ev;
    ethr_spin_unlock(&ts_ev_alloc_lock);
}
/*
 * Per-thread exit path: publish the thread's result to joiners (if any),
 * run installed exit handlers, then tear down the thread-specific event.
 */
static void
thr_exit_cleanup(ethr_tid *tid, void *res)
{
    ETHR_ASSERT(tid == ETHR_GET_OWN_TID__);

    if (tid->jdata)
        tid->jdata->res = res;

    ethr_run_exit_handlers__();
    ethr_ts_event_destructor__((void *) ethr_get_tse__());
}
/* Initialize an rwlock. Returns 0, or EINVAL under extra-check builds. */
int
ethr_rwlock_init(ethr_rwlock_t *lock)
{
#if ETHR_XCHK
    if (!lock) {
        ETHR_ASSERT(0);
        return EINVAL;
    }
#endif
    return ethr_rwlock_init__(lock);
}
/*
 * Block until the event is set (futex-based, no timeout).
 * Spins up to 'spincount' times reading e->futex, then flags itself as a
 * waiter via cmpxchg and sleeps in the futex. Returns 0 when the event is
 * set, or EINTR if the futex wait was interrupted.
 */
static ETHR_INLINE int
wait__(ethr_event *e, int spincount)
{
    unsigned sc = spincount;
    int res;
    ethr_sint32_t val;
    int until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    while (1) {
        /* Spin phase: poll the event word, yielding periodically. */
        while (1) {
            val = ethr_atomic32_read(&e->futex);
            if (val == ETHR_EVENT_ON__)
                return 0;
            if (sc == 0)
                break;
            sc--;
            ETHR_SPIN_BODY;
            if (--until_yield == 0) {
                until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
                res = ETHR_YIELD();
                if (res != 0)
                    ETHR_FATAL_ERROR__(res);
            }
        }

        /* Announce ourselves as waiter before sleeping; a concurrent set
         * may win the cmpxchg, in which case we are done. */
        if (val != ETHR_EVENT_OFF_WAITER__) {
            val = ethr_atomic32_cmpxchg(&e->futex,
                                        ETHR_EVENT_OFF_WAITER__,
                                        ETHR_EVENT_OFF__);
            if (val == ETHR_EVENT_ON__)
                return 0;
            ETHR_ASSERT(val == ETHR_EVENT_OFF__);
        }

        /* Sleep only while the word still reads OFF_WAITER; EWOULDBLOCK
         * means it changed under us — re-check from the top. */
        res = ETHR_FUTEX__(&e->futex, ETHR_FUTEX_WAIT__, ETHR_EVENT_OFF_WAITER__);
        if (res == EINTR)
            break;
        if (res != 0 && res != EWOULDBLOCK)
            ETHR_FATAL_ERROR__(res);
    }

    return res;
}
/*
 * Block until the event is set (Windows, no timeout).
 * Spins up to 'spincount' times reading e->state, then flags itself as a
 * waiter via interlocked cmpxchg and blocks on the event handle.
 * Returns 0 when the event is set; fatal-errors on any Win32 failure.
 *
 * Fix: removed the unused local 'long on;' declared in the outer loop.
 */
static ETHR_INLINE int
wait(ethr_event *e, int spincount)
{
    LONG state;
    DWORD code;
    int sc, res, until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    sc = spincount;

    while (1) {
        /* Spin phase: poll the state word, yielding periodically. */
        while (1) {
#if ETHR_READ_AND_SET_WITHOUT_INTERLOCKED_OP__
            state = e->state;
#else
            /* Interlocked add of 0 used as an atomic read. */
            state = _InterlockedExchangeAdd(&e->state, (LONG) 0);
#endif
            if (state == ETHR_EVENT_ON__)
                return 0;
            if (sc == 0)
                break;
            sc--;
            ETHR_SPIN_BODY;
            if (--until_yield == 0) {
                until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
                res = ETHR_YIELD();
                if (res != 0)
                    ETHR_FATAL_ERROR__(res);
            }
        }

        /* Announce ourselves as waiter; a concurrent set may win the
         * cmpxchg, in which case we are done. */
        if (state != ETHR_EVENT_OFF_WAITER__) {
            state = _InterlockedCompareExchange(&e->state,
                                                ETHR_EVENT_OFF_WAITER__,
                                                ETHR_EVENT_OFF__);
            if (state == ETHR_EVENT_ON__)
                return 0;
            ETHR_ASSERT(state == ETHR_EVENT_OFF__);
        }

        code = WaitForSingleObject(e->handle, INFINITE);
        if (code != WAIT_OBJECT_0)
            ETHR_FATAL_ERROR__(ethr_win_get_errno__());
    }
}
/*
 * Lazily create the wake-up pipe for a timed event wait and make both ends
 * non-blocking. On Darwin, also allocate fd_set storage large enough for
 * the read fd (select() there is not limited by FD_SETSIZE when the sets
 * are heap-allocated); elsewhere, fds >= FD_SETSIZE cannot be waited on
 * and are rejected. Publishes the initialized state with a StoreStore
 * barrier. Any OS failure is fatal.
 *
 * Fix: fcntl() results were previously ignored; a failing F_GETFL would
 * have OR'ed O_NONBLOCK into -1. Failures now fatal-error like the other
 * OS calls in this function.
 */
static void
setup_nonblocking_pipe(ethr_event *e)
{
    int flgs;
    int res;

    res = pipe(e->fd);
    if (res != 0)
        ETHR_FATAL_ERROR__(errno);

    ETHR_ASSERT(e->fd[0] >= 0 && e->fd[1] >= 0);

    flgs = fcntl(e->fd[0], F_GETFL, 0);
    if (flgs < 0 || fcntl(e->fd[0], F_SETFL, flgs | O_NONBLOCK) < 0)
        ETHR_FATAL_ERROR__(errno);
    flgs = fcntl(e->fd[1], F_GETFL, 0);
    if (flgs < 0 || fcntl(e->fd[1], F_SETFL, flgs | O_NONBLOCK) < 0)
        ETHR_FATAL_ERROR__(errno);

#ifndef __DARWIN__
    if (e->fd[0] >= FD_SETSIZE)
        ETHR_FATAL_ERROR__(ENOTSUP);
#else
    {
        int nmasks;
        ethr_event_fdsets__ *fdsets;
        size_t mem_size;

        /* Size two fd_sets (read + error) to cover e->fd[0], but never
         * smaller than the standard fd_set. */
        nmasks = (e->fd[0]+NFDBITS)/NFDBITS;
        mem_size = 2*nmasks*sizeof(fd_mask);
        if (mem_size < 2*sizeof(fd_set)) {
            mem_size = 2*sizeof(fd_set);
            nmasks = mem_size/(2*sizeof(fd_mask));
        }

        fdsets = malloc(sizeof(ethr_event_fdsets__)
                        + mem_size
                        - sizeof(fd_mask));
        if (!fdsets)
            ETHR_FATAL_ERROR__(ENOMEM);
        fdsets->rsetp = (fd_set *) (char *) &fdsets->mem[0];
        fdsets->esetp = (fd_set *) (char *) &fdsets->mem[nmasks];
        fdsets->mem_size = mem_size;
        e->fdsets = fdsets;
    }
#endif

    /* Make the pipe fds visible before any reader observes them. */
    ETHR_MEMBAR(ETHR_StoreStore);
}
/*
 * Block until the event is set (futex-based) or 'timeout' nanoseconds
 * elapse. timeout < 0 means wait forever (tsp == NULL). Spins up to
 * 'spincount' times, then flags itself as waiter and sleeps in the futex
 * with the (re-computed) remaining time. Returns 0 when set, ETIMEDOUT on
 * timeout, or EINTR if interrupted.
 *
 * NOTE: the 'set_timeout' label is jumped to from the spincount == 0
 * fast path above, entering the outer while loop mid-body on purpose.
 */
static ETHR_INLINE int
wait__(ethr_event *e, int spincount, ethr_sint64_t timeout)
{
    unsigned sc = spincount;
    int res;
    ethr_sint32_t val;
    int until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
    ethr_sint64_t time = 0; /* SHUT UP annoying faulty warning... */
    struct timespec ts, *tsp;
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
    ethr_sint64_t start = 0; /* SHUT UP annoying faulty warning... */
#endif

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    if (timeout < 0) {
        /* Infinite wait: NULL timespec for the futex call. */
        tsp = NULL;
    }
    else {
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
        start = ethr_get_monotonic_time();
#endif
        tsp = &ts;
        time = timeout;
        if (spincount == 0) {
            /* No spinning requested: check once and go straight to the
             * timeout computation inside the loop. */
            val = ethr_atomic32_read(&e->futex);
            if (val == ETHR_EVENT_ON__)
                goto return_event_on;
            goto set_timeout;
        }
    }

    while (1) {
        /* Spin phase: poll the event word, yielding periodically. */
        while (1) {
            val = ethr_atomic32_read(&e->futex);
            if (val == ETHR_EVENT_ON__)
                goto return_event_on;
            if (sc == 0)
                break;
            sc--;
            ETHR_SPIN_BODY;
            if (--until_yield == 0) {
                until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
                res = ETHR_YIELD();
                if (res != 0)
                    ETHR_FATAL_ERROR__(res);
            }
        }

        if (timeout >= 0) {
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
            /* Recompute remaining time from the monotonic clock. */
            time = timeout - (ethr_get_monotonic_time() - start);
#endif
        set_timeout:
            if (time <= 0) {
                /* Deadline passed; one last check before giving up. */
                val = ethr_atomic32_read(&e->futex);
                if (val == ETHR_EVENT_ON__)
                    goto return_event_on;
                return ETIMEDOUT;
            }
            ts.tv_sec = time / (1000*1000*1000);
            ts.tv_nsec = time % (1000*1000*1000);
        }

        /* Announce ourselves as waiter before sleeping; a concurrent set
         * may win the cmpxchg, in which case we are done. */
        if (val != ETHR_EVENT_OFF_WAITER__) {
            val = ethr_atomic32_cmpxchg(&e->futex,
                                        ETHR_EVENT_OFF_WAITER__,
                                        ETHR_EVENT_OFF__);
            if (val == ETHR_EVENT_ON__)
                goto return_event_on;
            ETHR_ASSERT(val == ETHR_EVENT_OFF__);
        }

        res = ETHR_FUTEX__(&e->futex, ETHR_FUTEX_WAIT__, ETHR_EVENT_OFF_WAITER__, tsp);
        switch (res) {
        case EINTR:
        case ETIMEDOUT:
            return res;
        case 0:
        case EWOULDBLOCK:
            /* Woken or the word changed under us; re-check from the top. */
            break;
        default:
            ETHR_FATAL_ERROR__(res);
        }
    }

return_event_on:
    /* Order the event-on read before subsequent loads/stores. */
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
    return 0;
}
/*
 * Block until the event is set, using pthread cond-wait and/or a
 * select()-on-pipe fallback for timed waits, or until 'timeout'
 * nanoseconds elapse. timeout < 0 means wait forever; timeout == 0 is an
 * immediate poll. Returns 0 when set, ETIMEDOUT on timeout, or EINTR on
 * interruption / possible spurious wakeup.
 *
 * Fix: the timeout classification was "if (timeout < 0) {...}
 * if (timeout == 0) ... else {...}" — missing an 'else', so an infinite
 * wait (timeout < 0) with a nonzero spincount fell into the timed branch
 * and set up pipe/cond_timeout state with a negative 'time'. The later
 * dispatch ("if (timeout < 0 || ... COND_TIMEDWAIT)") shows negative
 * timeouts must take the plain condition-wait path; the chain is now
 * if / else if / else.
 */
static ETHR_INLINE int
wait__(ethr_event *e, int spincount, ethr_sint64_t timeout)
{
    int sc = spincount;
    ethr_sint32_t val;
    int res, ulres;
    int until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
    ethr_sint64_t time = 0; /* SHUT UP annoying faulty warning... */
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
    ethr_sint64_t start = 0; /* SHUT UP annoying faulty warning... */
#endif
#ifdef ETHR_HAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC
    struct timespec cond_timeout;
#endif

    val = ethr_atomic32_read(&e->state);
    if (val == ETHR_EVENT_ON__)
        goto return_event_on;

    if (timeout < 0) {
        if (spincount == 0)
            goto set_event_off_waiter;
    }
    else if (timeout == 0)
        return ETIMEDOUT;
    else {
        time = timeout;
        switch (e->fd[0]) {
        case ETHR_EVENT_INVALID_FD__:
            /* First timed wait on this event: create the wake-up pipe. */
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
            start = ethr_get_monotonic_time();
#endif
            setup_nonblocking_pipe(e);
            break;
#ifdef ETHR_HAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC
        case ETHR_EVENT_COND_TIMEDWAIT__:
            /* Timed waits use pthread_cond_timedwait on the monotonic
             * clock; compute the absolute deadline now. */
            time += ethr_get_monotonic_time();
            cond_timeout.tv_sec = time / (1000*1000*1000);
            cond_timeout.tv_nsec = time % (1000*1000*1000);
            if (spincount == 0)
                goto set_event_off_waiter;
            break;
#endif
        default:
            /* Already initialized pipe... */
            if (spincount == 0)
                goto set_select_timeout;
#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
            start = ethr_get_monotonic_time();
#endif
            break;
        }
    }

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    /* Spin phase: poll the event word, yielding periodically. */
    while (1) {
        val = ethr_atomic32_read(&e->state);
        if (val == ETHR_EVENT_ON__)
            goto return_event_on;
        if (sc == 0)
            break;
        sc--;
        ETHR_SPIN_BODY;
        if (--until_yield == 0) {
            until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
            res = ETHR_YIELD();
            if (res != 0)
                ETHR_FATAL_ERROR__(res);
        }
    }

    if (timeout < 0
#ifdef ETHR_HAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC
        || e->fd[0] == ETHR_EVENT_COND_TIMEDWAIT__
#endif
        ) {
        /* Condition-variable path (infinite wait, or timed wait via
         * pthread_cond_timedwait). */
    set_event_off_waiter:
        if (val != ETHR_EVENT_OFF_WAITER__) {
            ethr_sint32_t act;
            act = ethr_atomic32_cmpxchg(&e->state,
                                        ETHR_EVENT_OFF_WAITER__,
                                        val);
            if (act == ETHR_EVENT_ON__)
                goto return_event_on;
            ETHR_ASSERT(act == val);
        }

        res = pthread_mutex_lock(&e->mtx);
        if (res != 0)
            ETHR_FATAL_ERROR__(res);

        while (1) {
            val = ethr_atomic32_read(&e->state);
            if (val == ETHR_EVENT_ON__) {
                ETHR_ASSERT(res == 0);
                ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
                break;
            }
#ifdef ETHR_HAVE_PTHREAD_COND_TIMEDWAIT_MONOTONIC
            if (timeout > 0) {
                res = pthread_cond_timedwait(&e->cnd, &e->mtx, &cond_timeout);
                if (res == EINTR || res == ETIMEDOUT)
                    break;
            }
            else
#endif
            {
                res = pthread_cond_wait(&e->cnd, &e->mtx);
                if (res == EINTR)
                    break;
            }
            if (res != 0)
                ETHR_FATAL_ERROR__(res);
        }

        ulres = pthread_mutex_unlock(&e->mtx);
        if (ulres != 0)
            ETHR_FATAL_ERROR__(ulres);
    }
    else {
        /* select()-on-pipe path for timed waits. */
        int fd;
        int sres;
        ssize_t rres;
#ifndef __DARWIN__
        fd_set rset, eset;
#endif
        fd_set *rsetp, *esetp;
        struct timeval select_timeout;

#ifdef ETHR_HAVE_ETHR_GET_MONOTONIC_TIME
        time -= ethr_get_monotonic_time() - start;
        if (time <= 0)
            return ETIMEDOUT;
#endif

    set_select_timeout:
        ETHR_ASSERT(time > 0);

        /*
         * timeout in nano-second, but we can only wait
         * for micro-seconds...
         */
        time = ((time - 1) / 1000) + 1;
        select_timeout.tv_sec = time / (1000*1000);
        select_timeout.tv_usec = time % (1000*1000);

        ETHR_ASSERT(val != ETHR_EVENT_ON__);

        fd = e->fd[0];

        /* Cleanup pipe... */
        do {
            char buf[64];
            rres = read(fd, buf, sizeof(buf));
        } while (rres > 0 || (rres < 0 && errno == EINTR));
        if (rres < 0 && errno != EAGAIN && errno != EWOULDBLOCK)
            ETHR_FATAL_ERROR__(errno);

        /*
         * Need to verify that state is still off
         * after cleaning the pipe...
         */
        if (val == ETHR_EVENT_OFF_WAITER_SELECT__) {
            val = ethr_atomic32_read(&e->state);
            if (val == ETHR_EVENT_ON__)
                goto return_event_on;
        }
        else {
            ethr_sint32_t act;
            act = ethr_atomic32_cmpxchg(&e->state,
                                        ETHR_EVENT_OFF_WAITER_SELECT__,
                                        val);
            if (act == ETHR_EVENT_ON__)
                goto return_event_on;
            ETHR_ASSERT(act == val);
        }

#ifdef __DARWIN__
        rsetp = e->fdsets->rsetp;
        esetp = e->fdsets->esetp;
        memset((void *) &e->fdsets->mem[0], 0, e->fdsets->mem_size);
#else
        FD_ZERO(&rset);
        FD_ZERO(&eset);
        rsetp = &rset;
        esetp = &eset;
#endif
        FD_SET(fd, rsetp);
        FD_SET(fd, esetp);

        sres = select(fd + 1, rsetp, NULL, esetp, &select_timeout);
        if (sres == 0)
            res = ETIMEDOUT;
        else {
            res = EINTR;
            if (sres < 0 && errno != EINTR)
                ETHR_FATAL_ERROR__(errno);
            /* else:
             *   Event is *probably* set, but it can be a
             *   lingering writer. That is, it is important
             *   that we verify that it actually is set. If
             *   it isn't, return EINTR (spurious wakeup).
             */
        }

        val = ethr_atomic32_read(&e->state);
        if (val == ETHR_EVENT_ON__)
            goto return_event_on;
    }

    return res;

return_event_on:
    /* Order the event-on read before subsequent loads/stores. */
    ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
    return 0;
}
/*
 * Block until the event is set (Windows) or 'timeout' nanoseconds elapse.
 * timeout < 0 means wait forever; timeout == 0 is an immediate poll.
 * Spins up to 'spincount' times, then flags itself as waiter and blocks
 * on the event handle. Returns 0 when set, ETIMEDOUT on timeout.
 *
 * Fix: the millisecond conversion was
 *     tmo = (DWORD) (timeout - 1) / (1000*1000) + 1;
 * The cast binds tighter than '/', truncating the 64-bit nanosecond value
 * to 32 bits BEFORE dividing — corrupting any timeout over ~4.29 s. The
 * division now happens in 64 bits and only the millisecond result is cast.
 */
static ETHR_INLINE int
wait(ethr_event *e, int spincount, ethr_sint64_t timeout)
{
    DWORD code, tmo;
    int sc, res, until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;

    if (timeout < 0)
        tmo = INFINITE;
    else if (timeout == 0) {
        /* Pure poll: no blocking at all. */
        ethr_sint32_t state = ethr_atomic32_read(&e->state);
        if (state == ETHR_EVENT_ON__) {
            ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
            return 0;
        }
        return ETIMEDOUT;
    }
    else {
        /*
         * Timeout in nano-seconds, but we can only
         * wait for milli-seconds... (round up)
         */
        tmo = (DWORD) ((timeout - 1) / (1000*1000) + 1);
    }

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    sc = spincount;

    while (1) {
        ethr_sint32_t state;

        /* Spin phase: poll the state word, yielding periodically. */
        while (1) {
            state = ethr_atomic32_read(&e->state);
            if (state == ETHR_EVENT_ON__) {
                ETHR_MEMBAR(ETHR_LoadLoad|ETHR_LoadStore);
                return 0;
            }
            if (sc == 0)
                break;
            sc--;
            ETHR_SPIN_BODY;
            if (--until_yield == 0) {
                until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
                res = ETHR_YIELD();
                if (res != 0)
                    ETHR_FATAL_ERROR__(res);
            }
        }

        /* Announce ourselves as waiter; a concurrent set may win the
         * cmpxchg, in which case we are done. */
        if (state != ETHR_EVENT_OFF_WAITER__) {
            state = ethr_atomic32_cmpxchg(&e->state,
                                          ETHR_EVENT_OFF_WAITER__,
                                          ETHR_EVENT_OFF__);
            if (state == ETHR_EVENT_ON__)
                return 0;
            ETHR_ASSERT(state == ETHR_EVENT_OFF__);
        }

        code = WaitForSingleObject(e->handle, tmo);
        if (code == WAIT_TIMEOUT)
            return ETIMEDOUT;
        if (code != WAIT_OBJECT_0)
            ETHR_FATAL_ERROR__(ethr_win_get_errno__());
    }
}
/*
 * Block until the event is set (pthread mutex/cond, no timeout).
 * Spins up to 'spincount' times reading e->state, then flags itself as a
 * waiter via cmpxchg and sleeps on the condition variable. Returns 0 when
 * the event is set, or EINTR if the condition wait was interrupted.
 */
static ETHR_INLINE int
wait__(ethr_event *e, int spincount)
{
    int sc = spincount;
    ethr_sint32_t val;
    int res, ulres;
    int until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;

    if (spincount < 0)
        ETHR_FATAL_ERROR__(EINVAL);

    /* Spin phase: poll the event word, yielding periodically. */
    while (1) {
        val = ethr_atomic32_read(&e->state);
        if (val == ETHR_EVENT_ON__)
            return 0;
        if (sc == 0)
            break;
        sc--;
        ETHR_SPIN_BODY;
        if (--until_yield == 0) {
            until_yield = ETHR_YIELD_AFTER_BUSY_LOOPS;
            res = ETHR_YIELD();
            if (res != 0)
                ETHR_FATAL_ERROR__(res);
        }
    }

    /* Announce ourselves as waiter before sleeping; a concurrent set may
     * win the cmpxchg, in which case we are done. */
    if (val != ETHR_EVENT_OFF_WAITER__) {
        val = ethr_atomic32_cmpxchg(&e->state,
                                    ETHR_EVENT_OFF_WAITER__,
                                    ETHR_EVENT_OFF__);
        if (val == ETHR_EVENT_ON__)
            return 0;
        ETHR_ASSERT(val == ETHR_EVENT_OFF__);
    }

    ETHR_ASSERT(val == ETHR_EVENT_OFF_WAITER__
                || val == ETHR_EVENT_OFF__);

    res = pthread_mutex_lock(&e->mtx);
    if (res != 0)
        ETHR_FATAL_ERROR__(res);

    /* Re-check state under the mutex; the setter signals while holding it,
     * so a set cannot be missed between the check and the cond wait. */
    while (1) {
        val = ethr_atomic32_read(&e->state);
        if (val == ETHR_EVENT_ON__)
            break;
        res = pthread_cond_wait(&e->cnd, &e->mtx);
        if (res == EINTR)
            break;
        if (res != 0)
            ETHR_FATAL_ERROR__(res);
    }

    ulres = pthread_mutex_unlock(&e->mtx);
    if (ulres != 0)
        ETHR_FATAL_ERROR__(ulres);

    return res; /* 0 || EINTR */
}
static void atomic_basic_test(void) { /* * Verify that each op does what it is expected * to do for at least one input. */ ethr_atomic32_t atomic32; ethr_atomic_t atomic; print_line("AT_AINT32_MAX=%d",AT_AINT32_MAX); print_line("AT_AINT32_MIN=%d",AT_AINT32_MIN); AT_TEST_INIT(ethr_sint32_t, atomic32, ); AT_TEST_SET(ethr_sint32_t, atomic32, ); AT_TEST_XCHG(ethr_sint32_t, atomic32, ); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, ); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, ); AT_TEST_ADD(ethr_sint32_t, atomic32, ); AT_TEST_INC_READ(ethr_sint32_t, atomic32, ); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, ); AT_TEST_INC(ethr_sint32_t, atomic32, ); AT_TEST_DEC(ethr_sint32_t, atomic32, ); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, ); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, ); AT_TEST_INIT(ethr_sint32_t, atomic32, _acqb); AT_TEST_SET(ethr_sint32_t, atomic32, _acqb); AT_TEST_XCHG(ethr_sint32_t, atomic32, _acqb); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _acqb); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _acqb); AT_TEST_ADD(ethr_sint32_t, atomic32, _acqb); AT_TEST_INC_READ(ethr_sint32_t, atomic32, _acqb); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _acqb); AT_TEST_INC(ethr_sint32_t, atomic32, _acqb); AT_TEST_DEC(ethr_sint32_t, atomic32, _acqb); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _acqb); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _acqb); AT_TEST_INIT(ethr_sint32_t, atomic32, _relb); AT_TEST_SET(ethr_sint32_t, atomic32, _relb); AT_TEST_XCHG(ethr_sint32_t, atomic32, _relb); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _relb); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _relb); AT_TEST_ADD(ethr_sint32_t, atomic32, _relb); AT_TEST_INC_READ(ethr_sint32_t, atomic32, _relb); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _relb); AT_TEST_INC(ethr_sint32_t, atomic32, _relb); AT_TEST_DEC(ethr_sint32_t, atomic32, _relb); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _relb); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _relb); AT_TEST_INIT(ethr_sint32_t, atomic32, _rb); AT_TEST_SET(ethr_sint32_t, atomic32, 
_rb); AT_TEST_XCHG(ethr_sint32_t, atomic32, _rb); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _rb); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _rb); AT_TEST_ADD(ethr_sint32_t, atomic32, _rb); AT_TEST_INC_READ(ethr_sint32_t, atomic32, _rb); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _rb); AT_TEST_INC(ethr_sint32_t, atomic32, _rb); AT_TEST_DEC(ethr_sint32_t, atomic32, _rb); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _rb); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _rb); AT_TEST_INIT(ethr_sint32_t, atomic32, _wb); AT_TEST_SET(ethr_sint32_t, atomic32, _wb); AT_TEST_XCHG(ethr_sint32_t, atomic32, _wb); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _wb); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _wb); AT_TEST_ADD(ethr_sint32_t, atomic32, _wb); AT_TEST_INC_READ(ethr_sint32_t, atomic32, _wb); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _wb); AT_TEST_INC(ethr_sint32_t, atomic32, _wb); AT_TEST_DEC(ethr_sint32_t, atomic32, _wb); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _wb); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _wb); AT_TEST_INIT(ethr_sint32_t, atomic32, _mb); AT_TEST_SET(ethr_sint32_t, atomic32, _mb); AT_TEST_XCHG(ethr_sint32_t, atomic32, _mb); AT_TEST_CMPXCHG(ethr_sint32_t, atomic32, _mb); AT_TEST_ADD_READ(ethr_sint32_t, atomic32, _mb); AT_TEST_ADD(ethr_sint32_t, atomic32, _mb); AT_TEST_INC_READ(ethr_sint32_t, atomic32, _mb); AT_TEST_DEC_READ(ethr_sint32_t, atomic32, _mb); AT_TEST_INC(ethr_sint32_t, atomic32, _mb); AT_TEST_DEC(ethr_sint32_t, atomic32, _mb); AT_TEST_READ_BAND(ethr_sint32_t, atomic32, _mb); AT_TEST_READ_BOR(ethr_sint32_t, atomic32, _mb); AT_TEST_INIT(ethr_sint_t, atomic, ); AT_TEST_SET(ethr_sint_t, atomic, ); AT_TEST_XCHG(ethr_sint_t, atomic, ); AT_TEST_CMPXCHG(ethr_sint_t, atomic, ); AT_TEST_ADD_READ(ethr_sint_t, atomic, ); AT_TEST_ADD(ethr_sint_t, atomic, ); AT_TEST_INC_READ(ethr_sint_t, atomic, ); AT_TEST_DEC_READ(ethr_sint_t, atomic, ); AT_TEST_INC(ethr_sint_t, atomic, ); AT_TEST_DEC(ethr_sint_t, atomic, ); AT_TEST_READ_BAND(ethr_sint_t, atomic, ); 
AT_TEST_READ_BOR(ethr_sint_t, atomic, ); AT_TEST_INIT(ethr_sint_t, atomic, _acqb); AT_TEST_SET(ethr_sint_t, atomic, _acqb); AT_TEST_XCHG(ethr_sint_t, atomic, _acqb); AT_TEST_CMPXCHG(ethr_sint_t, atomic, _acqb); AT_TEST_ADD_READ(ethr_sint_t, atomic, _acqb); AT_TEST_ADD(ethr_sint_t, atomic, _acqb); AT_TEST_INC_READ(ethr_sint_t, atomic, _acqb); AT_TEST_DEC_READ(ethr_sint_t, atomic, _acqb); AT_TEST_INC(ethr_sint_t, atomic, _acqb); AT_TEST_DEC(ethr_sint_t, atomic, _acqb); AT_TEST_READ_BAND(ethr_sint_t, atomic, _acqb); AT_TEST_READ_BOR(ethr_sint_t, atomic, _acqb); AT_TEST_INIT(ethr_sint_t, atomic, _relb); AT_TEST_SET(ethr_sint_t, atomic, _relb); AT_TEST_XCHG(ethr_sint_t, atomic, _relb); AT_TEST_CMPXCHG(ethr_sint_t, atomic, _relb); AT_TEST_ADD_READ(ethr_sint_t, atomic, _relb); AT_TEST_ADD(ethr_sint_t, atomic, _relb); AT_TEST_INC_READ(ethr_sint_t, atomic, _relb); AT_TEST_DEC_READ(ethr_sint_t, atomic, _relb); AT_TEST_INC(ethr_sint_t, atomic, _relb); AT_TEST_DEC(ethr_sint_t, atomic, _relb); AT_TEST_READ_BAND(ethr_sint_t, atomic, _relb); AT_TEST_READ_BOR(ethr_sint_t, atomic, _relb); AT_TEST_INIT(ethr_sint_t, atomic, _rb); AT_TEST_SET(ethr_sint_t, atomic, _rb); AT_TEST_XCHG(ethr_sint_t, atomic, _rb); AT_TEST_CMPXCHG(ethr_sint_t, atomic, _rb); AT_TEST_ADD_READ(ethr_sint_t, atomic, _rb); AT_TEST_ADD(ethr_sint_t, atomic, _rb); AT_TEST_INC_READ(ethr_sint_t, atomic, _rb); AT_TEST_DEC_READ(ethr_sint_t, atomic, _rb); AT_TEST_INC(ethr_sint_t, atomic, _rb); AT_TEST_DEC(ethr_sint_t, atomic, _rb); AT_TEST_READ_BAND(ethr_sint_t, atomic, _rb); AT_TEST_READ_BOR(ethr_sint_t, atomic, _rb); AT_TEST_INIT(ethr_sint_t, atomic, _wb); AT_TEST_SET(ethr_sint_t, atomic, _wb); AT_TEST_XCHG(ethr_sint_t, atomic, _wb); AT_TEST_CMPXCHG(ethr_sint_t, atomic, _wb); AT_TEST_ADD_READ(ethr_sint_t, atomic, _wb); AT_TEST_ADD(ethr_sint_t, atomic, _wb); AT_TEST_INC_READ(ethr_sint_t, atomic, _wb); AT_TEST_DEC_READ(ethr_sint_t, atomic, _wb); AT_TEST_INC(ethr_sint_t, atomic, _wb); AT_TEST_DEC(ethr_sint_t, atomic, _wb); 
AT_TEST_READ_BAND(ethr_sint_t, atomic, _wb); AT_TEST_READ_BOR(ethr_sint_t, atomic, _wb); AT_TEST_INIT(ethr_sint_t, atomic, _mb); AT_TEST_SET(ethr_sint_t, atomic, _mb); AT_TEST_XCHG(ethr_sint_t, atomic, _mb); AT_TEST_CMPXCHG(ethr_sint_t, atomic, _mb); AT_TEST_ADD_READ(ethr_sint_t, atomic, _mb); AT_TEST_ADD(ethr_sint_t, atomic, _mb); AT_TEST_INC_READ(ethr_sint_t, atomic, _mb); AT_TEST_DEC_READ(ethr_sint_t, atomic, _mb); AT_TEST_INC(ethr_sint_t, atomic, _mb); AT_TEST_DEC(ethr_sint_t, atomic, _mb); AT_TEST_READ_BAND(ethr_sint_t, atomic, _mb); AT_TEST_READ_BOR(ethr_sint_t, atomic, _mb); /* Double word */ { ethr_dw_atomic_t dw_atomic; ethr_dw_sint_t dw0, dw1; dw0.sint[0] = 4711; dw0.sint[1] = 4712; /* init */ ethr_dw_atomic_init(&dw_atomic, &dw0); ethr_dw_atomic_read(&dw_atomic, &dw1); ETHR_ASSERT(dw1.sint[0] == 4711); ETHR_ASSERT(dw1.sint[1] == 4712); /* set */ dw0.sint[0] = 42; dw0.sint[1] = ~((ethr_sint_t) 0); ethr_dw_atomic_set(&dw_atomic, &dw0); ethr_dw_atomic_read(&dw_atomic, &dw1); ASSERT(dw1.sint[0] == 42); ASSERT(dw1.sint[1] == ~((ethr_sint_t) 0)); /* cmpxchg */ dw0.sint[0] = 17; dw0.sint[1] = 18; dw1.sint[0] = 19; dw1.sint[1] = 20; ASSERT(!ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0)); ethr_dw_atomic_read(&dw_atomic, &dw0); ASSERT(dw0.sint[0] == 42); ASSERT(dw0.sint[1] == ~((ethr_sint_t) 0)); ASSERT(ethr_dw_atomic_cmpxchg(&dw_atomic, &dw1, &dw0)); ethr_dw_atomic_read(&dw_atomic, &dw0); ASSERT(dw0.sint[0] == 19); ASSERT(dw0.sint[1] == 20); } }
/* Debug-checked wrapper: initialize *var to i. */
void
ethr_atomic_init(ethr_atomic_t *var, long i)
{
    ETHR_ASSERT(var);
    ethr_atomic_init__(var, i);
}