Example No. 1
#include <stdatomic.h>

extern void abort (void);

/* Flag exercised by the calls below; this definition is assumed, as the
   snippet uses `a` without declaring it.  */
atomic_flag a = ATOMIC_FLAG_INIT;

int
main ()
{
  int b;

  if (!atomic_is_lock_free (&a))
    abort ();

  if (atomic_flag_test_and_set (&a))
    abort ();
  atomic_flag_clear_explicit (&a, memory_order_relaxed);
  if (atomic_flag_test_and_set (&a))
    abort ();
  atomic_flag_clear (&a);

  b = atomic_flag_test_and_set_explicit (&a, memory_order_seq_cst);
  if (!atomic_flag_test_and_set (&a) || b != 0)
    abort ();

  b = atomic_flag_test_and_set_explicit (&a, memory_order_acq_rel);
  if (!atomic_flag_test_and_set (&a) || b != 1)
    abort ();

  atomic_flag_clear_explicit (&a, memory_order_seq_cst);
  if (atomic_flag_test_and_set (&a))
    abort ();

  return 0;
}
Example No. 2
// Headers assumed: gtest for TEST/ASSERT_*, <stdatomic.h> for atomic_flag.
#include <gtest/gtest.h>
#include <stdatomic.h>

TEST(stdatomic, atomic_flag) {
  atomic_flag f = ATOMIC_FLAG_INIT;
  ASSERT_FALSE(atomic_flag_test_and_set(&f));
  ASSERT_TRUE(atomic_flag_test_and_set(&f));

  atomic_flag_clear(&f);

  ASSERT_FALSE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));
  ASSERT_TRUE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));

  atomic_flag_clear_explicit(&f, memory_order_relaxed);
  ASSERT_FALSE(atomic_flag_test_and_set_explicit(&f, memory_order_relaxed));
}
Example No. 3
void spinlock_acquire(struct spinlock *s)
{
	cpu_disable_preemption();
	while(atomic_flag_test_and_set_explicit(&s->flag, memory_order_relaxed)) {
		arch_cpu_pause();
	}
}
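For reference, the acquire loop above is one half of a spinlock; a matching release and a self-contained variant of the pattern could look like the sketch below. The struct and helper names are assumptions (the original's struct spinlock, cpu_disable_preemption and arch_cpu_pause are not shown), and acquire/release ordering on the flag is used instead of the relaxed ordering above, which by itself does not order the critical section.

/* Minimal self-contained sketch of the same spinlock pattern using only
   C11 <stdatomic.h>.  The type and function names here are assumptions,
   not the original project's definitions.  */
#include <stdatomic.h>

struct simple_spinlock {
	atomic_flag flag;	/* clear = unlocked, set = locked */
};
#define SIMPLE_SPINLOCK_INIT { ATOMIC_FLAG_INIT }

static void simple_spin_lock(struct simple_spinlock *s)
{
	/* Acquire ordering keeps the critical section from being
	   reordered before the lock is taken.  */
	while (atomic_flag_test_and_set_explicit(&s->flag,
						 memory_order_acquire))
		;	/* spin; a CPU pause/yield hint would go here */
}

static void simple_spin_unlock(struct simple_spinlock *s)
{
	/* Release ordering publishes the critical section's writes
	   before the flag is cleared.  */
	atomic_flag_clear_explicit(&s->flag, memory_order_release);
}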
Example No. 4
#include <stdatomic.h>

int main()
{
  atomic_flag af = ATOMIC_FLAG_INIT;

  if (!atomic_flag_test_and_set_explicit(&af, memory_order_acquire))
    atomic_flag_clear_explicit(&af, memory_order_release);

  return 0;
}
Example No. 5
void tatas_lock(void * lock) {
    TATASLock *l = (TATASLock*)lock;
    while(true){
        while(atomic_load_explicit(&l->lockFlag.value, 
                                   memory_order_acquire)){
            thread_yield();
        }
        if( ! atomic_flag_test_and_set_explicit(&l->lockFlag.value,
                                                memory_order_acquire)){
            return;
        }
    }
}
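The loop above implements a test-and-test-and-set (TATAS) lock, but it depends on the project's TATASLock type, in which lockFlag.value is both loadable and usable with atomic_flag_test_and_set_explicit; standard C11 atomic_flag has no portable load operation (a test operation was only added later, in C23). A portable sketch of the same idea, using atomic_bool with atomic_exchange_explicit in place of the flag (type and helper names are assumptions):

/* Portable C11 sketch of a test-and-test-and-set (TATAS) lock.
   atomic_bool + atomic_exchange_explicit stands in for the project's
   TATASLock/atomic_flag combination, and sched_yield() for thread_yield().  */
#include <stdatomic.h>
#include <stdbool.h>
#include <sched.h>

typedef struct {
    atomic_bool locked;     /* false = free, true = held */
} tatas_lock_t;

static void tatas_lock_acquire(tatas_lock_t *l) {
    for (;;) {
        /* Test: spin on a cheap load until the lock looks free.  */
        while (atomic_load_explicit(&l->locked, memory_order_relaxed))
            sched_yield();
        /* Test-and-set: a successful exchange with acquire ordering
           takes the lock and orders the critical section after it.  */
        if (!atomic_exchange_explicit(&l->locked, true,
                                      memory_order_acquire))
            return;
    }
}

static void tatas_lock_release(tatas_lock_t *l) {
    atomic_store_explicit(&l->locked, false, memory_order_release);
}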
Example No. 6
#include <stdatomic.h>

int main()
{
  atomic_flag f = ATOMIC_FLAG_INIT;
  atomic_flag* p = &f;
  memory_order m = memory_order_relaxed;

  // For position only.
  atomic_flag_test_and_set(p);
  atomic_flag_test_and_set_explicit(p, m);
  atomic_flag_clear(p);
  atomic_flag_clear_explicit(p, m);

  return 0;
}
Example No. 7
/* ChapelDistribution.chpl:286 */
static int64_t destroyArr(BaseArr this8, int64_t _ln, c_string _fn) {
  memory_order local_memory_order_seq_cst;
  int64_t cnt;
  _ref_atomic_refcnt call_tmp = NULL;
  _ref_atomic_int64 call_tmp2 = NULL;
  memory_order default_argorder;
  _ref_atomic_int_least64_t call_tmp3 = NULL;
  int64_t call_tmp4;
  int64_t call_tmp5;
  chpl_bool call_tmp6;
  chpl_bool call_tmp7;
  BaseArr ret = NULL;
  object call_tmp8 = NULL;
  chpl_bool call_tmp9;
  BaseArr ret2 = NULL;
  int64_t call_tmp10;
  chpl_bool call_tmp11;
  BaseArr ret3 = NULL;
  int32_t _virtual_method_tmp_;
  chpl_opaque call_tmp12;
  int32_t _virtual_method_tmp_2;
  chpl_bool call_tmp13;
  BaseDom dom = NULL;
  BaseDom call_tmp14 = NULL;
  int32_t _virtual_method_tmp_3;
  chpl_bool T;
  _ref_atomicflag call_tmp15 = NULL;
  memory_order default_argorder2;
  _ref_atomic_flag call_tmp16 = NULL;
  chpl_bool call_tmp17;
  _ref_atomicflag call_tmp18 = NULL;
  memory_order default_argorder3;
  _ref_atomic_flag call_tmp19 = NULL;
  chpl_bool call_tmp20;
  _ref_list_BaseArr call_tmp21 = NULL;
  _ref_atomicflag call_tmp22 = NULL;
  memory_order default_argorder4;
  _ref_atomic_flag call_tmp23 = NULL;
  int64_t call_tmp24;
  chpl_bool call_tmp25;
  int32_t _virtual_method_tmp_4;
  chpl_opaque call_tmp26;
  local_memory_order_seq_cst = memory_order_seq_cst;
  compilerAssert();
  compilerAssert();
  call_tmp = &((this8)->_arrCnt);
  call_tmp2 = &((call_tmp)->_cnt);
  default_argorder = local_memory_order_seq_cst;
  call_tmp3 = &((call_tmp2)->_v);
  call_tmp4 = atomic_fetch_sub_explicit_int_least64_t(call_tmp3, INT64(1), default_argorder);
  call_tmp5 = (call_tmp4 - INT64(1));
  call_tmp6 = (call_tmp5 < INT64(0));
  if (call_tmp6) {
    halt("array reference count is negative!", _ln, _fn);
  }
  cnt = call_tmp5;
  call_tmp7 = (call_tmp5 == INT64(0));
  if (call_tmp7) {
    ret = (this8)->_arrAlias;
    call_tmp8 = ((object)(ret));
    call_tmp9 = (call_tmp8 != nil);
    if (call_tmp9) {
      ret2 = (this8)->_arrAlias;
      call_tmp10 = destroyArr(ret2, _ln, _fn);
      call_tmp11 = (call_tmp10 == INT64(0));
      if (call_tmp11) {
        ret3 = (this8)->_arrAlias;
        _virtual_method_tmp_ = ((object)(ret3))->chpl__cid;
        ((void(*)(BaseArr, int64_t, c_string))chpl_vmtable[((INT64(8) * _virtual_method_tmp_) + INT64(0))])(ret3, _ln, _fn);
        call_tmp12 = ((void*)(ret3));
        chpl_here_free(call_tmp12, _ln, _fn);
      }
    } else {
      _virtual_method_tmp_2 = ((object)(this8))->chpl__cid;
      ((void(*)(BaseArr, int64_t, c_string))chpl_vmtable[((INT64(8) * _virtual_method_tmp_2) + INT64(5))])(this8, _ln, _fn);
    }
  }
  call_tmp13 = (call_tmp5 == INT64(0));
  if (call_tmp13) {
    _virtual_method_tmp_3 = ((object)(this8))->chpl__cid;
    call_tmp14 = ((BaseDom(*)(BaseArr, int64_t, c_string))chpl_vmtable[((INT64(8) * _virtual_method_tmp_3) + INT64(6))])(this8, _ln, _fn);
    dom = call_tmp14;
    call_tmp15 = &((dom)->_arrsLock);
    default_argorder2 = local_memory_order_seq_cst;
    call_tmp16 = &((call_tmp15)->_v);
    call_tmp17 = atomic_flag_test_and_set_explicit(call_tmp16, default_argorder2);
    T = call_tmp17;
    while (T) {
      chpl_task_yield();
      call_tmp18 = &((dom)->_arrsLock);
      default_argorder3 = local_memory_order_seq_cst;
      call_tmp19 = &((call_tmp18)->_v);
      call_tmp20 = atomic_flag_test_and_set_explicit(call_tmp19, default_argorder3);
      T = call_tmp20;
    }
    call_tmp21 = &((dom)->_arrs);
    remove4(call_tmp21, this8, _ln, _fn);
    call_tmp22 = &((dom)->_arrsLock);
    default_argorder4 = local_memory_order_seq_cst;
    call_tmp23 = &((call_tmp22)->_v);
    atomic_flag_clear_explicit(call_tmp23, default_argorder4);
    call_tmp24 = destroyDom(dom, _ln, _fn);
    call_tmp25 = (call_tmp24 == INT64(0));
    if (call_tmp25) {
      _virtual_method_tmp_4 = ((object)(dom))->chpl__cid;
      ((void(*)(BaseDom, int64_t, c_string))chpl_vmtable[((INT64(8) * _virtual_method_tmp_4) + INT64(0))])(dom, _ln, _fn);
      call_tmp26 = ((void*)(dom));
      chpl_here_free(call_tmp26, _ln, _fn);
    }
  }
  return cnt;
}
Example No. 8
/* ChapelDistribution.chpl:133 */
static int64_t destroyDom(BaseDom this8, int64_t _ln, c_string _fn) {
  memory_order local_memory_order_seq_cst;
  int64_t cnt;
  _ref_atomic_refcnt call_tmp = NULL;
  _ref_atomic_int64 call_tmp2 = NULL;
  memory_order default_argorder;
  _ref_atomic_int_least64_t call_tmp3 = NULL;
  int64_t call_tmp4;
  int64_t call_tmp5;
  chpl_bool call_tmp6;
  chpl_bool call_tmp7;
  chpl_bool T;
  chpl_bool call_tmp8;
  int32_t _virtual_method_tmp_;
  BaseDist dist2 = NULL;
  BaseDist call_tmp9 = NULL;
  chpl_bool T2;
  _ref_atomicflag call_tmp10 = NULL;
  memory_order default_argorder2;
  _ref_atomic_flag call_tmp11 = NULL;
  chpl_bool call_tmp12;
  _ref_atomicflag call_tmp13 = NULL;
  memory_order default_argorder3;
  _ref_atomic_flag call_tmp14 = NULL;
  chpl_bool call_tmp15;
  _ref_list_BaseDom call_tmp16 = NULL;
  _ref_atomicflag call_tmp17 = NULL;
  memory_order default_argorder4;
  _ref_atomic_flag call_tmp18 = NULL;
  int64_t call_tmp19;
  chpl_bool call_tmp20;
  int32_t _virtual_method_tmp_2;
  chpl_opaque call_tmp21;
  local_memory_order_seq_cst = memory_order_seq_cst;
  compilerAssert();
  compilerAssert();
  call_tmp = &((this8)->_domCnt);
  call_tmp2 = &((call_tmp)->_cnt);
  default_argorder = local_memory_order_seq_cst;
  call_tmp3 = &((call_tmp2)->_v);
  call_tmp4 = atomic_fetch_sub_explicit_int_least64_t(call_tmp3, INT64(1), default_argorder);
  call_tmp5 = (call_tmp4 - INT64(1));
  call_tmp6 = (call_tmp5 < INT64(0));
  if (call_tmp6) {
    halt("domain reference count is negative!", _ln, _fn);
  }
  cnt = call_tmp5;
  call_tmp7 = (call_tmp5 == INT64(0));
  if (call_tmp7) {
    _virtual_method_tmp_ = ((object)(this8))->chpl__cid;
    call_tmp8 = ((chpl_bool(*)(BaseDom))chpl_vmtable[((INT64(8) * _virtual_method_tmp_) + INT64(1))])(this8);
    T = call_tmp8;
  } else {
    T = false;
  }
  if (T) {
    call_tmp9 = dsiMyDist(this8, _ln, _fn);
    dist2 = call_tmp9;
    call_tmp10 = &((dist2)->_domsLock);
    default_argorder2 = local_memory_order_seq_cst;
    call_tmp11 = &((call_tmp10)->_v);
    call_tmp12 = atomic_flag_test_and_set_explicit(call_tmp11, default_argorder2);
    T2 = call_tmp12;
    while (T2) {
      chpl_task_yield();
      call_tmp13 = &((dist2)->_domsLock);
      default_argorder3 = local_memory_order_seq_cst;
      call_tmp14 = &((call_tmp13)->_v);
      call_tmp15 = atomic_flag_test_and_set_explicit(call_tmp14, default_argorder3);
      T2 = call_tmp15;
    }
    call_tmp16 = &((dist2)->_doms);
    remove3(call_tmp16, this8, _ln, _fn);
    call_tmp17 = &((dist2)->_domsLock);
    default_argorder4 = local_memory_order_seq_cst;
    call_tmp18 = &((call_tmp17)->_v);
    atomic_flag_clear_explicit(call_tmp18, default_argorder4);
    call_tmp19 = destroyDist(dist2, _ln, _fn);
    call_tmp20 = (call_tmp19 == INT64(0));
    if (call_tmp20) {
      _virtual_method_tmp_2 = ((object)(dist2))->chpl__cid;
      ((void(*)(BaseDist, int64_t, c_string))chpl_vmtable[((INT64(8) * _virtual_method_tmp_2) + INT64(0))])(dist2, _ln, _fn);
      call_tmp21 = ((void*)(dist2));
      chpl_here_free(call_tmp21, _ln, _fn);
    }
  }
  return cnt;
}
Example No. 9
#include <atomic>
#include <cassert>

int main()
{
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_relaxed) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_consume) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_acquire) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_release) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_acq_rel) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_seq_cst) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_relaxed) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_consume) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_acquire) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_release) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_acq_rel) == 0);
        assert(f.test_and_set() == 1);
    }
    {
        volatile std::atomic_flag f;
        f.clear();
        assert(atomic_flag_test_and_set_explicit(&f, std::memory_order_seq_cst) == 0);
        assert(f.test_and_set() == 1);
    }
}
Example No. 10
void _flowmap_overflow_lock(flowmap *m) {
  while (atomic_flag_test_and_set_explicit(&m->overflow_lock,
                                            memory_order_acquire))
    MSB_SPINLOCK_PAUSE();
}
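The acquire loop above is normally paired with a release-side clear. A minimal sketch of the counterpart, assuming overflow_lock is a plain atomic_flag (the function name below is hypothetical, not taken from the original code):

/* Hypothetical unlock counterpart to the acquire loop above;
   memory_order_release publishes the protected updates before the
   flag is cleared.  */
void _flowmap_overflow_unlock(flowmap *m) {
  atomic_flag_clear_explicit(&m->overflow_lock, memory_order_release);
}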