Example no. 1
AO_t *
AO_stack_pop_explicit_aux_acquire(volatile AO_t *list, AO_stack_aux * a)
{
  unsigned i;
  int j = 0;
  AO_t first;
  AO_t * first_ptr;
  AO_t next;

 retry:
  first = AO_load(list);
  if (0 == first) return 0;
  /* Insert first into aux black list.                                  */
  /* This may spin if more than AO_BL_SIZE removals using auxiliary     */
  /* structure a are currently in progress.                             */
  for (i = 0; ; )
    {
      if (PRECHECK(a -> AO_stack_bl[i])
          AO_compare_and_swap_acquire(a->AO_stack_bl+i, 0, first))
        break;
      ++i;
      if ( i >= AO_BL_SIZE )
        {
          i = 0;
          AO_pause(++j);
        }
    }
  assert(i < AO_BL_SIZE);
  assert(a -> AO_stack_bl[i] == first);
  /* First is on the auxiliary black list.  It may be removed by        */
  /* another thread before we get to it, but a new insertion of x       */
  /* cannot be started here.                                            */
  /* Only we can remove it from the black list.                         */
  /* We need to make sure that first is still the first entry on the    */
  /* list.  Otherwise it's possible that a reinsertion of it was        */
  /* already started before we added the black list entry.              */
  if (AO_EXPECT_FALSE(first != AO_load(list))) {
    AO_store_release(a->AO_stack_bl+i, 0);
    goto retry;
  }
  first_ptr = AO_REAL_NEXT_PTR(first);
  next = AO_load(first_ptr);
  if (AO_EXPECT_FALSE(!AO_compare_and_swap_release(list, first, next))) {
    AO_store_release(a->AO_stack_bl+i, 0);
    goto retry;
  }
  assert(*list != first);
  /* Since we never insert an entry on the black list, this cannot have */
  /* succeeded unless first remained on the list while we were running. */
  /* Thus its next link cannot have changed out from under us, and we   */
  /* removed exactly one entry and preserved the rest of the list.      */
  /* Note that it is quite possible that an additional entry was        */
  /* inserted and removed while we were running; this is OK since the   */
  /* part of the list following first must have remained unchanged, and */
  /* first must again have been at the head of the list when the        */
  /* compare_and_swap succeeded.                                        */
  AO_store_release(a->AO_stack_bl+i, 0);
  return first_ptr;
}
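The condition in the for loop above only parses because PRECHECK is a macro that expands to a leading comparison followed by a logical AND. A minimal sketch of that macro, roughly matching the definition used in the libatomic_ops sources (treat the exact spelling as an assumption), is:

/* Assumed definition: makes the if-condition above read                */
/*   (a->AO_stack_bl[i]) == 0 && AO_compare_and_swap_acquire(...)       */
/* i.e. the comparatively expensive CAS is skipped whenever the slot    */
/* is visibly occupied.                                                  */
#define PRECHECK(slot) (slot) == 0 &&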
/* Acquire the lock; may cause starvation, since a waiter that keeps   */
/* losing the CAS race is never guaranteed to win.                      */
int AO_wait_lock_g(AO_lock_ptr_t lock, AO_lock_val_t self) {
    AO_t lk_val;

    /* Re-entrant call: the lock word already holds our identity. */
    if (AO_load((volatile AO_t *)lock) == (AO_t)self) return 0;
    while (AO_compare_and_swap_release((volatile AO_t *)lock,
                                       (AO_t)0, (AO_t)self) == 0) {
        lk_val = AO_load((volatile AO_t *)lock);
        if (lk_val == 0) continue;    /* owner just released it; retry the CAS */
        futex_wait(lock, lk_val);     /* otherwise sleep until the word changes */
    }
    return 1;
}
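Only the acquire path is shown above. A matching release might look like the sketch below; AO_unlock_g and futex_wake are assumptions here (futex_wake standing in for a FUTEX_WAKE wrapper symmetric to the futex_wait wrapper used above), not part of libatomic_ops.

/* Hypothetical counterpart to AO_wait_lock_g: publish the unlocked     */
/* state with release semantics, then wake one sleeping waiter.         */
void AO_unlock_g(AO_lock_ptr_t lock)
{
    AO_store_release((volatile AO_t *)lock, (AO_t)0);
    futex_wake(lock, 1);    /* assumed wrapper around FUTEX_WAKE */
}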
Example no. 3
/* List headers and link fields hold "perturbed" pointers, i.e.         */
/* pointers with extra bits "or"ed into the low order bits.             */
void
AO_stack_push_explicit_aux_release(volatile AO_t *list, AO_t *x,
                                   AO_stack_aux *a)
{
  AO_t x_bits = (AO_t)x;
  AO_t next;

  /* No deletions of x can start here, since x is not currently in the  */
  /* list.                                                              */
 retry:
# if AO_BL_SIZE == 2
  {
    /* Start all loads as close to concurrently as possible. */
    AO_t entry1 = AO_load(a -> AO_stack_bl);
    AO_t entry2 = AO_load(a -> AO_stack_bl + 1);
    if (entry1 == x_bits || entry2 == x_bits)
      {
        /* Entry is currently being removed.  Change it a little.       */
          ++x_bits;
          if ((x_bits & AO_BIT_MASK) == 0)
            /* Version count overflowed;         */
            /* EXTREMELY unlikely, but possible. */
            x_bits = (AO_t)x;
        goto retry;
      }
  }
# else
  {
    int i;
    for (i = 0; i < AO_BL_SIZE; ++i)
      {
        if (AO_load(a -> AO_stack_bl + i) == x_bits)
          {
            /* Entry is currently being removed.  Change it a little.   */
              ++x_bits;
              if ((x_bits & AO_BIT_MASK) == 0)
                /* Version count overflowed;         */
                /* EXTREMELY unlikely, but possible. */
                x_bits = (AO_t)x;
            goto retry;
          }
      }
  }
# endif
  /* x_bits is not currently being deleted */
  do
    {
      next = AO_load(list);
      *x = next;
    }
  while (AO_EXPECT_FALSE(!AO_compare_and_swap_release(list, next, x_bits)));
}
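The perturbation above only works because the low-order bits of a link word are ignored when the list is walked; AO_REAL_NEXT_PTR in the pop routine of Example no. 1 strips them again. A rough sketch of what those helpers amount to (the real definitions live in atomic_ops_stack.h, so treat the exact mask as an assumption):

/* Assumed shape of the helpers: elements are at least AO_t-aligned,    */
/* so the bottom bits of a link value are free to act as a tiny         */
/* version count.  AO_BIT_MASK selects those count bits, and            */
/* AO_REAL_NEXT_PTR() drops them to recover the element address.        */
#define AO_BIT_MASK         ((AO_t)(sizeof(AO_t) - 1))
#define AO_REAL_NEXT_PTR(x) ((AO_t *)((x) & ~AO_BIT_MASK))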
Example no. 4
void AO_stack_push_release(AO_stack_t *list, AO_t *element)
{
    AO_t next;

    do {
      next = AO_load(&(list -> ptr));
      *element = next;
    } while (AO_EXPECT_FALSE(!AO_compare_and_swap_release(&(list -> ptr),
                                                      next, (AO_t)element)));
    /* This uses a narrow CAS here, an old optimization suggested       */
    /* by Treiber.  Pop is still safe, since we run into the ABA        */
    /* problem only if there were both intervening "pop"s and "push"es. */
    /* In that case we still see a change in the version number.        */
}
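Taken together, this push and the pop from Example no. 1 are driven through a list header and an AO_t link word embedded in each element. A minimal usage sketch, assuming the public AO_stack_init/AO_stack_push_release/AO_stack_pop_acquire interface from atomic_ops_stack.h and a node type invented here for illustration:

#include <atomic_ops.h>
#include <atomic_ops_stack.h>

/* Illustrative element type: the stack links through the embedded      */
/* AO_t, so that word is what gets passed to push and returned by pop.  */
struct node {
    AO_t link;          /* link word used by the stack */
    int value;
};

static AO_stack_t the_stack;
static struct node a_node;

void demo(void)
{
    AO_stack_init(&the_stack);

    a_node.value = 42;
    AO_stack_push_release(&the_stack, &a_node.link);

    AO_t *link = AO_stack_pop_acquire(&the_stack);
    if (link != 0) {
        /* link points at the element's link word, here its first field. */
        struct node *popped = (struct node *)link;
        (void)popped->value;
    }
}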
/* Ownership tests: the CAS can only succeed if the lock word already   */
/* holds the caller's identity; old and new values are both self, so    */
/* the word is left unchanged either way.                                */
int AO_have_lock_g(AO_lock_ptr_t lock, AO_lock_val_t self) {
    return AO_compare_and_swap_release((volatile AO_t *)lock,
                                       (AO_t)self, (AO_t)self) != 0;
}

int AO_have_lock(AO_lock_ptr_t lock) {
    pthread_t self = pthread_self();
    return AO_compare_and_swap_release((volatile AO_t *)lock,
                                       (AO_t)self, (AO_t)self) != 0;
}
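For completeness, a hypothetical calling sequence for these helpers, assuming the lock word starts out as 0 and using the AO_unlock_g sketch given after Example no. 1's lock code:

#include <pthread.h>
#include <assert.h>

/* Hypothetical usage of the lock helpers above. */
void with_lock(AO_lock_ptr_t lock)
{
    AO_lock_val_t self = (AO_lock_val_t)pthread_self();

    AO_wait_lock_g(lock, self);     /* blocks (futex_wait) until acquired */
    assert(AO_have_lock(lock));     /* the word now holds our identity    */

    /* ... critical section ... */

    AO_unlock_g(lock);              /* clear the word and wake a waiter   */
}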