Example #1
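/* Move the turn to the previous player: walk backwards from *ppJ with
 * LIST_PREV, skipping the list-head sentinel, until a player that is still
 * playing is found, then update *ppJ. */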
TEG_STATUS turno_2prevplayer( PSPLAYER *ppJ )
{
	PSPLAYER pJ;
	PLIST_ENTRY first_node = (PLIST_ENTRY)*ppJ;
	PLIST_ENTRY l = LIST_PREV( (*ppJ));

	TURNO_DEBUG("Old turn: '%s'\n",(*ppJ)->name);

	g_game.old_turn = *ppJ;

	if( IsListEmpty( first_node ) )
		return TEG_STATUS_ERROR;

	while( l != first_node ) {
		pJ = (PSPLAYER) l;
		if( (l != &g_list_player) && player_is_playing(pJ) ) {
			(*ppJ) = pJ;
			TURNO_DEBUG("New turn: '%s'\n",pJ->name);
			return TEG_STATUS_SUCCESS;
		}
		l = LIST_PREV(l);
	}

	con_text_out_wop(M_ERR,"Abnormal error in turno_2prevplayer\n");
	return TEG_STATUS_PLAYERNOTFOUND;
}
Example #2
int main()
{
    struct node_head *h;
    struct node *n;
    int i;
    h = (struct node_head *)malloc(sizeof (struct node_head));
    LIST_INIT(h);
    for (i = 0; i < 5; i++)
    {
        n = (struct node *)malloc(sizeof (struct node));
        LIST_INSERT_HEAD(h, n, entry);
    }

    printf("Head:%p, first-addr:%p, first-val:%p\n", h, &LIST_FIRST(h), LIST_FIRST(h));

    LIST_FOREACH(n, h, entry)
    {

            printf("n: %p n->prev: %p, *n->prev: %p, &(n->next):%p, n->next: %p \n",
                    n,
                    LIST_PREV(n, entry),
                    *LIST_PREV(n, entry),
                    &LIST_NEXT(n, entry),
                    LIST_NEXT(n, entry)
            );

    }

    return 0;
}
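
Note: the snippet above is not self-contained; it omits the includes, the node
types, and the two-argument LIST_PREV macro it relies on. A minimal sketch of
those missing pieces (assumptions, not taken from the original source), placed
above main() and using BSD-style <sys/queue.h> lists with LIST_PREV guessed to
be the le_prev back-pointer, could look like this:

#include <stdio.h>
#include <stdlib.h>
#include <sys/queue.h>

/* Guessed two-argument LIST_PREV: exposes the le_prev back-pointer of a BSD
 * list entry. le_prev points at the previous element's le_next field (or at
 * the head's lh_first for the first element), which is why the snippet can
 * both print LIST_PREV(n, entry) and dereference it. */
#ifdef LIST_PREV               /* some queue.h versions define a 4-argument LIST_PREV */
#undef LIST_PREV
#endif
#define LIST_PREV(elm, field)  ((elm)->field.le_prev)

struct node {
    LIST_ENTRY(node) entry;    /* embeds le_next and le_prev */
};

LIST_HEAD(node_head, node);    /* struct node_head { struct node *lh_first; }; */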
Example #3
/*
 * get_next_event
 *
 * Safely get the next event to send from 'evlist'. This function handles the
 * case where an event has transitioned from the active to the inactive list,
 * and continues to process all events in the active list.
 */
static INLINE mempart_evt_t *get_next_event(part_evtlist_t *evlist, mempart_evt_t *event, elistdata_t *eld_p)
{
	INTR_LOCK(&evlist->active.lock);

	CRASHCHECK(event->inuse == 0);

	/* Done with both the current and previous nodes */
	--event->inuse;
	if (eld_p->prev_event != NULL) --eld_p->prev_event->inuse;

	if (event->onlist == &evlist->active)
	{
		/* just get the next event on the active list */
		eld_p->prev_event = event;
		event = (mempart_evt_t *)LIST_NEXT(event);
		if (event != NULL)
		{
			++eld_p->prev_event->inuse;
			++event->inuse;
			eld_p->prev_active = (mempart_evt_t **)LIST_PREV(event);
		}
	}
	else
	{
		/*
		 * here we handle the case where 'event' was moved from the active
		 * list to the inactive list during delivery. If the event is not on
		 * the active list, this is what happened and so we look at the
		 * 'eld_p->prev_active' (which was on the active list, it may not be now)
		 * and possibly use what it points to for the next 'event'
		 * NOTE: since 'event' is no longer on the active list, **prev_active
		 * 		 points to 'event->next' iff (**prev_active) is still on the
		 * 		 active list
		*/
		if ((event = (*eld_p->prev_active)) != NULL)
		{
			/* if our previous active transitioned off the active list there is
			 * no way to continue with the active list, so return NULL */
			if (event->onlist != &evlist->active) event = NULL;
			else
			{
				eld_p->prev_active = (mempart_evt_t **)LIST_PREV(event);
				if (eld_p->prev_active == (mempart_evt_t **)&evlist->active.list.head)
					eld_p->prev_event = NULL;
				else
					eld_p->prev_event = *((mempart_evt_t **)LIST_PREV(event));
				++event->inuse;
				if (eld_p->prev_event != NULL) ++eld_p->prev_event->inuse;
			}
		}
	}
	INTR_UNLOCK(&evlist->active.lock);
	return event;
}
Example #4
static void
aoff_unlink_free_block(Allctr_t *allctr, Block_t *blk)
{
    AOFFAllctr_t* alc = (AOFFAllctr_t*)allctr;
    AOFF_RBTree_t* del = (AOFF_RBTree_t*)blk;
    AOFF_Carrier_t *crr = (AOFF_Carrier_t*) FBLK_TO_MBC(&del->hdr);

    ASSERT(crr->rbt_node.hdr.bhdr == crr->root->max_sz);
    HARD_CHECK_TREE(&crr->crr, alc->blk_order, crr->root, 0);

    if (alc->blk_order == FF_BF) {
	ASSERT(del->flags & IS_BF_FLG);
	if (IS_LIST_ELEM(del)) {
	    /* Remove from list */
	    ASSERT(LIST_PREV(del));
	    ASSERT(LIST_PREV(del)->flags & IS_BF_FLG);
	    LIST_NEXT(LIST_PREV(del)) = LIST_NEXT(del);
	    if (LIST_NEXT(del)) {
		ASSERT(LIST_NEXT(del)->flags & IS_BF_FLG);
		LIST_PREV(LIST_NEXT(del)) = LIST_PREV(del);
	    }
	    return;
	}
	else if (LIST_NEXT(del)) {
	    /* Replace tree node by next element in list... */
	    
	    ASSERT(AOFF_BLK_SZ(LIST_NEXT(del)) == AOFF_BLK_SZ(del));
	    ASSERT(IS_LIST_ELEM(LIST_NEXT(del)));
	    
	    replace(&crr->root, (AOFF_RBTree_t*)del, LIST_NEXT(del));
	    
	    HARD_CHECK_TREE(&crr->crr, alc->blk_order, crr->root, 0);
	    return;
	}
    }

    rbt_delete(&crr->root, (AOFF_RBTree_t*)del);

    HARD_CHECK_TREE(&crr->crr, alc->blk_order, crr->root, 0);

    /* Update the carrier tree with a potentially new (lower) max_sz
     */    
    if (crr->root) {
	if (crr->rbt_node.hdr.bhdr == crr->root->max_sz) {
	    return;
	}
	ASSERT(crr->rbt_node.hdr.bhdr > crr->root->max_sz);
	crr->rbt_node.hdr.bhdr = crr->root->max_sz; 
    }
    else {
	crr->rbt_node.hdr.bhdr = 0;
    }
    lower_max_size(&crr->rbt_node, NULL);
}
Example #6
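/* Test/debug hook: maps opcodes onto internals of the best-fit allocator,
 * including the LIST_NEXT (0x205) and LIST_PREV (0x20b) links of the
 * same-size block lists. */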
UWord
erts_bfalc_test(UWord op, UWord a1, UWord a2)
{
    switch (op) {
    case 0x200:
        return (UWord) ((BFAllctr_t *) a1)->address_order; /* IS_AOBF */
    case 0x201:
        return (UWord) ((BFAllctr_t *) a1)->mbc_root;
    case 0x202:
        return (UWord) ((RBTree_t *) a1)->parent;
    case 0x203:
        return (UWord) ((RBTree_t *) a1)->left;
    case 0x204:
        return (UWord) ((RBTree_t *) a1)->right;
    case 0x205:
        return (UWord) LIST_NEXT(a1);
    case 0x206:
        return (UWord) IS_BLACK((RBTree_t *) a1);
    case 0x207:
        return (UWord) IS_TREE_NODE((RBTree_t *) a1);
    case 0x208:
        return (UWord) 1; /* IS_BF_ALGO */
    case 0x20a:
        return (UWord) !((BFAllctr_t *) a1)->address_order; /* IS_BF */
    case 0x20b:
        return (UWord) LIST_PREV(a1);
    default:
        ASSERT(0);
        return ~((UWord) 0);
    }
}
Example #7
File: bio.c Project: phoboz/vmx
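/* Scan the mount's buffer list from the tail backwards via LIST_PREV and
 * claim the first buffer that is not busy; return NULL if every buffer is
 * in use. */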
struct buf* buf_new (
    struct vnode * vp,
    lblkno_t       blkno
    ) {
    struct buf *bp;
    LIST *pBufHead = &vp->v_mount->mnt_buflist;

    for (bp = (struct buf *) LIST_TAIL (pBufHead);
         bp != NULL;
         bp = (struct buf *) LIST_PREV (&bp->b_node)) {
        if ((bp->b_flags & B_BUSY) == 0) {
            bp->b_flags = B_BUSY;
            bp->b_lblkno = 0;
            bp->b_count = 0;
            bp->b_dev   = -1;
            bp->b_vp    = NULL;
            bp->b_error = OK;
            bp->b_resid = 0;

            return (bp);
        }
    }

    return (NULL);
}
Example #8
File: world.c Project: phoboz/yz
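/* Generate the terrain row by row, then walk the field list from tail to
 * head with LIST_PREV, adding roads from each field to its predecessor and
 * to the fields saved in fs, fm and fe (list head, the field kept from the
 * row loop, and list tail). */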
static void init_world_terrain(
  WORLD *world,
  int x,
  int y,
  int cw,
  int ch
  )
{
  int i, sx, sy;
  int mw, mh;
  FIELD *f, *fs, *fm, *fe, *field;

  sx = x / (world->tile_width * cw);
  sy = y / (world->tile_height * ch);

  mw = world->map->w / cw;
  mh = world->map->h / ch;

  for (i = 0; i < mh; i++) {

    f = process_world_row(world, i, mw, sx, sy, cw, ch, (i + 1) % 2);
    if (f != NULL)
      fm = f;

  }

  fs = (FIELD *) LIST_HEAD(&world->fieldList);
  fe = (FIELD *) LIST_TAIL(&world->fieldList);

  for(field = (FIELD *) LIST_TAIL(&world->fieldList);
      field != NULL;
      field = (FIELD *) LIST_PREV(&field->listNode)) {

    f = (FIELD *) LIST_PREV(&field->listNode);
    if (f == NULL)
      break;

    add_world_road_to(world, field, f);
    add_world_road_to(world, field, fs);
    add_world_road_to(world, field, fm);
    add_world_road_to(world, field, fe);
  }

  reveal_world_terrain(world, x, y);
}
Example #9
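/* Unlink a free block: a plain list element is spliced out of its same-size
 * chain via LIST_PREV/LIST_NEXT; a tree node that has list successors is
 * replaced in the tree by the next list element; otherwise the node is
 * removed from the tree. */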
static ERTS_INLINE void
bf_unlink_free_block(Allctr_t *allctr, Block_t *block)
{
    BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
    RBTree_t **root = &bfallctr->mbc_root;
    RBTree_t *x = (RBTree_t *) block;

    if (IS_LIST_ELEM(x)) {
	/* Remove from list */
	ASSERT(LIST_PREV(x));
	LIST_NEXT(LIST_PREV(x)) = LIST_NEXT(x);
	if (LIST_NEXT(x))
	    LIST_PREV(LIST_NEXT(x)) = LIST_PREV(x);
    }
    else if (LIST_NEXT(x)) {
	/* Replace tree node by next element in list... */

	ASSERT(BF_BLK_SZ(LIST_NEXT(x)) == BF_BLK_SZ(x));
	ASSERT(IS_TREE_NODE(x));
	ASSERT(IS_LIST_ELEM(LIST_NEXT(x)));

#ifdef HARD_DEBUG
	check_tree(root, 0, 0);
#endif
	replace(root, x, LIST_NEXT(x));

#ifdef HARD_DEBUG
	check_tree(bfallctr, 0);
#endif
    }
    else {
	/* Remove from tree */
	tree_delete(allctr, block);
    }

    DESTROY_LIST_ELEM(x);
}
Example #10
UWord
erts_aoffalc_test(UWord op, UWord a1, UWord a2)
{
    switch (op) {
    case 0x500: return (UWord) ((AOFFAllctr_t *) a1)->blk_order == FF_AOBF;
    case 0x501: {
	AOFF_RBTree_t *node = ((AOFFAllctr_t *) a1)->mbc_root; 
	Uint size = (Uint) a2;
	node = node ? rbt_search(node, size) : NULL;
	return (UWord) (node ? RBT_NODE_TO_MBC(node)->root : NULL);
    }
    case 0x502:	return (UWord) ((AOFF_RBTree_t *) a1)->parent;
    case 0x503:	return (UWord) ((AOFF_RBTree_t *) a1)->left;
    case 0x504:	return (UWord) ((AOFF_RBTree_t *) a1)->right;
    case 0x505:	return (UWord) LIST_NEXT(a1);
    case 0x506:	return (UWord) IS_BLACK((AOFF_RBTree_t *) a1);
    case 0x507:	return (UWord) IS_TREE_NODE((AOFF_RBTree_t *) a1);
    case 0x508: return (UWord) 0; /* IS_BF_ALGO */
    case 0x509: return (UWord) ((AOFF_RBTree_t *) a1)->max_sz;
    case 0x50a: return (UWord) ((AOFFAllctr_t *) a1)->blk_order == FF_BF;
    case 0x50b:	return (UWord) LIST_PREV(a1);
    default:	ASSERT(0); return ~((UWord) 0);
    }
}
Example #11
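/* Link a free block into the best-fit tree; a block whose size equals an
 * existing tree node is not inserted into the tree but chained right after
 * that node in its same-size list via LIST_NEXT/LIST_PREV. */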
static void
bf_link_free_block(Allctr_t *allctr, Block_t *block)
{
    BFAllctr_t *bfallctr = (BFAllctr_t *) allctr;
    RBTree_t **root = &bfallctr->mbc_root;
    RBTree_t *blk = (RBTree_t *) block;
    Uint blk_sz = BF_BLK_SZ(blk);

    SET_TREE_NODE(blk);


    blk->flags	= 0;
    blk->left	= NULL;
    blk->right	= NULL;

    if (!*root) {
	blk->parent = NULL;
	SET_BLACK(blk);
	*root = blk;
    }
    else {
	RBTree_t *x = *root;
	while (1) {
	    Uint size;

	    size = BF_BLK_SZ(x);

	    if (blk_sz == size) {

		SET_LIST_ELEM(blk);
		LIST_NEXT(blk) = LIST_NEXT(x);
		LIST_PREV(blk) = x;
		if (LIST_NEXT(x))
		    LIST_PREV(LIST_NEXT(x)) = blk;
		LIST_NEXT(x) = blk;

		return; /* Finished */
	    }
	    else if (blk_sz < size) {
		if (!x->left) {
		    blk->parent = x;
		    x->left = blk;
		    break;
		}
		x = x->left;
	    }
	    else {
		if (!x->right) {
		    blk->parent = x;
		    x->right = blk;
		    break;
		}
		x = x->right;
	    }
	}

	RBT_ASSERT(blk->parent);

	SET_RED(blk);
	if (IS_RED(blk->parent))
	    tree_insert_fixup(root, blk);

    }

    SET_TREE_NODE(blk);
    LIST_NEXT(blk) = NULL;

#ifdef HARD_DEBUG
    check_tree(root, 0, 0);
#endif
}
Example #12
/**
 * paxos_commit - Commit a value for an instance of the Paxos protocol.
 *
 * We totally order calls to paxos_learn by instance number in order to make
 * the join and greet protocols behave properly.  This also gives our chat
 * clients an easy mechanism for totally ordering their logs without extra
 * work on their part.
 *
 * It is possible that failed DEC_PART decrees (i.e., decrees in which the
 * proposer attempts to disconnect an acceptor who a majority of acceptors
 * believe is still alive) could delay the learning of committed chat
 * messages.  To avoid this, once a proposer receives enough rejections
 * of the decree, the part decree is replaced with a null decree.  The
 * proposer can then issue the part again with a higher instance number
 * if desired.
 */
int
paxos_commit(struct paxos_instance *inst)
{
  int r;
  struct paxos_request *req = NULL;
  struct paxos_instance *it;

  // Mark the commit.
  inst->pi_committed = true;

  // Pull the request from the request cache if applicable.
  if (request_needs_cached(inst->pi_val.pv_dkind)) {
    req = request_find(&pax->rcache, inst->pi_val.pv_reqid);

    // If we can't find a request and need one, send out a retrieve to the
    // request originator and defer the commit.
    if (req == NULL) {
      return paxos_retrieve(inst);
    }
  }

  // Mark the cache.
  inst->pi_cached = true;

  // We should already have committed and learned everything before the hole.
  assert(inst->pi_hdr.ph_inum >= pax->ihole);

  // Since we want our learns to be totally ordered, if we didn't just fill
  // the hole, we cannot learn.
  if (inst->pi_hdr.ph_inum != pax->ihole) {
    // If we're the proposer, we have to just wait it out.
    if (is_proposer()) {
      return 0;
    }

    // If the hole has committed but is just waiting on a retrieve, we'll learn
    // when we receive the resend.
    if (pax->istart->pi_hdr.ph_inum == pax->ihole && pax->istart->pi_committed) {
      assert(!pax->istart->pi_cached);
      return 0;
    }

    // The hole is either missing or uncommitted and we are not the proposer,
    // so issue a retry.
    return acceptor_retry(pax->ihole);
  }

  // Set pax->istart to point to the instance numbered pax->ihole.
  if (pax->istart->pi_hdr.ph_inum != pax->ihole) {
    pax->istart = LIST_NEXT(pax->istart, pi_le);
  }
  assert(pax->istart->pi_hdr.ph_inum == pax->ihole);

  // Now learn as many contiguous commits as we can.  This function is the
  // only path by which we learn commits, and we always learn in contiguous
  // blocks.  Therefore, it is an invariant of our system that all the
  // instances numbered lower than pax->ihole are learned and committed, and
  // none of the instances geq to pax->ihole are learned (although some may
  // be committed).
  //
  // We iterate over the instance list, detecting and breaking if we find a
  // hole and learning whenever we don't.
  for (it = pax->istart; ; it = LIST_NEXT(it, pi_le), ++pax->ihole) {
    // If we reached the end of the list, set pax->istart to the last existing
    // instance.
    if (it == (void *)&pax->ilist) {
      pax->istart = LIST_LAST(&pax->ilist);
      break;
    }

    // If we skipped over an instance number because we were missing an
    // instance, set pax->istart to the last instance before the hole.
    if (it->pi_hdr.ph_inum != pax->ihole) {
      pax->istart = LIST_PREV(it, pi_le);
      break;
    }

    // If we found an uncommitted or uncached instance, set pax->istart to it.
    if (!it->pi_committed || !it->pi_cached) {
      pax->istart = it;
      break;
    }

    // By our invariant, since we are past our original hole, no instance
    // should be learned.
    assert(!it->pi_learned);

    // Grab its associated request.  This is guaranteed to exist because we
    // have checked that pi_cached holds.
    req = NULL;
    if (request_needs_cached(it->pi_val.pv_dkind)) {
      req = request_find(&pax->rcache, it->pi_val.pv_reqid);
      assert(req != NULL);
    }

    // Learn the value.
    ERR_RET(r, paxos_learn(it, req));
  }

  return 0;
}
Example #13
static void
rbt_insert(enum AOFFSortOrder order, AOFF_RBTree_t** root, AOFF_RBTree_t* blk)
{
    Uint blk_sz = AOFF_BLK_SZ(blk);

#ifdef DEBUG
    blk->flags  = (order == FF_BF) ? IS_BF_FLG : 0;
#else
    blk->flags  = 0; 
#endif
    blk->left	= NULL;
    blk->right	= NULL;
    blk->max_sz = blk_sz;

    if (!*root) {
	blk->parent = NULL;
	SET_BLACK(blk);
	*root = blk;
    }
    else {
	AOFF_RBTree_t *x = *root;
	while (1) {
	    SWord diff; 
	    if (x->max_sz < blk_sz) {
		x->max_sz = blk_sz;
	    }
	    diff = cmp_blocks(order, blk, x);
	    if (diff < 0) {
		if (!x->left) {
		    blk->parent = x;
		    x->left = blk;
		    break;
		}
		x = x->left;
	    }
	    else if (diff > 0) {
		if (!x->right) {
		    blk->parent = x;
		    x->right = blk;
		    break;
		}
		x = x->right;
	    }
	    else {
		ASSERT(order == FF_BF);
		ASSERT(blk->flags & IS_BF_FLG);			    
		ASSERT(x->flags & IS_BF_FLG);			    
		SET_LIST_ELEM(blk);
		LIST_NEXT(blk) = LIST_NEXT(x);
		LIST_PREV(blk) = x;
		if (LIST_NEXT(x))
		    LIST_PREV(LIST_NEXT(x)) = blk;
		LIST_NEXT(x) = blk;
		return;
	    }
	}

	/* Insert block into size tree */
	RBT_ASSERT(blk->parent);

	SET_RED(blk);
	if (IS_RED(blk->parent))
	    tree_insert_fixup(root, blk);
    }
    if (order == FF_BF) {
	SET_TREE_NODE(blk);
	LIST_NEXT(blk) = NULL;
    }
}
static AOFF_RBTree_t *
check_tree(Carrier_t* within_crr, enum AOFFSortOrder order, AOFF_RBTree_t* root, Uint size)
{
    AOFF_RBTree_t *res = NULL;
    Sint blacks;
    Sint curr_blacks;
    AOFF_RBTree_t *x;
    Carrier_t* crr;
    Uint depth, max_depth, node_cnt;

#ifdef PRINT_TREE
    print_tree(root);
#endif
    ASSERT((within_crr && order >= FF_AOFF) ||
           (!within_crr && order <= FF_AOFF));

    if (!root)
	return res;

    x = root;
    ASSERT(IS_BLACK(x));
    ASSERT(!x->parent);
    curr_blacks = 1;
    blacks = -1;
    depth = 1;
    max_depth = 0;
    node_cnt = 0;

    while (x) {
	if (!IS_LEFT_VISITED(x)) {
	    SET_LEFT_VISITED(x);
	    if (x->left) {
		x = x->left;
		++depth;
		if (IS_BLACK(x))
		    curr_blacks++;
		continue;
	    }
	    else {
		if (blacks < 0)
		    blacks = curr_blacks;
		ASSERT(blacks == curr_blacks);
	    }
	}

	if (!IS_RIGHT_VISITED(x)) {
	    SET_RIGHT_VISITED(x);
	    if (x->right) {
		x = x->right;
		++depth;
		if (IS_BLACK(x))
		    curr_blacks++;
		continue;
	    }
	    else {
		if (blacks < 0)
		    blacks = curr_blacks;
		ASSERT(blacks == curr_blacks);
	    }
	}

	++node_cnt;
	if (depth > max_depth)
	    max_depth = depth;

	if (within_crr) {
	    crr = FBLK_TO_MBC(&x->hdr);
	    ASSERT(crr == within_crr);
	    ASSERT((char*)x > (char*)crr);
	    ASSERT(((char*)x + AOFF_BLK_SZ(x)) <= ((char*)crr + CARRIER_SZ(crr)));

	}
	if (order == FF_BF) {
	    AOFF_RBTree_t* y = x;
	    AOFF_RBTree_t* nxt = LIST_NEXT(y);
	    ASSERT(IS_TREE_NODE(x));
	    while (nxt) {
		ASSERT(IS_LIST_ELEM(nxt));
		ASSERT(AOFF_BLK_SZ(nxt) == AOFF_BLK_SZ(x));
		ASSERT(FBLK_TO_MBC(&nxt->hdr) == within_crr);
		ASSERT(LIST_PREV(nxt) == y);
		y = nxt;
		nxt = LIST_NEXT(nxt);
	    }
	}

	if (IS_RED(x)) {
	    ASSERT(IS_BLACK(x->right));
	    ASSERT(IS_BLACK(x->left));
	}

	ASSERT(x->parent || x == root);

	if (x->left) {
	    ASSERT(x->left->parent == x);
	    ASSERT(cmp_blocks(order, x->left, x) < 0);
	    ASSERT(x->left->max_sz <= x->max_sz);	    
	}

	if (x->right) {
	    ASSERT(x->right->parent == x);
	    ASSERT(cmp_blocks(order, x->right, x) > 0);
	    ASSERT(x->right->max_sz <= x->max_sz);	    
	}
	ASSERT(x->max_sz >= AOFF_BLK_SZ(x));
	ASSERT(x->max_sz == AOFF_BLK_SZ(x)
	       || x->max_sz == (x->left ? x->left->max_sz : 0)
	       || x->max_sz == (x->right ? x->right->max_sz : 0));

	if (size && AOFF_BLK_SZ(x) >= size) {
	    if (!res || cmp_blocks(order, x, res) < 0) {
		res = x;
	    }
	}

	UNSET_LEFT_VISITED(x);
	UNSET_RIGHT_VISITED(x);
	if (IS_BLACK(x))
	    curr_blacks--;
	x = x->parent;
	--depth;
    }
    ASSERT(depth == 0 || (!root && depth==1)); 
    ASSERT(curr_blacks == 0);
    ASSERT((1 << (max_depth/2)) <= node_cnt);

    UNSET_LEFT_VISITED(root);
    UNSET_RIGHT_VISITED(root);

    return res;

}