void BJPlayer::reset(const BJShoe & shoe, BJRules & rules,
                     BJStrategy & strategy, BJProgress & progress) {

// Forget about any cards already dealt from the shoe, so shoe.reset(hand) will
// work.

    this->shoe = shoe;
    numHands = 0;
    for (int card = 1; card <= 10; card++) {
        playerHands[numHands].cards[card - 1] =
            playerHands[numHands].hitHand[card - 1] = 0;
        this->shoe.setTotalCards(card, shoe.getCards(card));
    }

// Remember resplit rules when enumerating player hands.

    for (int pairCard = 1; pairCard <= 10; pairCard++) {
        resplit[pairCard - 1] = rules.getResplit(pairCard);
    }

// Enumerate all possible player hands.

    currentHand.reset();
    countHands(numHands++, 1);
    linkHands();

// Compute dealer probabilities for each hand.  This takes the most time, so
// keep the caller updated on the progress.

    computeDealer(rules, progress);

// Compute expected values for standing, doubling down, and hitting (in that
// order, so all required values will be available when needed).

    linkHandCounts();
    computeStand();
    computeDoubleDown();
    computeHit(rules, strategy);

// Compute expected values for splitting pairs.  Re-link original hands by
// count for future use.

    computeSplit(rules, strategy);
    linkHandCounts();

// Blackjack pays 3:2, so correct the value for standing on this hand.  We wait
// to do this until after computing E(split) since a blackjack after splitting
// a pair only pays even money.

    correctStandBlackjack();

// Compute overall expected values, condition individual hands on no dealer
// blackjack, and finalize progress indicator.

    computeOverall(rules, strategy);
    conditionNoBlackjack();
    progress.indicate(100);
}
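
A minimal usage sketch for this example (hedged: the header name and the constructor
signatures for the BJ* classes below are assumptions about the surrounding library,
not confirmed by this snippet):

// Hypothetical driver: build the inputs and let reset() recompute all
// expected values for the current shoe composition.
#include "blackjack.h"      // assumed header declaring the BJ* classes

int main() {
    BJShoe shoe(6);          // assumption: shoe constructed from a deck count
    BJRules rules;           // assumption: default rule set
    BJStrategy strategy;     // assumption: default strategy object
    BJProgress progress;     // assumption: simple progress reporter
    BJPlayer player(shoe, rules, strategy, progress);

    // reset() can be called again later to forget any cards already dealt
    // from the shoe and recompute the stand/hit/double/split expected values.
    player.reset(shoe, rules, strategy, progress);
    return 0;
}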
Example #2
bool DecisionTreeClusterNode::computeBestSplitBestRandomSplit( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
    return computeSplit( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError);
}
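
For comparison, and hedged as a sketch based on the GRT source layout rather than on
this snippet: the sibling "best iterative split" entry point in DecisionTreeClusterNode
forwards to the same clustering-based computeSplit, so both split strategies share one
implementation and numSplittingSteps is simply passed through.

bool DecisionTreeClusterNode::computeBestSplitBestIterativeSplit( const UINT &numSplittingSteps, const ClassificationData &trainingData, const Vector< UINT > &features, const Vector< UINT > &classLabels, UINT &featureIndex, Float &minError ){
    // Same delegation as computeBestSplitBestRandomSplit above.
    return computeSplit( numSplittingSteps, trainingData, features, classLabels, featureIndex, minError );
}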