/**
		 * Recursively populate a node with child items until the requested number of
		 * descendants has been produced, randomly nesting up to five levels deep.
		 *
		 * @param MakeChildrenForMe       Node that receives the newly created children
		 * @param DescendantsLeftToMake   Remaining number of descendants to create
		 * @param NestingDepth            Current recursion depth (limits nesting to 5 levels)
		 *
		 * @return The number of descendants still outstanding when this call finished.
		 */
		static int32 GenerateChildren( TSharedRef<FTestData> MakeChildrenForMe, int32 DescendantsLeftToMake, int32 NestingDepth )
		{
			while ( DescendantsLeftToMake >= 0 )
			{
				// Create one child and attach it to the target node.
				TSharedRef<FTestData> FreshItem = FTestData::Make( LOCTEXT("ChildItem", "Child Item" ) );
				MakeChildrenForMe->AddChild( FreshItem );
				--DescendantsLeftToMake;

				// Randomly finish this level; the deeper we are, the more likely we
				// are to pop back up, which caps nesting at about five levels.
				const bool bFinishedThisLevel = BinaryProbability(5-NestingDepth);
				if ( bFinishedThisLevel )
				{
					return DescendantsLeftToMake;
				}

				// Otherwise, maybe recurse one level deeper under the item just added;
				// if not, keep appending siblings at the current level.
				if ( BinaryProbability(NestingDepth) )
				{
					DescendantsLeftToMake = GenerateChildren( FreshItem, DescendantsLeftToMake, NestingDepth+1 );
				}
			}
			return DescendantsLeftToMake;
		}
// Constructs a row whose data source wraps a full property path within the owning table.
// Child rows are generated immediately so the table can display them.
FPropertyTableRow::FPropertyTableRow( const TSharedRef< class IPropertyTable >& InTable, const TSharedRef< FPropertyPath >& InPropertyPath, const TSharedRef< FPropertyPath >& InPartialPropertyPath )
	: DataSource( MakeShareable( new PropertyPathDataSource( InPropertyPath ) ) )
	, Table( InTable )
	, Children()
	, PartialPath( InPartialPropertyPath )
{
	GenerateChildren();
}
// Constructs a row whose data source wraps a (possibly stale) UObject reference.
// Child rows are generated immediately so the table can display them.
FPropertyTableRow::FPropertyTableRow( const TSharedRef< class IPropertyTable >& InTable, const TWeakObjectPtr< UObject >& InObject, const TSharedRef< FPropertyPath >& InPartialPropertyPath )
	: DataSource( MakeShareable( new UObjectDataSource( InObject ) ) )
	, Table( InTable )
	, Children()
	, PartialPath( InPartialPropertyPath )
{
	GenerateChildren();
}
// Performs one-time setup of this detail item: registers for ticking when the
// node's content can change dynamically, initializes whichever customization
// kind is present, applies forced auto-expansion, caches the initial
// visibility, and finally generates child nodes.
void FDetailItemNode::Initialize()
{
	// The node must tick when it has widgets that can dynamically come and go.
	const bool bNeedsTick =
		   ( Customization.HasCustomWidget() && Customization.WidgetDecl->VisibilityAttr.IsBound() )
		|| ( Customization.HasCustomBuilder() && Customization.CustomBuilderRow->RequiresTick() )
		|| ( Customization.HasPropertyNode() && Customization.PropertyRow->RequiresTick() )
		|| ( Customization.HasGroup() && Customization.DetailGroup->RequiresTick() );

	if( bNeedsTick )
	{
		bTickable = true;
		ParentCategory.Pin()->AddTickableNode( *this );
	}

	// Initialize the content this node carries; the kinds are mutually exclusive.
	if( Customization.HasPropertyNode() )
	{
		InitPropertyEditor();
	}
	else if( Customization.HasCustomBuilder() )
	{
		InitCustomBuilder();
	}
	else if( Customization.HasGroup() )
	{
		InitGroup();
	}

	// Property rows may request to start out expanded.
	if( Customization.PropertyRow.IsValid() && Customization.PropertyRow->GetForceAutoExpansion() )
	{
		SetExpansionState( true );
	}

	// Cache the visibility of customizations that can set it, so later ticks
	// can detect when it changes.
	if( Customization.HasCustomWidget() )
	{
		CachedItemVisibility = Customization.WidgetDecl->VisibilityAttr.Get();
	}
	else if( Customization.HasPropertyNode() )
	{
		CachedItemVisibility = Customization.PropertyRow->GetPropertyVisibility();
	}
	else if( Customization.HasGroup() )
	{
		CachedItemVisibility = Customization.DetailGroup->GetGroupVisibility();
	}

	const bool bRefreshFilteredNodes = false;
	GenerateChildren( bRefreshFilteredNodes );
}
// Sanity-checks a completed proof: walks the (dis)proof tree recorded in the
// hash table and verifies that the stored bounds at every visited node agree
// with 'winner', and that any terminal state reached is actually won by
// 'winner'. Emits a warning and returns false on the first inconsistency.
bool DfpnSolver::Validate(DfpnHashTable& hashTable, const SgBlackWhite winner,
                          SgSearchTracer& tracer)
{
    SG_ASSERT_BW(winner);
    DfpnData data;
    if (! TTRead(data))
    {
        // No entry for this position yet: search it first so there is a
        // record to validate against.
        PointSequence pv;
        StartSearch(hashTable, pv);
        const bool wasRead = TTRead(data);
        SG_DEBUG_ONLY(wasRead);
        SG_ASSERT(wasRead);
    }

    // OR node: the claimed winner is to move, so the stored bounds must prove
    // a win. AND node: the opponent is to move, so they must prove a loss.
    const bool orNode = (winner == GetColorToMove());
    if (orNode)
    {
        if (! data.m_bounds.IsWinning())
        {
            SgWarning() << "OR not winning. DfpnData:" << data << std::endl;
            return false;
        }
    }
    else // AND node
    {
        if (! data.m_bounds.IsLosing())
        {
            SgWarning() << "AND not losing. DfpnData:" << data << std::endl;
            return false;
        }
	}

    // If the game is over at this position, the actual result must match.
    SgEmptyBlackWhite currentWinner;
    if (TerminalState(GetColorToMove(), currentWinner))
    {
        if (winner == currentWinner)
            return true;
        else
        {
            SgWarning() << "winner disagreement: " 
                << SgEBW(winner) << ' ' << SgEBW(currentWinner) 
                << std::endl;
            return false;
        }
    }

    // At an OR node it suffices to validate the single stored best move;
    // at an AND node every legal reply must be validated.
    std::vector<SgMove> moves;
    if (orNode)
        moves.push_back(data.m_bestMove);
    else // AND node
        GenerateChildren(moves);

    // recurse
    for (std::vector<SgMove>::const_iterator it = moves.begin();
         it != moves.end(); ++it)
    {
        tracer.AddTraceNode(*it, GetColorToMove());
        PlayMove(*it);
        // NOTE(review): on failure this returns with the move still played
        // and the trace node still open — presumably so the failing line
        // remains visible in the tracer/board state; confirm callers expect
        // the position to be left mid-line after a failed validation.
        if (! Validate(hashTable, winner, tracer))
            return false;
        UndoMove();
        tracer.TakeBackTraceNode();
    }
    return true;
}
// Multiple iterative deepening — the df-pn workhorse. Repeatedly descends
// into the most-proving child of the current position until this node's
// proof/disproof bounds exceed maxBounds (or the search aborts). Results are
// written to the transposition table. Returns the amount of work (node
// expansions) performed by this call, excluding work from previous visits.
size_t DfpnSolver::MID(const DfpnBounds& maxBounds, DfpnHistory& history)
{
    maxBounds.CheckConsistency();
    // Solved or edge bounds must be handled by the caller, never recursed on.
    SG_ASSERT(maxBounds.phi > 1);
    SG_ASSERT(maxBounds.delta > 1);

    ++m_numMIDcalls;
    size_t prevWork = 0;
    SgEmptyBlackWhite colorToMove = GetColorToMove();

    DfpnData data;
    if (TTRead(data)) 
    {
        prevWork = data.m_work;
        if (! maxBounds.GreaterThan(data.m_bounds))
            // Estimated bounds are larger than we had
            // anticipated. The calling state must have computed
            // the max bounds with out of date information, so just
            // return here without doing anything: the caller will
            // now update to this new info and carry on.
            return 0;
    }
    else
    {
        // First visit to this position: if it is terminal, store exact
        // winning/losing bounds and return immediately.
        SgEmptyBlackWhite winner = SG_EMPTY;
        if (TerminalState(colorToMove, winner))
        {
            ++m_numTerminal;
            DfpnBounds terminal;
            if (colorToMove == winner)
                DfpnBounds::SetToWinning(terminal);
            else
            {
                SG_ASSERT(SgOppBW(colorToMove) == winner);
                DfpnBounds::SetToLosing(terminal);
            }
            // Work of 1 accounts for expanding this terminal node.
            TTWrite(DfpnData(terminal, SG_NULLMOVE, 1));
            return 1;
        }
    }
    
    ++m_generateMoves;
    DfpnChildren children;
    GenerateChildren(children.Children());

    // Not thread safe: perhaps move into while loop below later...
    std::vector<DfpnData> childrenData(children.Size());
    for (size_t i = 0; i < children.Size(); ++i)
        LookupData(childrenData[i], children, i);
    // Index used for progressive widening
    size_t maxChildIndex = ComputeMaxChildIndex(childrenData);

    SgHashCode currentHash = Hash();
    SgMove bestMove = SG_NULLMOVE;
    DfpnBounds currentBounds;
    size_t localWork = 1;
    do
    {
        // Stop expanding once this node's bounds reach the threshold set by
        // the parent.
        UpdateBounds(currentBounds, childrenData, maxChildIndex);
        if (! maxBounds.GreaterThan(currentBounds))
            break;

        // Select most proving child
        std::size_t bestIndex = 999999; // sentinel; always set by SelectChild
        DfpnBoundType delta2 = DfpnBounds::INFTY; // second-smallest child delta
        SelectChild(bestIndex, delta2, childrenData, maxChildIndex);
        bestMove = children.MoveAt(bestIndex);

        // Compute maximum bound for child
        const DfpnBounds childBounds(childrenData[bestIndex].m_bounds);
        DfpnBounds childMaxBounds;
        childMaxBounds.phi = maxBounds.delta 
            - (currentBounds.delta - childBounds.phi);
        // 1+epsilon trick: enlarge the child's delta threshold beyond the
        // second-best delta so the child can be searched longer before
        // control returns here.
        childMaxBounds.delta = delta2 == DfpnBounds::INFTY ? maxBounds.phi :
            std::min(maxBounds.phi,
                     std::max(delta2 + 1, DfpnBoundType(delta2 * (1.0 + m_epsilon))));
        SG_ASSERT(childMaxBounds.GreaterThan(childBounds));
        if (delta2 != DfpnBounds::INFTY)
            m_deltaIncrease.Add(float(childMaxBounds.delta-childBounds.delta));

        // Recurse on best child
        PlayMove(bestMove);
        history.Push(bestMove, currentHash);
        localWork += MID(childMaxBounds, history);
        history.Pop();
        UndoMove();

        // Update bounds for best child
        LookupData(childrenData[bestIndex], children, bestIndex);

        // Compute some stats when find winning move
        if (childrenData[bestIndex].m_bounds.IsLosing())
        {
            m_moveOrderingIndex.Add(float(bestIndex));
            m_moveOrderingPercent.Add(float(bestIndex) 
                                      / (float)childrenData.size());
            // Work spent on this node other than on the winning child's
            // subtree is counted as wasted.
            m_totalWastedWork += prevWork + localWork
                - childrenData[bestIndex].m_work;
        }
        else if (childrenData[bestIndex].m_bounds.IsWinning())
            // The progressive-widening index may change now that this child
            // has been solved.
            maxChildIndex = ComputeMaxChildIndex(childrenData);

    } while (! CheckAbort());

    // Find the most delaying move for losing states, and the smallest
    // winning move for winning states.
    if (currentBounds.IsSolved())
    {
        if (currentBounds.IsLosing())
        {
            std::size_t maxWork = 0;
            for (std::size_t i = 0; i < children.Size(); ++i)
            {
                if (childrenData[i].m_work > maxWork)
                {
                    maxWork = childrenData[i].m_work;
                    bestMove = children.MoveAt(i);
                }
            }
        }
        else
        {
            std::size_t minWork = DfpnBounds::INFTY;
            for (std::size_t i = 0; i < children.Size(); ++i)
            {
                if (childrenData[i].m_bounds.IsLosing() 
                    && childrenData[i].m_work < minWork)
                {
                    minWork = childrenData[i].m_work;
                    bestMove = children.MoveAt(i);
                }
            }
        }
    }
    
    // Store search results
    TTWrite(DfpnData(currentBounds, bestMove, localWork + prevWork));
    return localWork;
}