Code Example #1
File: SGFNode.cpp  Project: julianchurchill/Melkor
/** @brief Print the property information for this node. Recursively look at
  * children also, but only the primary child - siblings are not looked at. */
void SGFNode::printInfo() const
{
	LogWriter log;
	set<SGFProperty, less<SGFProperty> >::const_iterator citer;
	citer = properties.begin();
	while(citer!=properties.end()) {
		log.print(citer->getName());
		log.print(" = ");
		vector<string> v = citer->getValues();
		for(int i=0;i<v.size();i++) {
			log.print(v.at(i));
			log.print(" ");
		}
		log.print("\n");
		citer++;
	}

	/** @todo Print info on siblings also... */
	if(child!=NULL)
		child->printInfo();
}
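As a quick orientation for the function above, here is a minimal usage sketch. It is hypothetical: getRootNode() is borrowed from Code Example #3 below, the header name is assumed, and how an SGFReader gets loaded is not shown in these examples.

#include "SGFReader.h"   // assumed header name -- not shown in these listings

// Hypothetical usage sketch: dump the properties of every node on the
// main line of a parsed game. Only getRootNode() (see Code Example #3)
// and printInfo() are taken from the listings.
void dumpMainLine(SGFReader& sgf)
{
	// printInfo() logs each property of the root node, then recurses
	// into the primary child only -- sibling variations are skipped.
	sgf.getRootNode().printInfo();
}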
Code Example #2
File: BPN.cpp  Project: julianchurchill/Melkor
void BPN::completenessTest(NNDatabase& db)
{
	LogWriter log;
	string message;
	char buffer[50];

	log.println("Running training completeness test.");

	vector<Matrix<float> >& input = db.getTrainingInput();
	vector<Matrix<float> >& output = db.getTrainingOutput();
	int correct = 0;
	for(int i=0;i<input.size();i++)
	{
		getAnswer(input[i]);
		if(outputs[outputs.size()-1]==output[i])
			correct++;
		//delete bpnOutput;
		if(i%100==0)
		{
			log.print(".");
		}
	}

	log.print("\n");
	sprintf(buffer, "%d", correct);
	message+=buffer;
	message+= " correct out of ";
	sprintf(buffer, "%d", (int)input.size());
	message+=buffer;
	log.println(message);
	// start a fresh message so the percentage line is not appended to the previous one
	message = "";
	int percent = ((float)correct/(float)input.size())*(float)100;
	sprintf(buffer, "%d", percent);
	message+=buffer;
	message+="% correct";
	log.println(message);
	log.println("Completeness test finished.");
}
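One readability note on the block above: the summary lines are assembled with sprintf into a fixed 50-character buffer. Below is a sketch of the same two lines built with std::ostringstream instead. This is an alternative formulation, not the project's code; the only Melkor call assumed is LogWriter::println taking a string, as used in the listings above.

#include <sstream>
#include <string>
// LogWriter header include omitted -- its name is not shown in these examples.

// Build the "N correct out of M" and "P% correct" lines without a
// fixed-size char buffer.
void logCompletenessSummary(LogWriter& log, int correct, int total)
{
	std::ostringstream os;
	os << correct << " correct out of " << total;
	log.println(os.str());

	os.str("");   // reuse the stream for the percentage line
	int percent = (int)(100.0f * (float)correct / (float)total);
	os << percent << "% correct";
	log.println(os.str());
}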
Code Example #3
/** Extract all training pairs from an SGF file using the given
SGFReader object and add them to the given NNDatabase.
@param sgf An SGFReader object to use to read the file it
represents.
@param database The NNDatabase to store the training pairs in.
@param movesFrom Move number (in ply, i.e. moveNumber/2) to start extracting from.
@param movesTo Move number (in ply) to stop at; 0 means no upper bound.
@param lookahead Unused in this function.
@param quiet If true, suppress the progress dots written to the log. */
void Urgency3BPNGoTrainer::extractTrainingPairs(SGFReader* sgf, NNDatabase* database,
			int movesFrom /*=0*/, int movesTo /*=0*/, int lookahead /*=0*/, bool quiet /*=false*/) const
{
	// at each step in sgf file, score current move with current urgency net
	// then score next move of same colour
	// if first move doesn't score at least 0.2 higher than second
	// add as training pairs:
	// 1. First move output should be second move score +0.2 (0.9 max)
	// 2. Second move output should be first move score -0.2 (0.1 min)

	LogWriter log;
	string message;
	//char buffer[50];

	if(movesFrom > movesTo)
	{
		log.println("Start move is greater than end move.");
		return;
	}

	vector<Move> moves;
	sgf->getTree().getAllPrimaryMoves(moves);
	if(moves.size()==0)
		return;

	// if file already in the database skip it
	if(!database->addSignature(sgf->getSignature()))
	{
		log.println("File already in database.");
		return;
	}

	int size = 19;
	string v;
	if(sgf->getBoardSize(v))
		size = atoi(v.c_str());
	if(size>19 || size<5) 
	{
		message = "Boardsize too small or large to handle: ";
		message+=v;
		log.println(message);
		return;
	}
	BoardStruct board(size);
	
	vector<Move> futureMoves;
	int moveNumber=0;
	
	string c;
	Matrix<float> currentInput(1, goAdapter->getBPN().getWeights()[0].getHeight());
	Matrix<float> nextInput(1, goAdapter->getBPN().getWeights()[0].getHeight());
	Matrix<float> output(1, 1);
	Matrix<float>* answers;
//	vector<Matrix<float> > temp(goAdapter->getBPN().getNumberOfLayers());
	vector<vector<float> > tv;
	tv.resize(1);
	tv[0].resize(1);
	// colour of new move
	int colour;
	
	//sgf->initBoard(board);
	board.clear();
	// add setup points
	setupBoardFromSGF(*sgf, board);
/*	vector<Move> props;
	if(sgf->getRootNode()->getEmptySetup(props))
	{
		for(int i=0;i<props.size();i++)
			board.setPoint(props[i].getX(), props[i].getY(), EMPTY, false);
	}
	if(sgf->getRootNode()->getBlackSetup(props))
	{
		for(int i=0;i<props.size();i++)
			board.setPoint(props[i].getX(), props[i].getY(), BLACK, false);
	}
	if(sgf->getRootNode()->getWhiteSetup(props))
	{
		for(int i=0;i<props.size();i++)
			board.setPoint(props[i].getX(), props[i].getY(), WHITE, false);
	} */

	SGFNode* nextNode = &(sgf->getRootNode());
	bool useThisMove = true;
	
	bool currentIsMoveA = true;
	bool getInputSuccess = false;

	// continue until a null node forces the loop to break
	while (true) {
		// check bounds if necessary
		if(movesTo!=0) {
			// use ply instead of individual moves
			if(moveNumber/2 >= movesTo) break;
			else if(moveNumber/2 < movesFrom) useThisMove = false;
			else useThisMove = true;
		}

		if(movesTo==0 || useThisMove) {
			nextNode = nextNode->getChild();
			if(nextNode==NULL)
				break;
			
			// determine colour of move	
			vector<string> vs;
			if (nextNode->getProperty(SGFProperty::blackMoveTag, vs)) {
				colour = BLACK;
				c = vs[0];
			}
			else if (nextNode->getProperty(SGFProperty::whiteMoveTag, vs)) {
				colour = WHITE;
				c = vs[0];
			}
			else break;
		
			// find our next move to compare with
			// check 
			futureMoves.clear();
			// NOTE: The first move returned by getLookaheadMoves() is
			// always the current one, so we need 2 lookahead moves
			nextNode->getLookaheadMoves(2, colour, futureMoves);
			if(futureMoves.size()==2 && Move::SGFToX(c)!=-1 && Move::SGFToY(c)!=-1
				&& futureMoves[1].getX()!=-1 && futureMoves[1].getY()!=-1) {
				// At each step in sgf file, score current move with current urgency net
				// then score next move of same colour
				// if first move doesn't score at least 0.2 higher than second
				// add as training pairs:
				// 1. First move output should be second move score +0.2 (0.9 max)
				// 2. Second move output should be first move score -0.2 (0.1 min)

				// get current move score
				goAdapter->getInput(Move::SGFToX(c), Move::SGFToY(c), board, currentInput, colour);
				goAdapter->getBPN().getAnswer(currentInput);
				answers = &(goAdapter->getBPN().getOutputs()[goAdapter->getBPN().getNumberOfLayers()-1]);
				float currentMoveScore = answers->getValue(0, 0);

				// get next move score
				goAdapter->getInput(futureMoves[1].getX(), futureMoves[1].getY(), board, nextInput, colour);
				goAdapter->getBPN().getAnswer(nextInput);
				answers = &(goAdapter->getBPN().getOutputs()[goAdapter->getBPN().getNumberOfLayers()-1]);
				float nextMoveScore = answers->getValue(0, 0);

				// if current scores higher than next move by 0.2
				// don't train otherwise do train
				if(currentMoveScore<=(nextMoveScore+0.2)) {
					if((nextMoveScore+0.2)>0.9)
						tv[0][0] = 0.9f;
					else
						tv[0][0] = nextMoveScore+0.2;
					output.setValues(tv);
					database->addTrainingPair(&currentInput, &output);

					if((currentMoveScore-0.2)<0.1)
						tv[0][0] = 0.1f;
					else
						tv[0][0] = currentMoveScore-0.2;
					output.setValues(tv);
					database->addTrainingPair(&nextInput, &output);
				}
			}
		} // end if(movesTo==0 || useThisMove)
		
		board.setPoint(Move::SGFToX(c), Move::SGFToY(c), colour);
		if(!quiet)
			log.print(".");
		moveNumber++;
		// save after every board position has been
		// analysed and moves extracted
		// database.save();
	} // end while(true)
	if(!quiet)
		log.print("\n");
	//database.save();
} // end extractTrainingPairs
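The margin-and-clamp rule that produces the two addTrainingPair() calls above can be written out on its own. A minimal sketch using plain floats only; the function name is made up for illustration and no Melkor types are involved.

#include <algorithm>
#include <utility>

// Target outputs when the played move does NOT already score at least 0.2
// above the next move of the same colour: push the played move up to
// (nextScore + 0.2), capped at 0.9, and push the rival move down to
// (currentScore - 0.2), floored at 0.1 -- the same branches as above.
std::pair<float, float> urgencyTargets(float currentScore, float nextScore)
{
	float targetForCurrent = std::min(nextScore + 0.2f, 0.9f);
	float targetForNext    = std::max(currentScore - 0.2f, 0.1f);
	return std::make_pair(targetForCurrent, targetForNext);
}

// Example: currentScore = 0.5, nextScore = 0.85 gives targets (0.9, 0.3):
// the played move is trained towards 0.9, the rival move towards 0.3.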
Code Example #4
File: BPN.cpp  Project: julianchurchill/Melkor
/** Print useful information about this object: filename, training
parameters and layer sizes, and (if printWeights is true) the weight
and bias-weight matrices for each layer. */
void BPN::printInfo(bool printWeights /* = true */) const
{
	LogWriter log;
	char buffer[50];
	sprintf(buffer, "Filename: %s", filename.c_str());
	log.println(buffer);
	sprintf(buffer, "Save version: %d", saveVersion);
	log.println(buffer);
	sprintf(buffer, "Type: %s", typeToString(id));
	log.println(buffer);
	sprintf(buffer, "Learning rate: %*g", 7, learningRate);
	log.println(buffer);
	sprintf(buffer, "Momentum: %*g", 7, momentum);
	log.println(buffer);
	sprintf(buffer, "Epochs completed: %d", epochsCompleted);
	log.println(buffer);
	sprintf(buffer, "Patterns completed: %g", patternsCompleted);
	log.println(buffer);
	sprintf(buffer, "Last pattern test: %g", lastPatternTest);
	log.println(buffer);
	if(dynamicLearningRate)
		sprintf(buffer, "Dynamic learning rate: True");
	else
		sprintf(buffer, "Dynamic learning rate: False");
	log.println(buffer);
	if(dynamicMomentum)
		sprintf(buffer, "Dynamic momentum: True");
	else
		sprintf(buffer, "Dynamic momentum: False");
	log.println(buffer);

	if(inputFieldShape==IFS_SQUARE)
		log.println("Input field shape: IFS_SQUARE");
	else if(inputFieldShape==IFS_DIAMOND)
		log.println("Input field shape: IFS_DIAMOND");
	else
		log.println("Input field shape: UNKNOWN");

	// output neurons info
	sprintf(buffer, "Input neurons: %d", weights[0].getHeight());
	log.println(buffer);
	for(int i=1;i<weights.size();i++)
	{
		sprintf(buffer, "Hidden neurons(%d): %d", i-1, weights[i].getHeight());
		log.println(buffer);
	}
	sprintf(buffer, "Output neurons: %d", weights[weights.size()-1].getWidth());
	log.println(buffer);

	if(printWeights)
	{
		for(int i=0;i<weights.size();i++)
		{
			sprintf(buffer, "Weights for layer %d:", i);
			log.println(buffer);
			getWeights()[i].printInfo();
		}
		for(int i=0;i<biasWeights.size();i++)
		{
			sprintf(buffer, "Bias weights for layer %d:", i);
			log.println(buffer);
			biasWeights[i].printInfo();
		}
	}

	// print rank test values
//	if(printTestResults)
//	{
//		log.print("Rank test values: ");
//		for(i=0;i<rankTestValue.size();i++)
//		{
//			sprintf(buffer, "%d", rankTestEpoch[i]);
//			message+=buffer;
//			message+="[";
//			sprintf(buffer, "%*g", 9, rankTestValue[i]);
//			message+=buffer;
//			message+="] ";
//			log.print(message);
//		}
//	}
	log.print("\n");
}
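Finally, a hedged usage sketch for the function above. Only the printInfo(bool) signature is taken from the listing; how a BPN is constructed or loaded is not shown in these examples.

// Hypothetical usage: print the network summary (filename, learning rate,
// layer sizes, ...) without dumping every weight matrix.
void describeNetwork(const BPN& net)
{
	net.printInfo(false);   // false = skip the per-layer weight dumps
}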