Code example #1
/** Process the file list and extract training pairs from each file.
@param files A list of SGF files to extract patterns from. Entries must be
full path names including the file name, e.g. "c:\Docs\Cheese\Toe.txt".
@param db The NNDatabase to store the training pairs in.
@param movesFrom The first move to extract training pairs from (0 = start of game).
@param movesTo The last move to extract training pairs to (0 = no limit).
@param rankRange Optional rank filter; games outside the range are skipped (NULL = no filtering). */
void Urgency3BPNGoTrainer::extractPairs(vector<string>& files, 
		NNDatabase* db, int movesFrom /*=0*/, int movesTo /*=0*/, const GoRankRange* rankRange /*=NULL*/) const
{	
	LogWriter log;
	log.println("Processing file list", goAdapter->getBPN().getTypeString());
	
	string message;

	for(int i=0;i<files.size();i++)
	{
		log.println("Reading: "+files[i], goAdapter->getBPN().getTypeString());
		SGFReader sgf;
		if(!sgf.load(files[i]))
		{
			log.println("SGF failed to load");
			continue;
		}
		if(rankRange==NULL || rankRangeFilterSGF(sgf, *rankRange))
			extractTrainingPairs(&sgf, db, movesFrom, movesTo);
		else
		{
			message = "* Skipping: ";
			message+=files[i];
			log.println(message);
		}
	}
	log.println("Finished processing file list", goAdapter->getBPN().getTypeString());
}
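A minimal usage sketch for extractPairs(), assuming an already constructed Urgency3BPNGoTrainer named trainer and a default-constructible NNDatabase (both assumptions; the relevant constructors are not shown in these excerpts):

// Sketch only: 'trainer' and the NNDatabase default constructor are assumed.
vector<string> files;
files.push_back("c:\\sgf\\game1.sgf");	// full path names, as required above
files.push_back("c:\\sgf\\game2.sgf");

NNDatabase db;
// Default arguments: extract from every move, no rank filtering.
trainer.extractPairs(files, &db);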
Code example #2
/** Extract training pairs from all SGF files found in the
specified directory.
@param path The directory to search for SGF files.
@param db The NNDatabase to store the training pairs in.
@param rankRange Optional rank filter passed on to extractPairs(); NULL disables filtering. */
void Urgency3BPNGoTrainer::extractPairsFromDirectory(string path, NNDatabase* db, const GoRankRange* rankRange /*=NULL*/) const
{
	LogWriter log;
	char buffer[200];
	if(_getcwd(buffer, 200)==NULL)
	{
		log.println("Error with _getcwd");
		return;
	}
	if(_chdir(path.c_str())==-1)
	{
		log.println("Error changing directory");
		return;
	}

	vector<string> files;
	getFileList(files, "*.mgt");
	getFileList(files, "*.sgf");


	if(files.size()<=0)
	{
		string message = "No appropriate files found in ";
		message+=path;
		log.println(message);
		// change back to original directory
		_chdir(buffer);
		return;
	}
	
	extractPairs(files, db, 0, 0, rankRange);
	// change back to original directory
	_chdir(buffer);
}
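The manual _chdir(buffer) calls on both exit paths are easy to miss if another early return is added. One possible alternative, shown here only as a sketch built on the same _getcwd/_chdir CRT calls and not taken from the project, is a small RAII guard:

#include <direct.h>

// Sketch: restores the original working directory automatically when the
// guard goes out of scope, covering every return path.
class CwdGuard
{
public:
	CwdGuard() { ok = (_getcwd(saved, (int)sizeof(saved)) != NULL); }
	~CwdGuard() { if(ok) _chdir(saved); }
private:
	char saved[260];
	bool ok;
};

Declared at the top of extractPairsFromDirectory(), such a guard would make the two explicit _chdir(buffer) calls unnecessary.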
Code example #3
File: BPN.cpp  Project: julianchurchill/Melkor
/** Run every training pair in the database through the network and count
how many outputs exactly match the stored target output. */
void BPN::completenessTest(NNDatabase& db)
{
	LogWriter log;
	string message;
	char buffer[50];

	log.println("Running training completeness test.");

	vector<Matrix<float> >& input = db.getTrainingInput();
	vector<Matrix<float> >& output = db.getTrainingOutput();
	int correct = 0;
	for(int i=0;i<input.size();i++)
	{
		getAnswer(input[i]);
		if(outputs[outputs.size()-1]==output[i])
			correct++;
		if(i%100==0)
		{
			log.print(".");
		}
	}

	log.print("\n");
	sprintf(buffer, "%d", correct);
	message+=buffer;
	message+= " correct out of ";
	sprintf(buffer, "%d", (int)input.size());
	message+=buffer;
	log.println(message);
	// reset the message before reusing it for the percentage line
	message = "";
	int percent = 0;
	if(input.size()>0)
		percent = (int)(((float)correct/(float)input.size())*100.0f);
	sprintf(buffer, "%d", percent);
	message+=buffer;
	message+="% correct";
	log.println(message);
	log.println("Completeness test finished.");
}
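The correct/total calculation above can be factored into a small helper; this sketch (illustrative only, not part of the BPN class) mirrors the guarded percentage computation:

// Sketch: exact-match accuracy as an integer percentage, guarding against
// an empty training set.
int accuracyPercent(int correct, int total)
{
	if(total <= 0)
		return 0;
	return (int)(((float)correct / (float)total) * 100.0f);
}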
Code example #4
/** Extract all training pairs from an SGF file using the given
SGFReader object and add them to the given NNDatabase.
@param sgf An SGFReader object to use to read the file it represents.
@param database The NNDatabase to store the training pairs in.
@param movesFrom The first move to extract training pairs from (0 = start of game).
@param movesTo The last move to extract training pairs to (0 = no limit).
@param lookahead Unused in this implementation.
@param quiet If true, suppress the per-move progress output. */
void Urgency3BPNGoTrainer::extractTrainingPairs(SGFReader* sgf, NNDatabase* database,
			int movesFrom /*=0*/, int movesTo /*=0*/, int lookahead /*=0*/, bool quiet /*=false*/) const
{
	// at each step in sgf file, score current move with current urgency net
	// then score next move of same colour
	// if first move doesn't score at least 0.2 higher than second
	// add as training pairs:
	// 1. First move output should be second move score +0.2 (0.9 max)
	// 2. Second move output should be first move score -0.2 (0.1 min)

	LogWriter log;
	string message;

	if(movesFrom > movesTo)
	{
		log.println("Start move is greater than end move.");
		return;
	}

	vector<Move> moves;
	sgf->getTree().getAllPrimaryMoves(moves);
	if(moves.size()==0)
		return;

	// if file already in the database skip it
	if(!database->addSignature(sgf->getSignature()))
	{
		log.println("File already in database.");
		return;
	}

	int size = 19;
	string v;
	if(sgf->getBoardSize(v))
		size = atoi(v.c_str());
	if(size>19 || size<5) 
	{
		message = "Boardsize too small or large to handle: ";
		message+=v;
		log.println(message);
		return;
	}
	BoardStruct board(size);
	
	vector<Move> futureMoves;
	int moveNumber=0;
	
	string c;
	Matrix<float> currentInput(1, goAdapter->getBPN().getWeights()[0].getHeight());
	Matrix<float> nextInput(1, goAdapter->getBPN().getWeights()[0].getHeight());
	Matrix<float> output(1, 1);
	Matrix<float>* answers;
	vector<vector<float> > tv;
	tv.resize(1);
	tv[0].resize(1);
	// colour of new move
	int colour;
	
	board.clear();
	// add any setup stones specified in the SGF root node
	setupBoardFromSGF(*sgf, board);

	SGFNode* nextNode = &(sgf->getRootNode());
	bool useThisMove = true;
	

	// continue until a null node forces the loop to break
	while (true) {
		// check bounds if necessary (counted in full moves, i.e. pairs of individual moves)
		if(movesTo!=0) {
			if(moveNumber/2 >= movesTo) break;
			useThisMove = (moveNumber/2 >= movesFrom);
		}

		// Always step through the game record, even for moves outside the
		// requested range, so the board position stays in sync.
		nextNode = nextNode->getChild();
		if(nextNode==NULL)
			break;

		// determine colour of move
		vector<string> vs;
		if (nextNode->getProperty(SGFProperty::blackMoveTag, vs)) {
			colour = BLACK;
			c = vs[0];
		}
		else if (nextNode->getProperty(SGFProperty::whiteMoveTag, vs)) {
			colour = WHITE;
			c = vs[0];
		}
		else break;

		if(useThisMove) {
			// find the next move of the same colour to compare with
			futureMoves.clear();
			// NOTE: The first move returned by getLookaheadMoves() is
			// always the current one, so we need 2 lookahead moves
			nextNode->getLookaheadMoves(2, colour, futureMoves);
			if(futureMoves.size()==2 && Move::SGFToX(c)!=-1 && Move::SGFToY(c)!=-1
				&& futureMoves[1].getX()!=-1 && futureMoves[1].getY()!=-1) {
				// At each step in the sgf file, score the current move with the
				// current urgency net, then score the next move of the same colour.
				// If the first move doesn't score at least 0.2 higher than the
				// second, add as training pairs:
				// 1. First move output should be second move score +0.2 (0.9 max)
				// 2. Second move output should be first move score -0.2 (0.1 min)

				// get current move score
				goAdapter->getInput(Move::SGFToX(c), Move::SGFToY(c), board, currentInput, colour);
				goAdapter->getBPN().getAnswer(currentInput);
				answers = &(goAdapter->getBPN().getOutputs()[goAdapter->getBPN().getNumberOfLayers()-1]);
				float currentMoveScore = answers->getValue(0, 0);

				// get next move score
				goAdapter->getInput(futureMoves[1].getX(), futureMoves[1].getY(), board, nextInput, colour);
				goAdapter->getBPN().getAnswer(nextInput);
				answers = &(goAdapter->getBPN().getOutputs()[goAdapter->getBPN().getNumberOfLayers()-1]);
				float nextMoveScore = answers->getValue(0, 0);

				// if the current move already scores more than 0.2 higher than
				// the next move, don't train; otherwise add a pair for each move
				if(currentMoveScore<=(nextMoveScore+0.2f)) {
					if((nextMoveScore+0.2f)>0.9f)
						tv[0][0] = 0.9f;
					else
						tv[0][0] = nextMoveScore+0.2f;
					output.setValues(tv);
					database->addTrainingPair(&currentInput, &output);

					if((currentMoveScore-0.2f)<0.1f)
						tv[0][0] = 0.1f;
					else
						tv[0][0] = currentMoveScore-0.2f;
					output.setValues(tv);
					database->addTrainingPair(&nextInput, &output);
				}
			}
		} // end if(useThisMove)

		// play the move on the board whether or not it was used for training
		board.setPoint(Move::SGFToX(c), Move::SGFToY(c), colour);
		if(!quiet)
			log.print(".");
		moveNumber++;
	} // end while(true)
	if(!quiet)
		log.print("\n");
	//database.save();
} // end extractTrainingPairs
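The training targets built above follow the rule spelled out in the comments: the first move's target is the second move's score plus 0.2 (capped at 0.9), and the second move's target is the first move's score minus 0.2 (floored at 0.1). A hypothetical helper capturing just that clamping rule (names are illustrative, not from the project):

// Sketch: the +/-0.2 urgency margin with the 0.1/0.9 clamps used when
// building the training targets above.
float clampTarget(float value)
{
	if(value > 0.9f) return 0.9f;
	if(value < 0.1f) return 0.1f;
	return value;
}

// target for the first (played) move:  clampTarget(nextMoveScore + 0.2f)
// target for the second move:          clampTarget(currentMoveScore - 0.2f)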
Code example #5
File: BPN.cpp  Project: julianchurchill/Melkor
/** Load a BPN network from the specified file.
Save version differences:
Version 0: Includes saveVersion, type, learningRate, momentum, epochsCompleted and weights.
Version 1: Adds lastPatternTest, patternsCompleted.
Version 2: Adds dynamicLearningRate, dynamicMomentum.
Version 3: Adds inputFieldShape.
@param f The filename to read this BPN network from. 
@param quiet An optional parameter to select level of text output, default is false.
@see BPN::save() */
bool BPN::load(string f, bool quiet /*=false*/)
{
	LogWriter log;
	string message = "Loading ";
	message+=f;
	if(!quiet)
		log.println(message);

	ifstream in(f.c_str(), ios::binary);

	if(!in)
	{
		message = "BPN: Could not load file: ";
		message+=f;
		LogWriter::printerr(message+"\n");
		return false;
	}

	this->filename = f;

	// read save version
	in.read(reinterpret_cast<char*>(&saveVersion), sizeof(int));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}

	// read type
	in.read(reinterpret_cast<char*>(&id), sizeof(int));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}


	// load learning rate
	in.read(reinterpret_cast<char*>(&learningRate), sizeof(float));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}

	// load momentum
	in.read(reinterpret_cast<char*>(&momentum), sizeof(float));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}

	// load the number of epochs this net has completed
	in.read(reinterpret_cast<char*>(&epochsCompleted), sizeof(int));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}

	// load the number of weight layers, then the weights themselves as
	// matrices, with the top row of each matrix holding the bias weights
	int it = 0;
	in.read(reinterpret_cast<char*>(&it), sizeof(int));
	if(in.fail()) 
	{
		LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
		return false;
	}

	activationFunction.clear();
	weights.resize(it);
	biasWeights.resize(it);
	errors.resize(it);
	outputs.resize(it+1);
//	net.resize(it);
	// load layer sizes - width, height
	Matrix<float>* temp;
	for(int l=0;l<it;l++)
	{
		temp = Matrix<float>::load(in);
		if(temp==NULL)
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}

		//weights[l] = *temp;
		biasWeights[l].resize(temp->getWidth(), 1);
		weights[l].resize(temp->getWidth(), temp->getHeight()-1);
		for(int y=0;y<temp->getHeight();y++)
		{
			for(int x=0;x<temp->getWidth();x++)
			{
				// if this is the bias weight row
				if(y==0)
					biasWeights[l].setValue(x, 0, temp->getValue(x, 0));
				else
					weights[l].setValue(x, y-1, temp->getValue(x, y));
			}
		}
		delete temp;
		temp = NULL;
		activationFunction.push_back(SIGMOID);
	}

	weightsSize = weights.size();

	// load lastPatternTest and patternsCompleted
	if(saveVersion>0)
	{
		in.read(reinterpret_cast<char*>(&lastPatternTest), sizeof(double));
		if(in.fail()) 
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}
		in.read(reinterpret_cast<char*>(&patternsCompleted), sizeof(double));
		if(in.fail()) 
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}
		dynamicLearningRate = false;
		dynamicMomentum = false;
	}

	if(saveVersion>1)
	{
		in.read(reinterpret_cast<char*>(&dynamicLearningRate), sizeof(bool));
		if(in.fail()) 
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}
		in.read(reinterpret_cast<char*>(&dynamicMomentum), sizeof(bool));
		if(in.fail()) 
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}
	}

	if(saveVersion>2)
	{
		in.read(reinterpret_cast<char*>(&inputFieldShape), sizeof(int));
		if(in.fail()) 
		{
			LogWriter::printerr("Corrupt or incorrect version of .bpn file, aborting...\n", typeToString(id));
			return false;
		}
	}

	in.close();

	resetWeightChanges();
	
	return true;
}
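Every field in load() is read with the same read-then-check pattern. As a sketch only (not code from the project, and assuming the usual std names are visible as they are in BPN.cpp), that pattern could be factored into a checked-read helper:

// Sketch: read one plain-old-data field and report failure in one place,
// instead of repeating the read/fail/printerr block for every field.
template <typename T>
bool readField(ifstream& in, T& value, const string& what)
{
	in.read(reinterpret_cast<char*>(&value), sizeof(T));
	if(in.fail())
	{
		LogWriter::printerr("Failed reading " + what + " from .bpn file, aborting...\n");
		return false;
	}
	return true;
}

// Example: if(!readField(in, learningRate, "learning rate")) return false;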
Code example #6
File: BPN.cpp  Project: julianchurchill/Melkor
/** Print useful information about this object, optionally including
all the weights for each layer. */
void BPN::printInfo(bool printWeights /* = true */) const
{
	LogWriter log;
	char buffer[256];	// large enough to hold the filename line
	sprintf(buffer, "Filename: %s", filename.c_str());
	log.println(buffer);
	sprintf(buffer, "Save version: %d", saveVersion);
	log.println(buffer);
	sprintf(buffer, "Type: %s", typeToString(id));
	log.println(buffer);
	sprintf(buffer, "Learning rate: %*g", 7, learningRate);
	log.println(buffer);
	sprintf(buffer, "Momentum: %*g", 7, momentum);
	log.println(buffer);
	sprintf(buffer, "Epochs completed: %d", epochsCompleted);
	log.println(buffer);
	sprintf(buffer, "Patterns completed: %g", patternsCompleted);
	log.println(buffer);
	sprintf(buffer, "Last pattern test: %g", lastPatternTest);
	log.println(buffer);
	if(dynamicLearningRate)
		sprintf(buffer, "Dynamic learning rate: True");
	else
		sprintf(buffer, "Dynamic learning rate: False");
	log.println(buffer);
	if(dynamicMomentum)
		sprintf(buffer, "Dynamic momentum: True");
	else
		sprintf(buffer, "Dynamic momentum: False");
	log.println(buffer);

	if(inputFieldShape==IFS_SQUARE)
		log.println("Input field shape: IFS_SQUARE");
	else if(inputFieldShape==IFS_DIAMOND)
		log.println("Input field shape: IFS_DIAMOND");
	else
		log.println("Input field shape: UNKNOWN");

	// output neurons info
	sprintf(buffer, "Input neurons: %d", weights[0].getHeight());
	log.println(buffer);
	for(int i=1;i<weights.size();i++)
	{
		sprintf(buffer, "Hidden neurons(%d): %d", i-1, weights[i].getHeight());
		log.println(buffer);
	}
	sprintf(buffer, "Output neurons: %d", weights[weights.size()-1].getWidth());
	log.println(buffer);

	if(printWeights)
	{
		for(int i=0;i<weights.size();i++)
		{
			sprintf(buffer, "Weights for layer %d:", i);
			log.println(buffer);
			getWeights()[i].printInfo();
		}
		for(int i=0;i<biasWeights.size();i++)
		{
			sprintf(buffer, "Bias weights for layer %d:", i);
			log.println(buffer);
			biasWeights[i].printInfo();
		}
	}

	// print rank test values
//	if(printTestResults)
//	{
//		log.print("Rank test values: ");
//		for(i=0;i<rankTestValue.size();i++)
//		{
//			sprintf(buffer, "%d", rankTestEpoch[i]);
//			message+=buffer;
//			message+="[";
//			sprintf(buffer, "%*g", 9, rankTestValue[i]);
//			message+=buffer;
//			message+="] ";
//			log.print(message);
//		}
//	}
	log.print("\n");
}
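A minimal usage sketch tying load() and printInfo() together, assuming BPN has a default constructor (not shown in these excerpts) and using an illustrative filename:

// Sketch: load a saved network and print its parameters without the full
// weight dump. The default constructor and filename are assumptions.
BPN net;
if(net.load("urgency.bpn"))
	net.printInfo(false);
else
	LogWriter::printerr("Could not load network file.\n");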