Example #1
int main()
{
    // Get data prepared
    double **trainingData, **testData;
    //readInData(&trainingData, &testData);
    int minTestPicNum = 100;
    readInMiniData(&trainingData, minTestPicNum);

    // Get the network
    struct SimpleNet myNet;
    int inputSize = 724;
    int layerNum = 2;
    int layerSizes[2] = {100, 10};
    initNetWork(&myNet, inputSize, layerNum, layerSizes);

    // Simple test of the forward pass of the network
    TestForwardPass(trainingData, minTestPicNum, &myNet);

    // Params for learning
    // These values are hand-tuned with no principled guidance and need improvement
    double stepFactor = 0.003, minorDiff = 0.0001; // Step size for the update M(i,j) = M(i,j) - stepFactor*(partial derivative of the error w.r.t. M(i,j))

    int maxIteration = 50; // Currennt always sets this to 50, so the same value is used here

    // Start backpropagation
    //printf("\n%lf\n",(double)bp(&myNet, trainingData, minTestPicNum, stepFactor));
    return 0;
}
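
The helper routines called in Example #1 (readInMiniData, initNetWork, TestForwardPass, bp) are defined elsewhere in the project. The following is a minimal, hypothetical sketch of what the call sites above imply: a SimpleNet that keeps one weight matrix and one bias vector per layer, an initNetWork that allocates and randomly initializes them, and an applyGradientStep helper illustrating the stepFactor update rule from the comment. Every field name and the applyGradientStep function itself are assumptions, not the project's actual code.

#include <stdlib.h>

/* Hypothetical layout: one weight matrix and one bias vector per layer. */
struct SimpleNet {
    int inputSize;
    int layerNum;
    int *layerSizes;   /* number of neurons in each layer                   */
    double **weights;  /* weights[l] holds layerSizes[l] * prevSize entries */
    double **biases;   /* biases[l] holds layerSizes[l] entries             */
};

void initNetWork(struct SimpleNet *net, int inputSize, int layerNum, int *layerSizes)
{
    net->inputSize  = inputSize;
    net->layerNum   = layerNum;
    net->layerSizes = layerSizes;
    net->weights = (double **)malloc(layerNum * sizeof(double *));
    net->biases  = (double **)malloc(layerNum * sizeof(double *));

    int prevSize = inputSize;
    for (int l = 0; l < layerNum; l++) {
        int n = layerSizes[l];
        net->weights[l] = (double *)malloc((size_t)n * prevSize * sizeof(double));
        net->biases[l]  = (double *)malloc((size_t)n * sizeof(double));
        for (int i = 0; i < n * prevSize; i++)
            net->weights[l][i] = ((double)rand() / RAND_MAX - 0.5) * 0.1; /* small random weights */
        for (int i = 0; i < n; i++)
            net->biases[l][i] = 0.0;
        prevSize = n;
    }
}

/* Illustrates the update M(i,j) = M(i,j) - stepFactor * dE/dM(i,j) applied
   to a flat array of weights; the gradient itself would come from bp(). */
void applyGradientStep(double *weights, const double *grad, int count, double stepFactor)
{
    for (int i = 0; i < count; i++)
        weights[i] -= stepFactor * grad[i];
}
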
Example #2
void LogSender::sendMatches(list<MatchedLogRec>* matches)
{
	// Re-queue records that failed to send on a previous run
	readSendFaild(matches);
	// Establish the network connection (fd is presumably set up here)
	initNetWork();
	// Send records one at a time, removing each from the queue once it is sent
	while (!matches->empty())
	{
		int sfd = send(fd, &(*(matches->begin())), sizeof(MatchedLogRec), 0);
		if (sfd <= 0)
		{
			cout << "send failed!" << endl;
			break;
		}
		matches->erase(matches->begin());
	}
	close(fd);
	// Persist whatever is left so it can be retried next time
	saveSendFaild(matches);
}
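
Example #2 calls several LogSender members that are not shown. The sketch below is an assumption about how they might look next to the sendMatches() definition above: MatchedLogRec is treated as a plain-old-data struct, fd/serverIp/serverPort/backupPath are assumed members, and the unsent backlog is kept in a flat binary file. None of this is the original implementation.

#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <cstdio>
#include <list>
#include <string>
#include <iostream>
using namespace std;

// Assumed record layout; the real fields are defined elsewhere in the project.
struct MatchedLogRec { char data[256]; };

// Assumed shape of the class; only sendMatches() (shown above) is from the original.
class LogSender {
public:
	void initNetWork();
	void sendMatches(list<MatchedLogRec>* matches);
	void readSendFaild(list<MatchedLogRec>* matches);
	void saveSendFaild(list<MatchedLogRec>* matches);
private:
	int fd;                    // socket descriptor used by sendMatches()
	string serverIp;           // assumed: address of the log server
	unsigned short serverPort; // assumed
	const char* backupPath;    // assumed: on-disk backlog of unsent records
};

void LogSender::initNetWork()
{
	// Connect a TCP socket to the log server so sendMatches() can use fd.
	fd = socket(AF_INET, SOCK_STREAM, 0);
	sockaddr_in addr = {};
	addr.sin_family = AF_INET;
	addr.sin_port = htons(serverPort);
	addr.sin_addr.s_addr = inet_addr(serverIp.c_str());
	if (connect(fd, (sockaddr*)&addr, sizeof(addr)) < 0)
		cout << "connect failed!" << endl;
}

void LogSender::readSendFaild(list<MatchedLogRec>* matches)
{
	// Reload records that could not be sent last time and retry them first.
	FILE* fp = fopen(backupPath, "rb");
	if (fp == NULL)
		return;
	list<MatchedLogRec> backlog;
	MatchedLogRec rec;
	while (fread(&rec, sizeof(rec), 1, fp) == 1)
		backlog.push_back(rec);
	fclose(fp);
	matches->splice(matches->begin(), backlog); // keep original order, retried first
}

void LogSender::saveSendFaild(list<MatchedLogRec>* matches)
{
	// Persist whatever is still unsent so readSendFaild() can pick it up next run.
	FILE* fp = fopen(backupPath, "wb");
	if (fp == NULL)
		return;
	for (list<MatchedLogRec>::const_iterator it = matches->begin(); it != matches->end(); ++it)
		fwrite(&(*it), sizeof(MatchedLogRec), 1, fp);
	fclose(fp);
}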