Example #1
	/**
	 * @brief Creates a trained proxy to an FFNet model
	 *
	 * @param data training data
	 *
	 * @return proxy to FFNet model
	 */
	shark::ArgMaxConverter<shark::FFNet<shark::LogisticNeuron, shark::LinearNeuron>> 
	createFFNetModel(shark::ClassificationDataset& data) {
		using namespace shark;
		using namespace std;

		FFNet<LogisticNeuron, LinearNeuron> model;
		vector<size_t> layers = {inputDimension(data), 10, numberOfClasses(data)};
		model.setStructure(layers, FFNetStructures::Full, true);	
		initRandomUniform(model,-0.1,0.1);

		CrossEntropy loss;

		ErrorFunction<RealVector, unsigned int> errorFunction(data, &model, &loss);
		IRpropPlus optimizer;
		optimizer.init(errorFunction);

//		TrainingError<> stoppingCriterion(10, 1.e-5);

		// Looser stopping tolerance (1.e-3 instead of 1.e-5 above) for faster training
		TrainingError<> stoppingCriterion(10, 1.e-3);
		OptimizationTrainer<FFNet<LogisticNeuron, LinearNeuron>, unsigned int> trainer(&loss, &optimizer, &stoppingCriterion);
		trainer.train(model, data);

		shark::ArgMaxConverter<FFNet<LogisticNeuron, LinearNeuron>> converter;
		converter.decisionFunction() = model;
	
		return converter; // return by value; NRVO/implicit move applies, std::move would inhibit copy elision
	}
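
A minimal usage sketch for createFFNetModel above. The Chessboard toy distribution, the variable names, and the ZeroOneLoss check are illustrative assumptions added here, not part of the original snippet:

	// Generate a toy classification dataset and train the proxy returned above.
	shark::Chessboard problem;
	shark::ClassificationDataset data = problem.generateDataset(1000);
	auto classifier = createFFNetModel(data);

	// The ArgMaxConverter emits hard class labels, so ZeroOneLoss<unsigned int>
	// can compare its output against the dataset labels directly.
	shark::ZeroOneLoss<unsigned int> loss01;
	shark::Data<unsigned int> predictions = classifier(data.inputs());
	std::cout << "training error: " << loss01(data.labels(), predictions) << std::endl;
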
Example #2
int main(){
	//get problem data
	Problem problem(1.0);
	LabeledData<RealVector,unsigned int> training = problem.generateDataset(1000);
	LabeledData<RealVector,unsigned int> test = problem.generateDataset(100);
	
	std::size_t inputs=inputDimension(training);
	std::size_t outputs = numberOfClasses(training);
	std::size_t hiddens = 10;
	unsigned numberOfSteps = 1000;

	//create network and initialize weights random uniform
	FFNet<LogisticNeuron,LinearNeuron> network;
	network.setStructure(inputs,hiddens,outputs);
	initRandomUniform(network,-0.1,0.1);
	
	//create error function
	CrossEntropy loss;
	ErrorFunction error(training,&network,&loss);
	
	// loss for evaluation
	// The zeroOneLoss for multiclass problems assigns the class to the highest output
	ZeroOneLoss<unsigned int, RealVector> loss01; 

	// evaluate initial network
	Data<RealVector> prediction = network(training.inputs());
	cout << "classification error before learning:\t" << loss01.eval(training.labels(), prediction) << endl;

	//initialize Rprop
	IRpropPlus optimizer;
	optimizer.init(error);
	
	for(unsigned step = 0; step != numberOfSteps; ++step) 
		optimizer.step(error);

	// evaluate solution found by training
	network.setParameterVector(optimizer.solution().point); // set weights to weights found by learning
	prediction = network(training.inputs());
	cout << "classification error after learning:\t" << loss01(training.labels(), prediction) << endl;
}
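
The example above also generates a held-out test set but only reports the error on the training data. A short follow-up sketch, reusing the names defined in main and added here as an illustrative extension, would report the test error before main returns:

	// evaluate the trained network on the held-out test data
	prediction = network(test.inputs());
	cout << "classification error on test data:\t" << loss01(test.labels(), prediction) << endl;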