示例#1
0
/*!\brief Train a network.
 * \param net Pointer to a neural network.
 *
 * The caller must already have run net_compute() (to produce the
 * outputs for the current inputs) and net_compute_output_error()
 * (to prime the network with the output error). This routine then
 * carries out the actual training step: it backpropagates the
 * stored output error through all layers and immediately adjusts
 * the weights.
 */
void
net_train (network_t *net)
{
  assert (NULL != net);

  backward_pass (net);
  adjust_weights (net);
}
示例#2
0
/*!\brief Train a network in batch mode.
 * \param net Pointer to a neural network.
 *
 * Batch-mode counterpart of net_train(). The caller must have
 * started the batch with net_begin_batch(), and for the current
 * input/target pair must have run net_compute() and
 * net_compute_output_error() so the output error is available.
 * This routine backpropagates that error through the layers and
 * accumulates the per-pattern deltas, but leaves the weights
 * untouched; they are only applied when net_end_batch() closes
 * the batch.
 */
void
net_train_batch (network_t *net)
{
  assert (NULL != net);

  net->no_of_patterns++;

  backward_pass (net);
  adjust_deltas_batch (net);
}
示例#3
0
文件: lstm.cpp 项目: dunghand/msrds
void main()
{

	int i, j, k,
	trialnr;

    
	/* input pars */
	getpars();

	/* input training set and test set */
	getsets();

	if (maxtrials>20)
		maxtrials=20;

	if (bias1==1)
		in_mod++;
	
	if (bias2==1)
		hid_mod++;

	hi_in_mod = in_mod+hid_mod;
	cell_mod=hi_in_mod;

	for (i=0;i<num_blocks;i++)
		cell_mod+=(2+block_size[i]);

	ges_mod = cell_mod+out_mod;
	if (ges_mod>max_units)
	{
		printf("Program terminated!\n");
		printf("You have to set the constant max_units at begin\n");
		printf("of the program file greater or equal %d and then\n",ges_mod);
		printf("compile the program again.\n");
		exit(0);
	}

	srand(ran_sta);
	for (trialnr=0;trialnr<maxtrials;trialnr++)
	{


		outfile = outf[trialnr];

		weightfile = weig[trialnr];

		fp1 = fopen(outfile, "w");
		fprintf(fp1,"Trial Nr.:%.1d\n",trialnr);
		fclose(fp1);

		fp2 = fopen(weightfile, "w");
		fprintf(fp2,"Trial Nr.:%.1d\n",trialnr);
		fclose(fp2);


		initia();

		examples=0;
		epoch=0;

		maxepoch=maxepoch_init;

		stop_learn=0;
		learn = 1;

		while (learn == 1)
		{

			/* executing the environment
				and setting the input
				*/
			execute_act();

			/* forward pass */
			forward_pass();


			if (targ==1) /* only if target for this input */
			{
				/* compute error */
				for (k=cell_mod,j=0;k<ges_mod;k++,j++)
				{
					error[j]=  target_a[j] - Yk_mod_new[k];
				};
				/* Training error */
				comp_err();
			}

			/* backward pass */
			if (targ==1) /* only if target for this input */
			{
				backward_pass();
			}
			else
			{
				derivatives();
			}
	

			/* set old activations */
			for (i=0;i<ges_mod;i++)
			{
				Yk_mod_old[i] = Yk_mod_new[i];
			}


			/* update weights */
			if (weight_up==1)
			{
				weight_up=0;
				weight_update();
			}

			/* stop if maxepoch reached */
			if (epoch>maxepoch)
				learn=0;
		}

		weight_out();
		test();
	}

	exit(0);
}