Example #1
FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut(unsigned int num_layers, ...)
{
	struct fann *ann;
	int i;
	va_list layer_sizes;
	unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

	if(layers == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	va_start(layer_sizes, num_layers);
	for(i = 0; i < (int) num_layers; i++)
	{
		layers[i] = va_arg(layer_sizes, unsigned int);
	}
	va_end(layer_sizes);

	ann = fann_create_shortcut_array(num_layers, layers);

	free(layers);

	return ann;
}
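
For context, a minimal usage sketch (the layer sizes are illustrative, not taken from the examples): the variadic call is equivalent to passing the same sizes through fann_create_shortcut_array, which is exactly what the wrapper above does. Link with -lfann.

#include <fann.h>

int main(void)
{
	/* Variadic form: 3 layers with 2 inputs, 4 hidden neurons, 1 output. */
	struct fann *ann = fann_create_shortcut(3, 2, 4, 1);

	/* Equivalent array form, used internally by fann_create_shortcut. */
	unsigned int layers[3] = { 2, 4, 1 };
	struct fann *ann_array = fann_create_shortcut_array(3, layers);

	if(ann) fann_destroy(ann);
	if(ann_array) fann_destroy(ann_array);
	return 0;
}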
Example #2
bool ViFann::setStructure(const Type &type, const QList<int> &neurons, const qreal &connectionRate)
{
	#ifdef GPU
		if(type != Standard)
		{
			LOG("The GPU version of FANN currently doesn't support shortcut, sparse or cascade networks.", QtFatalMsg);
			exit(-1);
		}
	#endif
	clear();

	mType = type;
	mInputCount = neurons.first();
	mOutputCount = neurons.last();
	mNeurons.clear();
	for(mI = 0; mI < neurons.size(); ++mI)
	{
		if(neurons[mI] != 0) mNeurons.append(neurons[mI]);
	}

	if(mInput != NULL) delete [] mInput;
	if(mOutput != NULL) delete [] mOutput;
	mInput = new float[mInputCount];
	mOutput = new float[mOutputCount];

	unsigned int layers = mNeurons.size();
	unsigned int layerNeurons[layers];
	for(mI = 0; mI < layers; ++mI) layerNeurons[mI] = mNeurons[mI];

	if(type == Standard) mNetwork = fann_create_standard_array(layers, layerNeurons);
	#ifndef GPU
		else if(type == Sparse)
		{
			mNetwork = fann_create_sparse_array(connectionRate, layers, layerNeurons);
			mConnectionRate = connectionRate;
		}
		else if(type == Shortcut) mNetwork = fann_create_shortcut_array(layers, layerNeurons);
	#endif
	else return false;

	fann_set_train_stop_function(mNetwork, FANN_STOPFUNC_MSE);

	if(ENABLE_CALLBACK)
	{
		fann_set_callback(mNetwork, &ViFann::trainCallback);
		mMseTotal.clear();
		mMseCount = 0;
	}

	return true;
}
Example #3
int sci_fann_create(char * fname)
{
  int * pi_command_addr = NULL;
  int m_layers,  n_layers,  * pi_layers_addr = NULL;
  int * pi_conn_addr = NULL;
  char * Command = NULL;
  double * layers = NULL, conn = 0.0;
  unsigned int * ui_layers = NULL;
  int res, numLayers, i;
  struct fann * result_ann = NULL;
  SciErr _sciErr;

  if (Rhs<2)
    {
      Scierror(999,"%s usage: ann = %s(command,[layers ...])", fname, fname);
      return 0;
    }

  _sciErr = getVarAddressFromPosition(pvApiCtx, 1, &pi_command_addr);
  if (_sciErr.iErr)
    {
      printError(&_sciErr, 0);
      return 0;
    }
  getAllocatedSingleString(pvApiCtx,  pi_command_addr, &Command);

  _sciErr = getVarAddressFromPosition(pvApiCtx, 2, &pi_layers_addr);
  if (_sciErr.iErr)
    {
      printError(&_sciErr, 0);
      return 0;
    }
  _sciErr = getMatrixOfDouble(pvApiCtx, pi_layers_addr, &m_layers, &n_layers, &layers);

  if ((n_layers != 1) && (m_layers != 1))
    {
      Scierror(999,"%s: Layers must be a vector!",fname);
      return 0;
    }
  
  numLayers = m_layers * n_layers;
  ui_layers = (unsigned int *)MALLOC(numLayers*sizeof(unsigned int));
  if (ui_layers == NULL)
    {
      freeAllocatedSingleString(Command);
      Scierror(999,"%s: No more memory.\n",fname);
      return 0;
    }
  for(i=0; i<numLayers; i++) ui_layers[i] = (unsigned int)layers[i];

  if (strcmp(Command,"standard") == 0)
    {
      freeAllocatedSingleString(Command);

      // fann_create_standard_array  Just like fann_create_standard, but with an array of layer sizes instead of individual parameters.
      result_ann = fann_create_standard_array(numLayers,ui_layers);
      FREE(ui_layers);
      if (result_ann==NULL)
	{
	  Scierror(999,"%s: not able to create standard network\n",fname);
	  return 0;
	}
    }
  else if (strcmp(Command,"sparse") == 0)
    {
      freeAllocatedSingleString(Command);

      // fann_create_sparse_array    Just like fann_create_sparse, but with an array of layer sizes instead of individual parameters.
      _sciErr = getVarAddressFromPosition(pvApiCtx, 3, &pi_conn_addr);
      if (_sciErr.iErr)
	{
	  printError(&_sciErr, 0);
	  return 0;
	}
      getScalarDouble(pvApiCtx, pi_conn_addr, &conn);

      result_ann = fann_create_sparse_array(conn,numLayers,ui_layers);
      FREE(ui_layers);
      if (result_ann==NULL)
	{
	  Scierror(999,"%s: not able to create sparse network\n",fname);
	  return 0;
	}
    }
  else if (strcmp(Command,"shortcut") == 0)
    {
      freeAllocatedSingleString(Command);

      // fann_create_shortcut_array  Just like fann_create_shortcut, but with an array of layer sizes instead of individual parameters.
      result_ann = fann_create_shortcut_array(numLayers,ui_layers);
      FREE(ui_layers);
      if (result_ann==NULL)
	{
	  Scierror(999,"%s: not able to create shortcut network\n",fname);
	  return 0;
	}
    }
  else
    {
      Scierror(999,"%s: unknown command '%s'\n",fname,Command);
      freeAllocatedSingleString(Command);
      FREE(ui_layers);
      return 0;
    }

  //Create the struct representing this ann in scilab
  res = createScilabFannStructFromCFannStruct(result_ann, Rhs + 1);
  if (res==-1) return 0;

  LhsVar(1) = Rhs + 1;

  return 0;
}
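
For reference, a standalone C sketch of the three array-based constructors this gateway dispatches to; the layer sizes and the sparse connection rate below are illustrative only.

#include <fann.h>

int main(void)
{
	unsigned int layers[3] = { 2, 3, 1 };

	/* "standard": fully connected feed-forward network. */
	struct fann *standard = fann_create_standard_array(3, layers);

	/* "sparse": same topology, but only a fraction of the possible
	   connections (here 50%) is created. */
	struct fann *sparse = fann_create_sparse_array(0.5f, 3, layers);

	/* "shortcut": neurons also connect directly to all later layers. */
	struct fann *shortcut = fann_create_shortcut_array(3, layers);

	if(standard) fann_destroy(standard);
	if(sparse) fann_destroy(sparse);
	if(shortcut) fann_destroy(shortcut);
	return 0;
}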