Example #1: fann_get_bias_array
FANN_EXTERNAL void FANN_API fann_get_bias_array(struct fann *ann, unsigned int *bias)
{
    struct fann_layer *layer_it;

    for (layer_it = ann->first_layer; layer_it != ann->last_layer; ++layer_it, ++bias) {
        switch (fann_get_network_type(ann)) {
            case FANN_NETTYPE_LAYER: {
                /* Report one bias in each layer except the last */
                if (layer_it != ann->last_layer-1)
                    *bias = 1;
                else
                    *bias = 0;
                break;
            }
            case FANN_NETTYPE_SHORTCUT: {
                /* The bias in the first layer is reused for all layers */
                if (layer_it == ann->first_layer)
                    *bias = 1;
                else
                    *bias = 0;
                break;
            }
            default: {
                /* Unknown network type, assume no bias present  */
                *bias = 0;
                break;
            }
        }
    }
}
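A minimal caller sketch (an illustrative addition, not part of the original listing): it assumes the standard FANN header and a throwaway 2-3-1 network, and shows how the bias buffer is sized with fann_get_num_layers before fann_get_bias_array fills it.

#include <stdio.h>
#include <stdlib.h>
#include "fann.h"

int main(void)
{
    /* Throwaway 2-3-1 network, purely for illustration. */
    struct fann *ann = fann_create_standard(3, 2, 3, 1);

    /* The caller supplies one slot per layer. */
    unsigned int num_layers = fann_get_num_layers(ann);
    unsigned int *bias = malloc(num_layers * sizeof(unsigned int));

    fann_get_bias_array(ann, bias);

    for (unsigned int i = 0; i < num_layers; i++)
        printf("layer %u: %u bias neuron(s)\n", i, bias[i]);

    free(bias);
    fann_destroy(ann);
    return 0;
}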
Example #2: ViFann::loadFromFile
bool ViFann::loadFromFile(const QString &path)
{
	QFile file(path);
	if(!file.exists()) return false;

	clear();
	mNetwork = fann_create_from_file(path.toLatin1().data());
	if(mNetwork == NULL) return false;

	fann_nettype_enum type = fann_get_network_type(mNetwork);
	//TODO: get type

	int layerCount = fann_get_num_layers(mNetwork);
	// Use a QVector instead of a variable-length array (a compiler extension, not standard C++).
	QVector<unsigned int> layers(layerCount);
	fann_get_layer_array(mNetwork, layers.data());
	for(int i = 0; i < layerCount; ++i)
	{
		mNeurons.append(layers[i]);
	}
	mInputCount = mNeurons.first();
	mOutputCount = mNeurons.last();

	// Release any previously allocated buffers before resizing them.
	if(mInput != NULL) delete [] mInput;
	if(mOutput != NULL) delete [] mOutput;
	mInput = new float[mInputCount];
	mOutput = new float[mOutputCount];

	mConnectionRate = fann_get_connection_rate(mNetwork);

	//TODO: get weights

	return true;
}
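For context, a plain C sketch of the underlying FANN calls this method relies on, assuming a network file previously written with fann_save; the file name network.net is a placeholder.

#include <stdio.h>
#include "fann.h"

int main(void)
{
    /* "network.net" is a placeholder; use any file written with fann_save(). */
    struct fann *ann = fann_create_from_file("network.net");
    if (ann == NULL)
        return 1;

    /* Input/output sizes and connection rate, as queried in loadFromFile above. */
    printf("inputs: %u, outputs: %u, connection rate: %f\n",
           fann_get_num_input(ann),
           fann_get_num_output(ann),
           fann_get_connection_rate(ann));

    fann_destroy(ann);
    return 0;
}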
Example #3: fann_get_layer_array
FANN_EXTERNAL void FANN_API fann_get_layer_array(struct fann *ann, unsigned int *layers)
{
    struct fann_layer *layer_it;

    for (layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++) {
        unsigned int count = layer_it->last_neuron - layer_it->first_neuron;
        /* Remove the bias from the count of neurons. */
        switch (fann_get_network_type(ann)) {
            case FANN_NETTYPE_LAYER: {
                --count;
                break;
            }
            case FANN_NETTYPE_SHORTCUT: {
                /* The bias in the first layer is reused for all layers */
                if (layer_it == ann->first_layer)
                    --count;
                break;
            }
            default: {
                /* Unknown network type, assume no bias present  */
                break;
            }
        }
        *layers++ = count;
    }
}
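A short usage sketch (an illustrative addition): fann_get_num_layers sizes the buffer that fann_get_layer_array fills with per-layer neuron counts, bias excluded.

#include <stdio.h>
#include <stdlib.h>
#include "fann.h"

int main(void)
{
    /* Small 4-5-3-1 network, only to have something to inspect. */
    struct fann *ann = fann_create_standard(4, 4, 5, 3, 1);

    unsigned int num_layers = fann_get_num_layers(ann);
    unsigned int *layers = malloc(num_layers * sizeof(unsigned int));

    fann_get_layer_array(ann, layers);

    for (unsigned int i = 0; i < num_layers; i++)
        printf("layer %u: %u neurons (bias not counted)\n", i, layers[i]);

    free(layers);
    fann_destroy(ann);
    return 0;
}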