Ejemplo n.º 1
0
// Ntk Construction Functions
void
V3BvNtk::initialize() {
   assert (!_inputData.size());
   // Create Constant BV_CONST = 1'b0 for Sync with AIG_FALSE
   const V3NetId id = createNet(1); assert (!id.id);
   _inputData.back().push_back(0); createConst(id);
}
Ejemplo n.º 2
0
// Construct a neural network whose topology (input/output counts, number
// of hidden layers, neurons per hidden layer) is read from the global
// Params configuration, then allocate the net via createNet().
NeuralNet::NeuralNet() {
  	numInputs = Params::numInputs;
  	numOutputs = Params::numOutputs;
  	numHiddenLayers = Params::numHidden;
  	neuronsPerHiddenLayer = Params::neuronsPerHiddenLayer;
  	// createNet() presumably reads the members set above -- keep it last.
  	createNet();
}
Ejemplo n.º 3
0
// Check whether an LP solution is feasible for the VRP: it must be
// integral, and every connected component of its support network must
// have cut value >= 2 (within etol_) and demand within vehicle capacity.
// On success returns a newly allocated VrpSolution (caller owns it);
// otherwise returns NULL.  userFeasible is set accordingly.
BlisSolution * 
VrpModel::userFeasibleSolution(const double *solution, bool &userFeasible)
{
    double objValue = 0.0;
    CoinPackedVector *solVec = getSolution(solution);
    VrpSolution *vrpSol = NULL;

    int msgLevel = AlpsPar_->entry(AlpsParams::msgLevel);
    userFeasible = true;

    // Build the support network of the candidate solution.
    createNet(solVec);

    if (!n_->isIntegral_) {
        userFeasible = false;
        if (msgLevel > 200) {
            std::cout << "UserFeasible: not integral" << std::endl;
        }
    }
    else {
        const int numComps = n_->connected();
        for (int c = 0; c < numComps; ++c) {
            // Components are indexed from 1 in compCuts_/compDemands_.
            if (n_->compCuts_[c + 1] < 2 - etol_) {
                if (msgLevel > 200) {
                    std::cout << "UserFeasible: not 2" << std::endl;
                }
                userFeasible = false;
                break;
            }
            if (n_->compDemands_[c + 1] > capacity_) {
                if (msgLevel > 200) {
                    std::cout << "UserFeasible: greater than capacity" << std::endl;
                }
                userFeasible = false;
                break;
            }
        }
    }

    if (userFeasible) {
        // Objective value of the feasible solution.
        for (int k = 0; k < numCols_; ++k) {
            objValue += objCoef_[k] * solution[k];
        }

        // Wrap it in a VRP solution object.
        vrpSol = new VrpSolution(getNumCols(), solution, objValue, this);

        // TODO: add tour
    }

    // Free memory.
    delete solVec;

    return vrpSol;
}
Ejemplo n.º 4
0
// Create a client for the race server at host:port.  The TCP socket is
// allocated immediately, the client enters the Connecting state, and
// socket notifications are wired to the corresponding slots.
// NOTE(review): 'parent' is accepted but never forwarded to the base
// class here -- confirm whether it should be passed to QObject(parent).
RaceClient::RaceClient(QString host, int port, QObject *parent)
    : m_host(host),
      m_port(port)
{
    m_socket = new QTcpSocket(this);
    m_state = RaceClient::Connecting;
    createNet(false);
    emit stateChanged(m_state);

    // Socket event wiring.
    connect(m_socket, SIGNAL(readyRead()), this, SLOT(readFromServer()));
    connect(m_socket, SIGNAL(connected()), this, SLOT(onConnected()));
    connect(m_socket, SIGNAL(error(QAbstractSocket::SocketError)),
            this, SLOT(onError(QAbstractSocket::SocketError)));
}
Ejemplo n.º 5
0
// Build all signature sets for the given nodes by delegating to the three
// builders: nets, then net predecessors, then A-signatures.  The call
// order matches the original; presumably later steps depend on the
// earlier ones -- confirm before reordering.
// NOTE(review): vNodes is taken by value, so every call copies the whole
// vector; a const reference would avoid that (requires changing the
// declaration in the header as well).
void Signatures::createSets(vector<Node*> vNodes){
	createNet(vNodes);
	createNetPred(vNodes);
	createA_Sig(vNodes);
}
Ejemplo n.º 6
0
// Construct a network with the given input/output widths and per-layer
// sizes.  setMemory(false) is called first (exact semantics defined
// elsewhere -- presumably clears a memory/recurrence flag; confirm),
// then the layer structure is allocated via createNet().
NNet::NNet( int numberOfInputs, int numberOfOutputs, std::vector<int> layerSize )
{
	setMemory( false );

	createNet( numberOfInputs, numberOfOutputs, layerSize );
}
Ejemplo n.º 7
0
/*
 * Demo driver: train a small feed-forward net to approximate
 *     f(x) = 2*sin(x0 + x1) + 0.5*exp(x2 - x3)
 * with batch back-propagation, then report R^2 over 50 fresh samples.
 */
int main() {

    double inputs[MAX_NO_OF_INPUTS];
    double outputTargets[MAX_NO_OF_OUTPUTS];

    /* determine layer parameters (input layer excluded) */
    int noOfLayers = 2;
    int noOfNeurons[] = {10,1};         /* hidden layer, output layer */
    /* BUG FIX: the training target below depends on all four inputs,
     * but the first layer was configured for only 2 of them
     * (noOfInputs[] = {2,10}), making the exp(x2 - x3) term
     * unlearnable noise.  Feed all 4 inputs into the first layer. */
    int noOfInputs[] = {4,10};
    char axonFamilies[] = {'g','l'};
    /* one flatness value per layer (was over-sized at 3 entries) */
    double actFuncFlatnesses[] = {1,1};

    createNet(noOfLayers, noOfNeurons, noOfInputs, axonFamilies, actFuncFlatnesses, 1);

    /* train it using batch method */
    int i;
    double tempTotal1, tempTotal2;
    int counter = 0;
    for(i = 0; i < TRAINING_ITERATION; i++) {
        inputs[0] = getRand();
        inputs[1] = getRand();
        inputs[2] = getRand();
        inputs[3] = getRand();
        tempTotal1 = inputs[0] + inputs[1];
        tempTotal2 = inputs[2] - inputs[3];
        feedNetInputs(inputs);
        updateNetOutput();
        outputTargets[0] = sin(tempTotal1)*2+0.5*exp(tempTotal2);
        /* batch training: cumulate weight deltas without applying them */
        trainNet(0, 0, 1, outputTargets);
        counter++;
        /* apply batch changes every 100 loops; rate/momentum in (0,1) */
        if(counter == 100) { applyBatchCumulations(.3,.3); counter = 0;}
    }

    /* test it on 50 fresh random samples */
    double *outputs;
    double target_out[50];
    double actual_out[50];
    printf("Sin Target \t Output \n");
    printf("---------- \t -------- \t ---------- \t --------\n");
    for(i = 0; i < 50; i++) {
        inputs[0] = getRand();
        inputs[1] = getRand();
        inputs[2] = getRand();
        inputs[3] = getRand();
        tempTotal1 = inputs[0] + inputs[1];
        tempTotal2 = inputs[2] - inputs[3];

        target_out[i] = sin(tempTotal1)*2+0.5*exp(tempTotal2);

        feedNetInputs(inputs);
        updateNetOutput();
        outputs = getOutputs();

        actual_out[i] = outputs[0];

        printf( "%f \t %f \n", target_out[i], actual_out[i]);
    }

    /* goodness-of-fit summary */
    float Rsquared_ans=Rsquared(target_out, actual_out, 50);
    printf("Result Summary: \n");
    printf("The Rsquared Value is : %f \n", Rsquared_ans);
    printf("finish!!!\n");

    return 0;

}
Ejemplo n.º 8
0
// Ntk Construction Functions
void
V3Ntk::initialize() {
   assert (!_inputData.size());
   // Create Constant AIG_FALSE
   const V3NetId id = createNet(1); assert (!id.id); createConst(id);
}