/*
CONCISE DESCRIPTION OF FUNCTION:
Given a single file argument, the program builds that file's trigram frequency
vector and prints each entry to the command line, separated by spaces.
Given more than two arguments, the program compares the frequency vector of the
file named by the last argument against the frequency vectors of the files named
by every preceding argument, and reports the closest match.
*/
int main(int argc, char *argv[]) {
	if (argc < 2) {
		std::cerr << "Usage: " << argv[0] << " <file> [<file> ... <testfile>]" << std::endl;
		return 1;
	}
	if (argc == 2) {
		std::vector<int> gram = trigram(argv[1]);
		std::cout << gram[0];
		for (size_t i = 1; i < gram.size(); ++i) {
			std::cout << " " << gram[i];
		}
		std::cout << std::endl;
	} else {
		// The last command-line argument names the test file; compute its frequency vector once.
		std::vector<int> testfreq = trigram(argv[argc - 1]);
		double simnumber = learn(testfreq, trigram(argv[1]));
		char *language = argv[1];
		for (int n = 2; n < argc - 1; ++n) {
			double newsim = learn(testfreq, trigram(argv[n]));
			std::cout << newsim << " " << argv[n] << std::endl;
			// Keep the highest cos^2(theta) seen so far and remember which file (language) produced it.
			if (simnumber < newsim) {
				simnumber = newsim;
				language = argv[n];
			}
		}
		std::cout << "Answer: " << language << std::endl;
	}
    return 0;
}
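/*
The trigram() and learn() helpers used above are not shown in this example. Below is a
minimal illustrative sketch, not the original implementation: it assumes trigram() hashes
every 3-character window of the file into a fixed-length count vector (the bucket count
of 4096 is arbitrary), and that learn() returns cos^2(theta) between two frequency
vectors, matching the comment in the loop above. Invoked as, for example,
"./classify english.txt french.txt unknown.txt", the program would print one similarity
score per candidate file and finish with the best match.
*/
#include <fstream>
#include <functional>
#include <iterator>
#include <string>
#include <vector>

// Illustrative sketch: map each 3-character window of the file into a bucket and count it.
std::vector<int> trigram(const char *filename) {
	const size_t kBuckets = 4096;            // assumed fixed vector length
	std::vector<int> freq(kBuckets, 0);
	std::ifstream in(filename);
	std::string text((std::istreambuf_iterator<char>(in)),
	                 std::istreambuf_iterator<char>());
	for (size_t i = 0; i + 2 < text.size(); ++i) {
		size_t h = std::hash<std::string>{}(text.substr(i, 3));
		++freq[h % kBuckets];
	}
	return freq;
}

// cos^2(theta) between two frequency vectors: (a.b)^2 / (|a|^2 * |b|^2).
double learn(const std::vector<int> &a, const std::vector<int> &b) {
	double dot = 0, na = 0, nb = 0;
	for (size_t i = 0; i < a.size() && i < b.size(); ++i) {
		dot += static_cast<double>(a[i]) * b[i];
		na  += static_cast<double>(a[i]) * a[i];
		nb  += static_cast<double>(b[i]) * b[i];
	}
	if (na == 0 || nb == 0) return 0.0;
	return (dot * dot) / (na * nb);
}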
void NearestNeighbourClassifier::learn(vector<Mat>& patches, bool isPositive)
{
    for(size_t i = 0; i < patches.size(); i++)
    {
        learn(patches[i],isPositive);
    }
}
Example no. 3
static void
handle_packet_in( uint64_t datapath_id, packet_in message ) {
  if ( !packet_type_ether( message.data ) ) {
    return;
  }

  struct key new_key;
  packet_info packet_info = get_packet_info( message.data );
  memcpy( new_key.mac, packet_info.eth_macsa, OFP_ETH_ALEN );
  new_key.datapath_id = datapath_id;
  hash_table *forwarding_db = message.user_data;
  learn( forwarding_db, new_key, message.in_port );

  struct key search_key;
  memcpy( search_key.mac, packet_info.eth_macda, OFP_ETH_ALEN );
  search_key.datapath_id = datapath_id;
  forwarding_entry *destination = lookup_hash_entry( forwarding_db, &search_key );

  if ( destination == NULL ) {
    do_flooding( message );
  }
  else {
    send_packet( destination->port_no, message );
  }
}
Example no. 4
void PseudoBooleanProcessor::learn(const NodeVec& assertions){
  NodeVec::const_iterator ci, cend;
  ci = assertions.begin(); cend=assertions.end();
  for(; ci != cend; ++ci ){
    learn(*ci);
  }
}
Example no. 5
void rlMain::onEnd(bool isWinner)
{
	// Replace the reward of the last recorded step with the episode's final reward.
	StepsTaken.back().reward = GetEpisodeReward(isWinner);
	learn();
	saveQtable();
}
Example no. 6
void patternRecognitionPCA::learnPCA(int inputDim, int outputDim, int numberSamples, double *& samples, double *& reducedSamples)
{
	int i, j;

	this->inputDim = inputDim;
	this->outputDim = outputDim;

	UnsupervisedLearning learn(inputDim, 1, numberSamples);
	eigenvalues = new double[outputDim];
	reductionMatrix = new double[inputDim * outputDim];
	mean = new double[inputDim];

	//learn.performPCA(inputDim, outputDim, samples, reducedSamples, reductionMatrix, eigenvalues, mean);
	learn.performPCASpaceRestrictedSVD(inputDim, outputDim, samples, reductionMatrix, eigenvalues, mean);

	//Compute the reduced samples:
	double * sample = new double[inputDim];
	double * reducedSample = new double[outputDim];

	for(i = 0; i < numberSamples; i++)
	{
		for(j = 0; j < inputDim; j++) 
			sample[j] = samples[i*inputDim+j];

		projectToPCA(sample, reducedSample);

		for(j = 0; j < outputDim; j++)
			reducedSamples[i*outputDim+j] = reducedSample[j];
	}

	delete [] sample;
	delete [] reducedSample;
}
Example no. 7
/* class LEARN */
LEARN::LEARN (string sgfpath)
{
  INOUT io;
  // Because of lazy (short-circuit) evaluation, chain the calls with && to make sure every load() runs.
  if (g_init.load_xorand_key2rand ("encodelib/xorand")
      && io.load (lib_pemis, "encodelib/pemis")
      && io.load (lib_zobrist[0], "encodelib/zobrist0")
      && io.load (lib_zobrist[1], "encodelib/zobrist1")
      && io.load (lib_zobrist[2], "encodelib/zobrist2")
      && io.load (lib_zobrist[3], "encodelib/zobrist3")
      && io.load (lib_zobrist[4], "encodelib/zobrist4")
      && io.load (lib_zobrist[5], "encodelib/zobrist5")
      && io.load (lib_zobrist[6], "encodelib/zobrist6")
      && io.load (lib_zobrist[7], "encodelib/zobrist7")
      && io.load (lib_zobrist[8], "encodelib/zobrist8"))
    {
      clog << "All FSMs been loaded ." << endl;
    }
  else
    {
      VS vf = io.getfilename (sgfpath, "*.sgf");
      clog << "Learning from these file :" << endl << vf;
      learn (vf);
    }
}
static void _FFNet_Pattern_Categories_learn (FFNet me, Pattern p, Categories c, long maxNumOfEpochs, double tolerance, Any parameters, int costFunctionType, void (*learn) (FFNet, Pattern, Activation, long, double, Any, int)) {
	_FFNet_Pattern_Categories_checkDimensions (me, p, c);
	autoActivation activation = FFNet_Categories_to_Activation (me, c);
	double min, max;
	Matrix_getWindowExtrema (p, 0, 0, 0, 0, &min, &max);
	learn (me, p, activation.peek(), maxNumOfEpochs, tolerance, parameters, costFunctionType);
}
ARPAddress* Computer::receiveRequest(ARPAddress *arp){
    learn(arp);
    if (arpAddress->getIP()->getIp() == arp->getIP()->getIp()
            && arpAddress->getMAC()->getAddress() == arp->getMAC()->getAddress())
        return arpAddress;
    return NULL;
}
Example no. 10
void population::learn(unsigned int n, T foot_size) {
    int i, j, k;
    int unit = n/PROGRESS_DIV;
    int progress;
    
    /* start with same individuals */
    for(i = 0; i < num; i++) {
        trial[i].copy(ref[i]);
    }
    
    for(i = 0; i < n; i++) {
        for(j = 0; j < LEARN_NUM; j++) {
            learn(foot_size);
        }
        
        /* draw progress bar */
        if((i % unit) == 0) {
            progress = i/unit;
            printf("\r[");
            for(k = 0; k < progress; k++) {
                printf("-");
            }
            for(; k < PROGRESS_DIV; k++) {
                printf(" ");
            }
            printf("] %d%% || max score : %f", progress, max_score);
        }
    }
    cout << endl;
}
void PluginManager::search() {
	qDebug() << "[PluginManager]"
			 << "Searching: " << pluginDirectory();
	for (const QFileInfo& file : pluginDirectory().entryInfoList(QDir::Files)) {
		learn(file);
	}
	emit pluginListUpdated();
}
Example no. 12
void DlgControllerLearning::loadControl(const MixxxControl& control) {
    m_currentControl = control;
    QString message = tr("Ready to map: %1. Now move a control on your controller.")
            .arg(m_currentControl.description());
    controlToMapMessage->setText(message);
    labelMappedTo->setText("");
    labelNextHelp->hide();
    emit(learn(m_currentControl));
}
Example no. 13
int Classifier::test(string testConf, int resNum, string pathToSil, string reportPath, void* param /*= NULL*/)
{
    // Load data
    int status = loadData(testConf, pathToSil);
    if (status != 0)
    {
        printf ("Error occured while loading data!\n");
        return -1;
    }

    // Learn classifier
    learn(learningData, param);

    // Acquire statistics
    int wrong = 0;                      // Number of wrong classifications (in best resNum)
    int wrongStrict = 0;                // Number of wrong classifications (counting only the best match)

    int total = 0;                      // Total number of test cases
    ConfusionMatrix confusionMat;
    ClassifResults classifResults;

    map< string, vector<Mat> >::iterator iter;
    for (iter = testData.begin(); iter != testData.end(); iter++)
    {
        string realClassId = iter->first;
        vector<Mat>& imgs = iter->second;

        for (int i = 0; i < imgs.size(); i++, total++)
        {
            // Get classifier output for single test image
            vector< pair<string, double> > predClasses = classify(imgs[i], resNum);
            // Save output for report generation later;
            classifResults.push_back( make_pair(realClassId, predClasses) );

            // Wrong if not equal to any of predicted classes
            if (!inPredClasses(realClassId, predClasses)) wrong++;

            // Update confusion matrix - take only best match
            string predClassId = predClasses[0].first;
            confusionMat[predClassId][realClassId] += 1;

            if (predClassId != realClassId) wrongStrict++;
        }
    }

    // -------------------------- Report generation -------------------------- //
    
    status = generateReport(confusionMat, wrong, total, reportPath, resNum, classifResults, wrongStrict);
    if (status != 0)
    {
        printf ("Error occured while generating report!\n");
        return -1;
    }

    return wrong;
}
Example no. 14
void PseudoBooleanProcessor::learn(Node assertion){
  if(assertion.getKind() == kind::AND){
    Node::iterator ci=assertion.begin(), cend = assertion.end();
    for(; ci != cend; ++ci){
      learn(*ci);
    }
  }else{
    learnInternal(assertion, false, assertion);
  }
}
Example no. 15
void nnet::learn() 
{
	if( ::exec_map->empty() ) 
    {
        // train synchronous layers first
        {
            MapPtr nodes = ::root_map->get( "Nodes" );

            Map::iterator i = nodes->begin();
            Map::iterator iend = nodes->end();
            while( i != iend )
            {
                NodeLayerPtr node = nodes->get( i );
                BooleanPtr enabled( node->get( "enabled" ) );

                if( !node->asyncRecallOrder() && enabled->get() )
                {
                    learn( node );
                }
                i++;
            }
        }
        // then train async layers
        {
            RecallOrder::iterator i = recall_order.order.begin();
            RecallOrder::iterator iend = recall_order.order.end();
            while( i != iend )
            {
                i->node->learn( i->neuron );
                i++;
            }
        }
	} 
    else 
    {
        try 
        {
            ExecEnginePtr exec( ::exec_map->first() );
            exec->learn();
        }
        catch( std::exception& e ) 
        {
            LOG_EXCEPTION_E( e );
            error::std_exception( "nnet::Learn() running ExecEngine", e.what() );
            return;
        }
        catch( ... )
        {
            LOG_EXCEPTION;
            error::alert( "Critical exception in ExecEngine!" );
        }
	}
	
    nnet::global::learn_signal();
}
Example no. 16
void ConvNet::learn(dmatrix4 &stimulus, dmatrix2 &target, int lessons)
{
    assert(stimulus.size()==target.size());
    
    if(lessons==0) lessons=stimulus.size();
    
    for(int n=0;n<lessons;n++) {
        if (n%10 == 0) std::cout << "Learning lesson #" << n+1 << std::endl;
        learn(stimulus[n], target[n]);
    }
}
Example no. 17
int run_safety(optionst &options, mstreamt &result, const symbol_tablet &st,
    const goto_functionst &gf)
{
  srand(options.get_unsigned_int_option(CEGIS_SEED));
  safety_preprocessingt prep(options, st, gf, get_constant_strategy(options));
  const safety_programt &safety_program=prep.get_safety_program();
  safety_learn_configt learn(safety_program);
  safety_verify_configt verify_cfg(safety_program);
  cegis_symex_verifyt<safety_verify_configt> verify(options, verify_cfg);
  return configure_backend(result, options, safety_program, learn, verify, prep);
}
Example no. 18
double WeightedDIDCompass::currentView( Img* CV ) {
    if ( learnMode == LEARNING ) {
        if ( learnIndex >= learnStart && learnIndex <= learnStop )
            learn(SS, CV);
        learnIndex++;
        return computeAngle(SS, CV, false);
    } else if ( learnMode == USING ) {
        return computeAngle(SS, CV, true);
    }
    // NO_LEARNING (or any other mode): fall back to the unweighted angle
    return computeAngle(SS, CV, false);
}
Example no. 19
int run_refactor(optionst &options, messaget::mstreamt &result,
                 const symbol_tablet &st, const goto_functionst &gf)
{
    refactor_preprocessingt preproc(options, st, gf);
    refactor_symex_learnt learn_cfg(preproc.get_program());
    refactor_symex_verifyt verify_cfg(preproc.get_program());
    cegis_symex_learnt<refactor_preprocessingt, refactor_symex_learnt> learn(
        options, preproc, learn_cfg);
    cegis_symex_verifyt<refactor_symex_verifyt> oracle(options, verify_cfg);
    return run_cegis_with_statistics_wrapper(
               result, options, learn, oracle, preproc);
}
Example no. 20
static void thread_method (void* arg)
{
  uv_mutex_lock(&factors_mutex);
  free_learned_factors(factors);
  compile_training_set (training_set);
  factors = learn (training_set, server_param->model);
  
  uv_mutex_lock(&factors_backup_mutex);
  printf("factors_backup_mutex locked inside of thread \n");
  free_learned_factors(factors_backup);
  factors_backup = copy_learned_factors (factors);
  uv_mutex_unlock(&factors_backup_mutex);
  uv_mutex_unlock(&factors_mutex);
}
Example no. 21
    std::vector<ActionBundle> selectActions(const State& startState, TerminationChecker& terminationChecker) override {
        if (domain.isGoal(startState)) {
            // Goal is already reached
            return std::vector<ActionBundle>();
        }

        // Learning phase
        if (openList.isNotEmpty()) {
            learn(terminationChecker);
        }

        const auto bestNode = explore(startState, terminationChecker);

        return extractPath(bestNode, nodes[startState]);
    }
Example no. 22
void patternRecognitionPCA::learnPCA(int inputDim, int outputDim, int numberSamples, double * &samples)
{
	int i, j;
	
	this->inputDim = inputDim;
	this->outputDim = outputDim;

	UnsupervisedLearning learn(inputDim, 1, numberSamples);
	eigenvalues = new double[outputDim];
	reductionMatrix = new double[inputDim * outputDim];
	mean = new double[inputDim];

	learn.performPCASpaceRestrictedSVD(inputDim, outputDim, samples, reductionMatrix, eigenvalues, mean);
}
Example no. 23
void TLD::processImage(Mat img) {
	storeCurrentData();
	Mat grey_frame;
	cvtColor( img,grey_frame, CV_RGB2GRAY );
	currImg = grey_frame; // Store new image, right after storeCurrentData();

	if(trackerEnabled) {
		medianFlowTracker->track(prevImg, currImg, prevBB);
	}

	if(detectorEnabled && (!alternating || medianFlowTracker->trackerBB == NULL)) {
		detectorCascade->detect(grey_frame);
	}

	fuseHypotheses();
	learn();
}
Example no. 24
// Main function: add a new face, add new faces in batch, or recognize faces.
int main( int argc, char** argv )
{
	// validate that an input was specified
	if( argc < 3 )
	{
		printUsage();
		return 1;
	}
	// Run the learning or the recognition code depending on the command-line argument
	if( !strcmp(argv[1], "addFace") ) learn();
	else if( !strcmp(argv[1], "test") ) recognize();
	else
	{
		printf("Unknown command: %s\n", argv[1]);
		printUsage();
	}
	return 0;
}
Example no. 25
int main( int argc, char** argv )
{
	// validate that an input was specified
	if( argc != 2 )
	{
		printUsage();
		return 1;
	}

	if( !strcmp(argv[1], "train") ) learn();
	else if( !strcmp(argv[1], "test") ) recognize();
	else
	{
		printf("Unknown command: %s\n", argv[1]);
	}
	return 0;
}
Example no. 26
std::vector<GraspHypothesis> Localization::predictAntipodalHands(const std::vector<GraspHypothesis>& hand_list, 
	const std::string& svm_filename)
{
	double t0 = omp_get_wtime();
	std::vector<GraspHypothesis> antipodal_hands;
	Learning learn(num_threads_);
	Eigen::Matrix<double, 3, 2> cams_mat;
	cams_mat.col(0) = cam_tf_left_.block<3, 1>(0, 3);
	cams_mat.col(1) = cam_tf_right_.block<3, 1>(0, 3);
	antipodal_hands = learn.classify(hand_list, svm_filename, cams_mat);
	//std::cout << " runtime: " << omp_get_wtime() - t0 << " sec\n";
	//std::cout << antipodal_hands.size() << " antipodal hand configurations found\n";
  if (plotting_mode_ == PCL_PLOTTING)
		plot_.plotHands(hand_list, antipodal_hands, cloud_, "Antipodal Hands");
	//else if (plotting_mode_ == RVIZ_PLOTTING)
	//	plot_.plotGraspsRviz(antipodal_hands, visuals_frame_, true);
	return antipodal_hands;
}
Example no. 27
void MainWindow::learning()
{
    QString dir_weight = QFileDialog::getOpenFileName(this, tr("Open File"), "/home", tr("Text (*.txt)"));
    dataset_ = new dataset_t(dir_weight.toStdString(), X_SIZE, Y_SIZE);
    dataset_->split_train_test(0.7);
    perceptron_ = new perceptron_t(dataset_->dim());

    thread_ = new QThread;
    connect(this, SIGNAL(finish_learn()), thread_, SLOT(quit()));
    connect(this, SIGNAL(finish_learn()), thread_, SLOT(deleteLater()));
    connect(thread_, SIGNAL(started()), this, SLOT(learn()));
    thread_->start();

    progress_ = new QProgressDialog("Learning...", "Cancel", 0, EPOCH_COUNT, this);
    connect(progress_, SIGNAL(canceled()), progress_, SLOT(cancel()));
    progress_->setWindowModality(Qt::WindowModal);

    ui->load->setDisabled(false);
}
Example no. 28
int main()
{
	int i, j, order, wave;
	srand(time(NULL));
	printf("Ready? Go! 30 waves!\n\n");
	ph = 50; pa = 3; pd = 0; wave = 30; phealth = 200;
	for (i = 1; i <= wave; i++)
	{
		printf("\n***********The No.%d wave**********\n", i);
		if (i <= 25) learn();
		h = rand() % (30 * (1 + i / 4)) + ph + 10;
		a = rand() % (5 * (1 + i / 8)) + pa + 1;
		d = rand() % (3 * (1 + i / 8)) + pd;
		m = i;
		ph = h; pa = a; pd = d; win = 2;
		preskill();
		do
		{
			for (j = 0; j <= 4; j++)
				if (k[j]) k[j]--;
			printf("Your situation:health:%d  attack:%d  defend:%d\n", health, attack, defend);
			printf("The enemy's situation:health:%d  attack:%d  defend:%d\n", h, a, d);
			printf("Your turn   1:attack;2:skills   ");
			scanf("%d", &order);
			switch (order)
			{
				case 1: attacke(); break;
				case 2: skill(); break;
				default: printf("Order error! Lose one turn!"); break;
			}
			if (h > 0) reattack();
			printf("\n");
		} while (health > 0 && h > 0);
		if (win == 1) { printf("You win the wave!\n"); money += m; honor++; }
		else if (win == 0) { printf("You lose the wave!\n"); health = phealth; }
		attack++;
		shop();
		health += i * 10 + pa;
		phealth += i * 12;
		attack += rand() % 3;
		defend += rand() % 2;
	}
	printf("You succeeded in %d waves out of %d!", honor, wave);
	return 0;
}
Example no. 29
int main (int argc, char** argv)
{
	int r;
	server_param = parse_arguments (argc, argv);
	LOG ("Extracting data ...");
	training_set = server_extract_data (server_param);
	LOG ("Learning ...");
	compile_training_set (training_set);
	factors = learn (training_set, server_param->model);
	factors_backup = copy_learned_factors (factors);
	LOG ("Learning completed");
	complete = malloc(20 *sizeof(int));
	memset(complete,0,20 *sizeof(int));
	//parser_settings.on_headers_complete = on_headers_complete;
	parser_settings.on_url = on_url;
	parser_settings.on_header_value = on_value;
	uv_loop = uv_default_loop();

	r = uv_tcp_init (uv_loop, &server);
	CHECK (r, "bind");

	struct sockaddr_in address = uv_ip4_addr ("0.0.0.0", server_param->port);

	r = uv_tcp_bind (&server, address);
	CHECK (r, "bind");
	uv_listen ( (uv_stream_t*) &server, 128, on_connect);

	LOGF ("listening on port %u", server_param->port);
	uv_timer_t timer;
	r = uv_timer_init(uv_default_loop(), &timer);
	assert(r == 0);
	r = uv_timer_start(&timer, timer_cb, 10000, 10000);
	assert(r == 0);

	r = uv_mutex_init(&factors_mutex);
	assert(r == 0);
	r = uv_mutex_init(&factors_backup_mutex);
	assert(r == 0);
	r = uv_mutex_init(&tset_mutex);
	assert(r == 0);
	uv_run (uv_loop);
}
Example no. 30
void population::learnTemp(unsigned int n, T foot_size) {
	int i = 0;

	/* start with same individuals */
	for (i = 0; i < num; i++) {
		trial[i].copy(ref[i]);
	}

	for (i = 0; i < n; i++) {
		learn(foot_size);
		if (i % 30 == 0){
			//print result temporary part
			for (int qw = 0; qw < num; qw++){
				ref[qw].calTotalScore(layers, data);
				cout << "[" << qw << "] : " << ref[qw].score << endl;
			}
			//
		}
	}
}