Example No. 1
PyObject * PyMmseg_Segment(PyObject * self, PyObject* args)
{
	csfHelper_MMSegObject *self2 = (csfHelper_MMSegObject *)self;
	char *fromPython; 

	if (!PyArg_Parse(args, "(s)", &fromPython))
		return NULL;
	else
	{
		Segmenter* seg = self2->m_segmgr->getSegmenter(false); 
		seg->setBuffer((u1*)fromPython, (u4)strlen(fromPython));

		PyObject* seg_result = PyList_New(0);
		while(1)
		{
			u2 len = 0, symlen = 0;
			char* tok = (char*)seg->peekToken(len,symlen);
			if(!tok || !*tok || !len){
				break;
			}
			// append the new token; PyList_Append does not steal the
			// reference, so release ours to avoid leaking each item
			PyObject *item = PyString_FromStringAndSize(tok, len);
			PyList_Append(seg_result, item);
			Py_DECREF(item);
			seg->popToken(len);
		}
		//FIXME: free the segmenter
		delete seg;

		return seg_result;
	}
}
PyObject *segment(PyObject *self, PyObject *args) {

	PyObject *module = PyImport_ImportModule("cmmseg");
	if (!module) {
		// import failed; the exception is already set
		return NULL;
	}
	SegmenterManager* mgr = NULL;
	{
		PyObject *module_dict = PyModule_GetDict(module);
		if(!module_dict) {
			PyErr_SetString(PyExc_ValueError, "Need to load the segment dictionary library first!");
			return NULL;
		}
		PyObject *c_api_object = PyDict_GetItemString(module_dict, "__segmgr");

		if (!c_api_object || !PyCObject_Check(c_api_object)) {
			PyErr_SetString(PyExc_ValueError, "Need to load the segment dictionary library first!");
			return NULL;
		}
		mgr = (SegmenterManager*)PyCObject_AsVoidPtr(c_api_object);
	}

    // Chris: either do not take the segmenter from the pool and delete it later, or take it from the pool and clear it
	Segmenter* seg = mgr->getSegmenter(false);
	char *fromPython;

	if (!PyArg_Parse(args, "(s)", &fromPython)) {
        delete seg;
        return NULL;
    } else {
        seg->setBuffer((u1*)fromPython, (u4)strlen(fromPython));

		PyObject* seg_result = PyList_New(0);
		while(1)
		{
			u2 len = 0, symlen = 0;
			char* tok = (char*)seg->peekToken(len,symlen);
			if(!tok || !*tok || !len){
				break;
			}
			//append new item
			PyObject *res = PyString_FromStringAndSize(tok,len);
			PyList_Append(seg_result, res);
            Py_DECREF(res);
			seg->popToken(len);
		}
        delete seg;
        return seg_result;
    }
}
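For context, a binding like the segment() function above is only callable from Python once it is listed in the extension module's method table. Below is a minimal sketch of a Python 2 registration (PyString/PyCObject date these examples to Python 2); the module name cmmseg is taken from the PyImport_ImportModule call above, but the exported method name and docstring are assumptions for illustration, not part of the original example.

// Hypothetical module registration for the segment() binding above.
// Assumes it lives in the same translation unit as segment().
#include <Python.h>

static PyMethodDef cmmseg_methods[] = {
    {"segment", segment, METH_VARARGS, "Segment a string into a list of tokens."},
    {NULL, NULL, 0, NULL}      // sentinel
};

PyMODINIT_FUNC initcmmseg(void)
{
    Py_InitModule("cmmseg", cmmseg_methods);
}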
Example No. 3
int mmseg_segment(char *text, Token ** tokens)
{
	if (!text) {
		return -1;
	}
	if (!mgr_) {
		cerr << "error: init not called\n";
		return -1;
	}
	Segmenter *seg = mgr_->getSegmenter(0);
	seg->setBuffer((u1 *) text, (u4) strlen(text));

	vector<Token> segs;
	while (1) {
		u2 len = 0, symlen = 0;
		char *token = (char *)seg->peekToken(len, symlen);
		if (!token || !*token || !len) {
			break;
		}
		Token t = {token, int(len)};
		segs.push_back(t);
		seg->popToken(len);
	}
	int size = segs.size();

	if (!tokens) {
		delete seg;
		return size;
	}
	(*tokens) = (Token *) malloc(segs.size() * sizeof(Token));
	if (!(*tokens)) {
		delete seg;
		return -1;
	}
	int i = 0;
	vector < Token >::iterator iter = segs.begin();
	for (; iter != segs.end(); iter++) {
		(*tokens)[i] = *iter;
		i++;
	}
	delete seg;
	return size;
}
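A hypothetical caller for mmseg_segment() above, mainly to make the ownership explicit: the returned Token entries point into the caller's own text buffer, while the array itself is malloc'd by the function, so only the array needs to be freed. The Token layout (a char* plus an int length, inferred from the {token, int(len)} initializer) and the input string are assumptions; the segmenter manager also has to be initialized beforehand, per the "init not called" check above.

#include <cstdio>
#include <cstdlib>

// Assumed layout, matching the {token, int(len)} initializer above.
struct Token { char *text; int length; };

int mmseg_segment(char *text, Token **tokens);   // function shown above

int main()
{
	char input[] = "some text to segment";        // caller-owned buffer
	Token *tokens = NULL;
	int n = mmseg_segment(input, &tokens);
	for (int i = 0; i < n; i++) {
		// each token points into 'input'; print exactly 'length' bytes
		printf("%.*s ", tokens[i].length, tokens[i].text);
	}
	printf("\n");
	free(tokens);   // free only the array; the token text is the caller's buffer
	return 0;
}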
int main(int argc, char** argv)
{
    ros::init(argc, argv, "agario_info_node");
    ros::NodeHandle n;
    ros::Rate loop_rate(100);

    image_transport::ImageTransport image_transport(n);

    // publisher
    image_transport::Publisher image_pub =
	image_transport.advertise("video_output", 1);

    // service
    ros::ServiceClient client_mouse =
	n.serviceClient<agario_mouse::Mouse>("agario_mouse");
    ros::ServiceServer notify_death =
	n.advertiseService("agario_info_switch", cb);

    dynamic_reconfigure::Server<agario_info::dynparamsConfig> server;
    dynamic_reconfigure::Server<agario_info::dynparamsConfig>::CallbackType f;
    f = boost::bind(&callbackDynConf, _1, _2);
    server.setCallback(f);

    // get node parameters
    getParameters(n);

    cv::namedWindow("Results", CV_WINDOW_NORMAL);
    ImageFromDisplay image_from_display;
    DecisionMaker decision_maker(train_mode);
    float previous_player_radius = 0;
    while (ros::ok()) {
	if (is_alive) {
	    auto frame_img = image_from_display.getImage();

	    if (frame_img.rows && frame_img.cols) {
		Segmenter segmenter;
		segmenter.segment(frame_img);

		Frame frame(frame_img, segmenter.getCenter(),
			    segmenter.getRadius(), segmenter.getPolyContour(),
			    frame_img.rows, frame_img.cols, is_alive,
			    previous_player_radius);

		decision_maker.execute(&frame);

		auto goal = decision_maker.getGoal();
		moveMouse(client_mouse, goal);

		previous_player_radius = frame.getPlayer()->radius;

		if (DISPLAY) {
		    frame.display(frame_img);
		    decision_maker.drawPelletsRegions(frame_img);
		    decision_maker.drawRays(frame_img);
		    decision_maker.drawAllPlans(frame_img);
		    // decision_maker.drawPlan(frame_img);

		    if (RECORD) {
			sensor_msgs::ImagePtr img_msg =
			    cv_bridge::CvImage(std_msgs::Header(), "bgr8",
					       frame_img).toImageMsg();
			image_pub.publish(img_msg);
		    }

		    /// Show your results
		    cv::imshow("Results", frame_img);
		    cv::waitKey(5);
		}
	    }

	} else {
	    if (RECORD) {
		cv::Mat null;
		sensor_msgs::ImagePtr img_msg =
		    cv_bridge::CvImage(std_msgs::Header(), "bgr8", null)
			.toImageMsg();
		image_pub.publish(img_msg);
	    }
	}
	ros::spinOnce();
	loop_rate.sleep();
    }
    return 0;
}
Example No. 5
int segment(char* buffer, int length)
{
	Segmenter* seg = g_seg_mgr.getSegmenter();

    seg->setBuffer((u1*)buffer, length);
    u2 len = 0, symlen = 0;
    u2 kwlen = 0, kwsymlen = 0;
    // check the first token; txtHead is the UTF-8 BOM (EF BB BF)
    char txtHead[3] = {(char)0xEF, (char)0xBB, (char)0xBF};
    char* tok = (char*)seg->peekToken(len, symlen);
    seg->popToken(len);
    if(seg->isSentenceEnd()){
        do {
            char* kwtok = (char*)seg->peekToken(kwlen , kwsymlen,1);
            if(kwsymlen)
                printf("[kw]%*.*s/x ",kwsymlen,kwsymlen,kwtok);
        }while(kwsymlen);
    }

    if(!(len == 3 && memcmp(tok, txtHead, sizeof(txtHead)) == 0)){
        // the first token is not a BOM, so print it
        printf("%*.*s/x ",symlen,symlen,tok);
    }

    while(1){
        len = 0;
        char* tok = (char*)seg->peekToken(len,symlen);
        if(!tok || !*tok || !len)
            break;
        seg->popToken(len);
        if(seg->isSentenceEnd()){
            do {
                char* kwtok = (char*)seg->peekToken(kwlen , kwsymlen,1);
                if(kwsymlen)
                    printf("[kw]%*.*s/x ",kwsymlen,kwsymlen,kwtok);
            }while(kwsymlen);
        }

        if(*tok == '\r')
            continue;
        if(*tok == '\n'){
            printf("\n");
            continue;
        }


        printf("%*.*s/x ",symlen,symlen,tok);
        {
            const char* thesaurus_ptr = seg->thesaurus(tok, symlen);
            while(thesaurus_ptr && *thesaurus_ptr) {
                len = strlen(thesaurus_ptr);
                printf("%*.*s/s ",len,len,thesaurus_ptr);
                thesaurus_ptr += len + 1; //move next
            }
        }
    }
	return 0;
}
Example No. 6
int main(int argc, const char* argv[])
{
    CooccurrenceMatrix comatrix;
    comatrix.WordNetTest();
    return 0;   // NOTE: early return for the WordNet test; everything below is unreachable

    enum Option
    {
        NONE,
        CLASSIFIER_TRAIN,
        CLASSIFIER_PREDICT,
        CLASSIFIER_VALIDATE,
        SEGMENTATION_TRAIN,
        SEGMENTATION_PREDICT,
        TOPIC_MODEL
    };

    Option opt = NONE;
    //string targetfiles;
    
    // ./TextAnalysis [--train|--predict|--validate|--seg-train|--seg-predict|--topic]
    if (argc == 2)
    {
        if (string(argv[1]) == "--train")
        {
            opt = CLASSIFIER_TRAIN;
        }
        else if(string(argv[1]) == "--predict")
        {
            opt = CLASSIFIER_PREDICT;
        }        
        else if(string(argv[1]) == "--validate")
        {
            opt = CLASSIFIER_VALIDATE;
        }
        else if(string(argv[1]) == "--seg-train")
        {
            opt = SEGMENTATION_TRAIN;
        }
        else if(string(argv[1]) == "--seg-predict")
        {
            opt = SEGMENTATION_PREDICT;
        }
        else if(string(argv[1]) == "--topic")
        {
            opt = TOPIC_MODEL;
        }
        else
        {
            cout << "Unknown command." << endl;
            return -1;
        }
        //targetfiles = argv[2];
        //cout << targetfiles << endl;
    }
    else
    {
        cout << "Unknown command." << endl;
        return -1;
    }

    string pathTriplets = "/home/csa/CAS2/wang296/Projects/tSegment/Data/Triplets/coreNLP/";
    /*if (argc == 3)
    {
        pathRoot = argv[1];     //   /home/csa/CAS2/Arash/StoryTextInfo/   
        pathTriplets = argv[2]; //   /home/csa/CAS2/wang296/Projects/tSegment/Data/Triplets/coreNLP/
    }
    */

    TextAnalysis cws;
    vector<StoryInfo> stories;
    
    ifstream documentList;
    documentList.open(FILE_NEWSLIST, ios::in);
    if (!documentList.is_open())
    {
        cout<<"News list cannot be opened."<<endl;
        return -1;
    }

    cout << "Triplets loading..." << endl;
    int document_id = 0;
    vector<int> trueSegments;
    vector<StoryInfo> tstories;
    while (!documentList.eof() && documentList.good())
    {
        char buffer[512];
        documentList.getline(buffer, 512);
        string base = buffer;
        base = base.substr(0 , 15);        
        cout << "Reading " << base << endl;
        string tripletsFilename = (pathTriplets + base +"_US_CNN_Newsroom.html.align.chunk_coref_triplets.dat");
        vector<Triplet> triplets = cws.ReadTripletsFile(tripletsFilename);
        
        // Ground truth
        tstories = cws.TripletsToStories(triplets);
        for (int i = 0; i < tstories.size(); i++)
        {
            trueSegments.push_back(tstories[i].num_sentences);
        }

        // To test
        vector<StoryInfo> tmp_stories = cws.TripletsToStories(triplets, (opt==SEGMENTATION_PREDICT));
        for (int i = 0; i < tmp_stories.size(); i++)
        {
            tmp_stories[i].document_id = document_id;
        }
        stories.insert(stories.end(), tmp_stories.begin(), tmp_stories.end());        
        document_id++;
        if (opt == SEGMENTATION_PREDICT)
        {            
            break;
        }
    }    
    
    // remove stop words
    stories = cws.Lemmatize(stories);

    if (opt == CLASSIFIER_TRAIN)
    {        
        // Train model
        stories = cws.Cleasing(stories);
        stories = cws.RemoveStopWords(stories);

        cout << "Training Classifier..." << endl;
        NaiveBayesClassifier classifier;
        classifier.Train(stories, 27);
        classifier.Save("output/model.txt");
    }
    else if (opt == CLASSIFIER_VALIDATE)
    {
        stories = cws.Cleasing(stories);
        stories = cws.RemoveStopWords(stories);

        cout << "Classifier cross validation..." << endl;
        cws.CrossValidation(stories);
    }
    else if (opt == CLASSIFIER_PREDICT)
    {
        stories = cws.RemoveStopWords(stories);
        // Predict by default
        NaiveBayesClassifier classifier("output/model.txt");
        for (int i = 0; i < stories.size(); i++)
        {
            PredictResult result = classifier.Predict(stories[i]);            
        }
    }
    else if (opt == SEGMENTATION_TRAIN)
    {
        stories = cws.Cleasing(stories);
        stories = cws.RemoveStopWords(stories);

        cout << "Training Segmenter..." << endl;
        Segmenter segmenter;
        segmenter.Train(stories, 27);
        segmenter.Save("output/model_segmenter.txt");
    }
    else if (opt == SEGMENTATION_PREDICT)
    {
        cout << "Predicting segmentation..." << endl;
        Segmenter segmenter("output/model_segmenter.txt");
        //for (int i = 0; i < stories.size(); i++)
        //{
            vector<Sentence> sentences = cws.StoryToSentences(stories[0]);
            segmenter.DoSegment(sentences);
            segmenter.CalculateTrueScore(sentences, trueSegments);
        //}

        cout << "------------" << endl;
        cout << "TRUE SEGMENTATION:" << endl;
        for (int i = 0; i < tstories.size(); i++)
        {
            cout << " " << tstories[i].num_sentences;
        }
        cout << endl;
        cout << "TRUE CATEGORY:" << endl;
        for (int i = 0; i < tstories.size(); i++)
        {
            cout << " " << tstories[i].category_id;
        }
        cout << endl;
    }
    else if (opt == TOPIC_MODEL)
    {
        // Build a Co-occurrence Matrix and see what it like.
        vector<Sentence> allSentences;
        for (int i = 0; i < 300; i++)
        {
            if (i >= stories.size())
            {
                break;
            }
            vector<Sentence> sentences = cws.StoryToSentences(stories[i]);
            allSentences.insert(allSentences.end(),
                    sentences.begin(), sentences.end());        
        }
        CooccurrenceMatrix comatrix;
        comatrix.BuildMatrix(allSentences);
    }

    // Clustering based on NP1 similarities.    
    //cws.CalculateSimilarity(stories);

  return 0;
}
void
SuturoPerceptionNode::segment(pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_in)
{
	// init segmentation
	Segmenter *segmenter;
	
	switch (pipelineData_->task_.task_type)
	{
		case suturo_msgs::Task::TASK_1:
		case suturo_msgs::Task::TASK_2:
		case suturo_msgs::Task::TASK_3:
		case suturo_msgs::Task::TASK_4:
		case suturo_msgs::Task::TASK_5:
			logger.logInfo("Using task 4 segmenter");
			segmenter = task4_segmenter_;
		break;
		case suturo_msgs::Task::TASK_6:
			logger.logInfo("Using task 6 segmenter");
			segmenter = task6_segmenter_;
		break;
		default:
			logger.logInfo("Using projection segmenter");
			// NOTE: allocated here but not deleted anywhere in this function
			segmenter = new ProjectionSegmenter();
		break;
	}
	if (!segmenter)
	{
		logger.logError("segmenter creation failed!");
		return;
	}
	
	// start segmentation
	bool segmentation_result = segmenter->segment(cloud_in, pipelineData_, pipelineObjects_);
	
	if (segmentation_result)
	{
		processing_ = false;
	}
	else
	{
		logger.logInfo("segmentation failed");
	}
	std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> projected_points_clusters =
		segmenter->getProjectionClusters();
	std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> projected_point_hulls =
		segmenter->getProjectionClusterHulls();
	for (int i = 0; i < projected_points_clusters.size(); i++)
	{
		std::stringstream ss;
		ss << i;
		ph_.publish_pointcloud(PROJECTED_CLUSTERS_PREFIX_TOPIC + ss.str(), 
				projected_points_clusters[i], DEPTH_FRAME);
	}
	for (int i = 0; i < projected_point_hulls.size(); i++)
	{
		std::stringstream ss;
		ss << i;
		ph_.publish_pointcloud(PROJECTED_CLUSTER_HULLS_PREFIX_TOPIC + ss.str(), 
				projected_point_hulls[i], DEPTH_FRAME);
	}
	// Publish the segmentation debug topics
	ph_.publish_pointcloud(TABLE_TOPIC, segmenter->getTablePointCloud()
				, DEPTH_FRAME);

	ph_.publish_pointcloud(DOWNSAMPLED_CLOUD, segmenter->getDownsampledPointCloud()
				, DEPTH_FRAME);
	ph_.publish_pointcloud(POINTS_ABOVE_TABLE_CLOUD, segmenter->getPointsAboveTable()
				, DEPTH_FRAME);

	ph_.publish_pointcloud(PROJECTED_POINTS_TOPIC, segmenter->getProjectedPoints()
				, DEPTH_FRAME);
}
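In the default branch above, the ProjectionSegmenter is allocated with new and, as far as this function shows, never deleted, while the other branches hand back member segmenters that must not be deleted. One way to express that mixed ownership is to let a std::unique_ptr own only the fallback object. The snippet below is a self-contained sketch of that pattern (C++11) with stand-in types; it is not the actual SuturoPerceptionNode code.

#include <iostream>
#include <memory>

// Stand-ins for the real classes, just to make the ownership pattern runnable.
struct Segmenter {
	virtual ~Segmenter() {}
	virtual const char *name() const { return "member segmenter"; }
};
struct ProjectionSegmenter : Segmenter {
	const char *name() const { return "projection segmenter"; }
};

int main()
{
	Segmenter member_segmenter;              // stands in for task4_segmenter_ etc.
	bool use_member = false;                 // pretend we hit the default case

	Segmenter *segmenter = NULL;
	std::unique_ptr<Segmenter> owned;        // owns the fallback only

	if (use_member) {
		segmenter = &member_segmenter;       // borrowed, never deleted here
	} else {
		owned.reset(new ProjectionSegmenter());
		segmenter = owned.get();
	}

	std::cout << "using " << segmenter->name() << "\n";
	return 0;  // 'owned' deletes the ProjectionSegmenter automatically
}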
Example No. 8
int main(int argc, char **argv) {
  ParseCommandLineFlags(&argc, &argv, true);
  
  SpeechProblemSet *speech_problems = 
    SpeechProblemSet::ReadFromFile(FLAGS_base_directory + FLAGS_problem_name + "/pho", 
                                   FLAGS_base_directory + FLAGS_problem_name + "/utt", 
                                   FLAGS_base_directory + FLAGS_problem_name + "/cent"); 
  const ClusterSet &cluster_problems = speech_problems->MakeClusterSet();
  
  // Primal coordinate descent.
  if (FLAGS_algorithm == "pc") {

    SpeechKMeans kmeans(*speech_problems);

    if (FLAGS_starting_model != "") {
      // Load in a model to start from. 
      SpeechSolution solution(cluster_problems);
      speech::SpeechSolution solution_buf;
      fstream input(FLAGS_starting_model.c_str(), ios::in | ios::binary);
      solution_buf.ParseFromIstream(&input);  
      solution.FromProtobuf(solution_buf);
      vector<vector <DataPoint> > centers;
      centers.resize(cluster_problems.num_modes());
      for (int mode = 0; mode < cluster_problems.num_modes(); ++mode) {
        centers[mode].resize(cluster_problems.num_types());
        for (int type = 0; type < cluster_problems.num_types(); ++type) {
          centers[mode][type] = 
            solution.TypeToSpecial(type, mode);
        }
      }
      kmeans.SetCenters(centers);
      input.close();
    } else { 
      kmeans.InitializeCenters();
    }

    kmeans.set_use_medians(FLAGS_pc_use_medians);
    for (int i = 0; i < 100; ++i) {

      kmeans.set_use_isotropic(FLAGS_pc_use_isotropic);

      if (FLAGS_pc_use_gmm) { 
        kmeans.set_use_gmm();
      }
      if (FLAGS_pc_unsupervised) { 
        kmeans.set_use_unsup();
      }
      kmeans.Run(100);
      
      stringstream buf;
      if (FLAGS_starting_model == "") {
        buf << "results/" << FLAGS_problem_name << "_pc_solution_"<< "_" 
            << FLAGS_output_name << "_" << i*10 + 10;
      } else {
        buf << "results/" << FLAGS_problem_name << "_pc_solution_starting_" 
            << FLAGS_output_name << "_"  << i*10 + 10;
      }

      fstream output(buf.str().c_str(), ios::out | ios::binary);
      SpeechSolution *solution = kmeans.MakeSolution();

      speech::SpeechSolution sol;
      solution->ToProtobuf(sol, *speech_problems);
      sol.SerializeToOstream(&output);
      output.close();
    }
  } else if (FLAGS_algorithm == "prune") {
    Segmenter segmenter;
    fstream output("/tmp/utterances", ios::out | ios::binary);
    speech::UtteranceSet all_utt;
    for (int i = 0; i < speech_problems->utterance_size(); ++i) {
      cerr << "Running " << endl;
      Utterance *utterance = segmenter.Run(speech_problems->MakeClusterSet().problem(i), 
                                           speech_problems->utterance(i));
      speech::Utterance *utterance_buf = all_utt.add_utterances();
      utterance->ToProtobuf(utterance_buf);     
      delete utterance;
    }
    all_utt.SerializeToOstream(&output);
    output.close();
  } else if (FLAGS_algorithm == "lr") {
    SpeechSubgradient *speech_subgrad = new SpeechSubgradient(*speech_problems);
    SpeechKMeans kmeans(*speech_problems);

    for (uint i = 0; i < 5000; ++i) {
      speech_subgrad->MPLPRound(i);
      cerr << "INFO: Round: " << i << endl;

      // if (i % 10 == 5) {
      //   cerr << "INFO: Running kmeans" << endl;
      //   kmeans.SetCenters(speech_subgrad->centers());
      //   kmeans.set_use_medians(true);
      //   kmeans.Run(2);
        
      //   // Write out kmeans solution
      //   stringstream buf;
      //   buf << "results/" << FLAGS_problem_name << "_lr_solution_" << FLAGS_output_name << " " << i;
      //   fstream output(buf.str().c_str(), ios::out | ios::binary);
      //   SpeechSolution *solution = kmeans.MakeSolution();
      //   speech::SpeechSolution sol;
      //   solution->ToProtobuf(sol, *speech_problems);
      //   sol.SerializeToOstream(&output);
      //   output.close();
      // }
    }
  } else if (FLAGS_algorithm == "lp") {
    AlignmentLP alignment_lp(*speech_problems);
    SpeechSolution solution(cluster_problems);
    alignment_lp.ConstructLP(&solution);
    vector<DataPoint > centroids;
    double score = speech_problems->MaximizeMedians(solution, &centroids);
    cerr << score << endl;
  }
}