Example #1
  int segment(const char* str, std::vector<std::string> & words) {
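    // Segment the raw input string into words: preprocess, extract features
    // with both models, score, Viterbi-decode and collect the words.
    // Returns the number of words (0 if preprocessing yields nothing).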
    ltp::framework::ViterbiFeatureContext ctx, bs_ctx;
    ltp::framework::ViterbiScoreMatrix scm;
    ltp::framework::ViterbiDecoder decoder;
    ltp::segmentor::Instance inst;
 
    int ret = preprocessor.preprocess(str, inst.raw_forms, inst.forms,
      inst.chartypes);

    if (-1 == ret || 0 == ret) {
      words.clear();
      return 0;
    }

    ltp::segmentor::SegmentationConstrain con;
    con.regist(&(inst.chartypes));
    build_lexicon_match_state(lexicons, &inst);
    extract_features(inst, model, &ctx, false);
    extract_features(inst, bs_model, &bs_ctx, false);
    calculate_scores(inst, (*bs_model), (*model), bs_ctx, ctx, true, &scm);

    // A new decoder is allocated per call so that the segmentor supports
    // multithreaded decoding. This modification was committed by niuox.
    decoder.decode(scm, con, inst.predict_tagsidx);
    build_words(inst.raw_forms, inst.predict_tagsidx, words);

    return words.size();
  }
Example #2
int main(int argc, char **argv) {
  if (argc < 3) {
    std::vector<float> histogram = extract_features("in.jpg");
    save_histogram("in.jpg", "out.txt", histogram);
  } else {
    std::vector<float> histogram = extract_features(argv[1]);
    save_histogram(argv[1], argv[2], histogram);
    std::cout << argv[2] << "\n";
  }
  return 0;
}
Example #3
int main(int argc, char *argv[]){

    // Variables for directory handling
    DIR *directory;
    struct dirent *file_struct;

    // Variables for file handling
    char *dic_path = argv[1];
    char file_path[500];

    // Variables for feature handling
    double gray_histo_array[100];
    double h_entropy_array[100];
    int feature_count = 0;
    double gray_mean, h_mean, gray_stdDev, h_stdDev, correlation;

    // Checks argument and opens training directory
    if(argc != 2){
        printf("Wrong number of arguments\n");
        printf("Usage: $ %s directory-path\n", argv[0]);
        exit(EXIT_FAILURE);
    }

    if( (directory = opendir(dic_path)) == NULL){
        printf("Error while opening directory %s\n", dic_path);
        exit(EXIT_FAILURE);
    }

    /* Iterates through all files in the training directory
       and extracts their features
    */
    while( (file_struct = readdir(directory)) != NULL )
        // . and .. are not valid file names
        if( strcmp("..", file_struct -> d_name) && strcmp(".", file_struct -> d_name)
            && feature_count < 100 ){   // stay within the fixed-size feature arrays
            // Constructs the file path (dic_path is expected to end with '/')
            strcpy(file_path, dic_path);
            strcat(file_path, file_struct -> d_name);
            // Calculates features
            extract_features(file_path, &gray_histo_array[feature_count], &h_entropy_array[feature_count]);
            ++feature_count;
        }

    closedir(directory);

    gray_mean = calc_mean(gray_histo_array, feature_count);
    h_mean = calc_mean(h_entropy_array, feature_count);

    gray_stdDev = calc_stdDev(gray_histo_array, feature_count, gray_mean);
    h_stdDev = calc_stdDev(h_entropy_array, feature_count, h_mean);

    correlation = calc_correlation(gray_histo_array, h_entropy_array, gray_mean, h_mean, gray_stdDev, h_stdDev, feature_count);

    printf("Mean: %f and %f\n", gray_mean, h_mean);
    printf("Standard dev: %f and %f\n", gray_stdDev, h_stdDev);
    printf("Correlation %f\n", correlation);
    printf("Class name : %f , %f , %f , %f , %f \n", gray_mean, h_mean, gray_stdDev, h_stdDev, correlation);

    return 0;
}
Example #4
void Postagger::build_feature_space(void) {
    // Builds the feature space; this is a wrapper around
    // featurespace.build_feature_space
    int N = Extractor::num_templates();
    int L = model->num_labels();
    model->space.set_num_labels(L);

    for (int i = 0; i < train_dat.size(); ++ i) {
        extract_features(train_dat[i], true);
        if ((i + 1) % train_opt.display_interval == 0) {
            TRACE_LOG("[%d] instances are extracted.", (i+1));
        }
    }

    TRACE_LOG("[%d] instances are extracted.", (int)train_dat.size());
}
Example #5
vector<Cand> Decoder::expand(const Cand &cand)
{
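	// Expand a partial tagging candidate: append every valid next tag for
	// the current token and accumulate the local model score.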
	vector<Cand> candvec;
	int cur_tok_id = m_token_matrix_ptr->at(cur_pos).at(0);
	int last_tag = cand.taglist.at(cand.taglist.size()-1);
	vector<int> validtagset = m_model->get_validtagset(cur_tok_id,last_tag);

	for (const auto &e_tag : validtagset)
	{
		Cand cand_new;
		cand_new.taglist = cand.taglist;
		cand_new.taglist.push_back(e_tag);
		vector<vector<int> > local_features = extract_features(cand_new.taglist,cand_new.taglist.size()-1);
		double local_score = m_model->cal_local_score(local_features);
		cand_new.acc_score = cand.acc_score+local_score;
		candvec.push_back(cand_new);
	}
	return candvec;
}
Example #6
int main()
{
    // Load a grayscale stereo pair and run a small feature-based
    // reconstruction pipeline: feature extraction, matching, pose
    // estimation and reconstruction.
    Mat imgL = imread("aloeL.jpg", IMREAD_GRAYSCALE);
    Mat imgR = imread("aloeR.jpg", IMREAD_GRAYSCALE);

    if(imgL.empty() || imgR.empty()){
        cout << "read image error" << endl;
        return -1;
    }
	
//    double threshold_value = 0.0001;
//    harris_detection(image,threshold_value);
	
//    int minHessian = 80;
 //   sift_detection(image,minHessian);

//    akaze_detection(image);

    vector<string> image_names = {"aloeL.jpg","aloeR.jpg"};
    vector<vector<KeyPoint>> key_points_for_all;
    vector<Mat> descriptor_for_all;
    vector<vector<Vec3b>> colors_for_all;
	
    Mat query;
    Mat train;
    vector<DMatch> matches;

    // Inputs for pose estimation and reconstruction; the camera matrix K,
    // the matched point sets p1/p2 and the descriptor Mats query/train
    // still have to be filled in before these calls.
    Mat K, R, T, mask, structure;
    vector<Point2f> p1, p2;

    extract_features(image_names, key_points_for_all, descriptor_for_all, colors_for_all);
    match_features(query, train, matches);
    find_transform(K, p1, p2, R, T, mask);
    reconstruct(K, R, T, p1, p2, structure);

    return 0;
}
Example #7
static void adjust_weights(
        const uint8_t **field_buf,
        const size_t *field_len,
        size_t n_fields,
        size_t n_items,
        real *weights,
        size_t weights_len,
        const label *labels,
        real weight_diff,
        double t,
        double *average_weights,
        int use_dropout,
        feat_hash_t dropout_seed)
{
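    /* Perceptron-style update: for each item, hash its features, optionally
       skip some of them via hash-based dropout, and add weight_diff to the
       surviving weights, while lazily maintaining the running sum of each
       weight over time (sum and last-update time pairs in average_weights). */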
    size_t i, j;
    feat_hash_t invariant_hashes[N_INVARIANTS*n_items];
    feat_hash_t feature_hashes[N_FEATURES];
    feat_hash_t mask = (feat_hash_t)weights_len - 1;

    extract_invariant(
            field_buf, field_len, n_fields, n_items, invariant_hashes);

    for (i=0; i<n_items; i++) {
        extract_features(
                labels, labels[i], i, n_items,
                invariant_hashes, feature_hashes);
        for (j=0; j<N_FEATURES; j++) {
            const feat_hash_t h = feature_hashes[j];
            if ((!use_dropout) ||
                (hash32_mix(dropout_seed, h) >= DROPOUT_CONSTANT)) {
                const size_t idx = h & mask;
                average_weights[idx*2] +=
                    (t - average_weights[idx*2+1]) * (double)weights[idx];
                average_weights[idx*2+1] = t;
                weights[idx] += weight_diff;
            }
        }
    }
}
Example #8
void Postagger::evaluate(void) {
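    // Run the tagger over the holdout file and report tagging accuracy
    // (correctly predicted tags / total tags).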
    const char * holdout_file = train_opt.holdout_file.c_str();

    ifstream ifs(holdout_file);

    if (!ifs) {
        ERROR_LOG("Failed to open holdout file.");
        return;
    }

    PostaggerReader reader(ifs, true);
    Instance * inst = NULL;

    int num_recalled_tags = 0;
    int num_tags = 0;

    while ((inst = reader.next())) {
        int len = inst->size();
        inst->tagsidx.resize(len);
        for (int i = 0; i < len; ++ i) {
            inst->tagsidx[i] = model->labels.index(inst->tags[i]);
        }

        extract_features(inst, false);
        calculate_scores(inst, true);
        decoder->decode(inst);

        num_recalled_tags += inst->num_corrected_predicted_tags();
        num_tags += inst->size();

        delete inst;
    }

    double p = (double)num_recalled_tags / num_tags;

    TRACE_LOG("P: %lf ( %d / %d )", p, num_recalled_tags, num_tags);
    return;
}
Example #9
int main(int argc, char *argv[]){
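    // Load per-class statistics from training.stats, extract the two image
    // features, and report the class with the highest bi-Gaussian likelihood.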

    FILE *stats_file;
    char *image_name = argv[1];
//    char input_string[100];
    int class_counter;
    double gray_feature, h_feature;
    int max_likelihood;

    // Checks arguments
    if(argc != 2){
        if(argc == 1)
            printf("No input image given\n");
        else
            printf("Wrong number of arguments\n");
        printf("Usage: $ %s input_image_file\n", argv[0]);
        exit(EXIT_FAILURE);
    }

    // Opens training.stats file
    if( (stats_file = fopen("training.stats", "r")) == NULL){
        printf("Unable to open/locate training.stats file\n");
        exit(EXIT_FAILURE);
    }

    class_counter = count_lines(stats_file);
    bigauss_stats class_array[class_counter];
    load_stats(stats_file, class_array, class_counter);
    fclose(stats_file);

    extract_features(image_name, &gray_feature, &h_feature);
    max_likelihood = bigauss_likelihood(class_array, class_counter, gray_feature, h_feature);

    printf("Max bi-gaussian likelihood for %s class\n", class_array[max_likelihood].class_name);

    return 0;
}
Example #10
void Postagger::test(void) {
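    // Load the trained model, tag every instance of the test file, write the
    // results, and report accuracy and elapsed time.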
    const char * model_file = test_opt.model_file.c_str();
    ifstream mfs(model_file, std::ifstream::binary);

    if (!mfs) {
        ERROR_LOG("Failed to load model");
        return;
    }

    model = new Model;
    if (!model->load(mfs)) {
        ERROR_LOG("Failed to load model");
        return;
    }

    TRACE_LOG("Number of labels                 [%d]", model->num_labels());
    TRACE_LOG("Number of features               [%d]", model->space.num_features());
    TRACE_LOG("Number of dimensions             [%d]", model->space.dim());

    const char * test_file = test_opt.test_file.c_str();

    ifstream ifs(test_file);

    if (!ifs) {
        ERROR_LOG("Failed to open test file.");
        return;
    }

    decoder = new Decoder(model->num_labels());
    PostaggerReader reader(ifs, true);
    PostaggerWriter writer(cout);
    Instance * inst = NULL;

    int num_recalled_tags = 0;
    int num_tags = 0;

    double before = get_time();

    while ((inst = reader.next())) {
        int len = inst->size();
        inst->tagsidx.resize(len);
        for (int i = 0; i < len; ++ i) {
            inst->tagsidx[i] = model->labels.index(inst->tags[i]);
        }

        extract_features(inst);
        calculate_scores(inst, true);
        decoder->decode(inst);

        build_labels(inst, inst->predicted_tags);
        writer.write(inst);
        num_recalled_tags += inst->num_corrected_predicted_tags();
        num_tags += inst->size();

        delete inst;
    }

    double after = get_time();

    double p = (double)num_recalled_tags / num_tags;

    TRACE_LOG("P: %lf ( %d / %d )", p, num_recalled_tags, num_tags);
    TRACE_LOG("Elapsed time %lf", after - before);

    sleep(1000000);
    return;
}
Example #11
void Pipeline::run(const bool save_clouds, const bool show_clouds)
{
	Logger _log("Pipeline");

	/**
	 * Stage 0: Load images from file
	 */
	Images images;
	load_images(folder_path, images);


	/**
	 * Stage 1: Detect features in loaded images
	 */
	CamFrames cam_Frames;
	DescriptorsVec descriptors_vec;
	extract_features(images, cam_Frames, descriptors_vec);

	// Free Image.gray
	for (int i = 0; i < images.size(); i++)
		const_cast<cv::Mat&>(images[i].gray).release();


	/**
	 * Stage 2: Calculate descriptors and find image pairs through matching
	 */
	ImagePairs image_pairs;
	find_matching_pairs(images, cam_Frames, descriptors_vec, image_pairs);


	// Free some memory
	DescriptorsVec().swap(descriptors_vec);



	/**
	 * Stage 3: Compute pairwise R and t
	 */
	register_camera(image_pairs, cam_Frames);


	/**
	 * Stage 4: Construct associativity matrix and spanning tree
	 */
	Associativity assocMat(cam_Frames.size());
	for (int p = 0; p < image_pairs.size(); p++)
	{
		ImagePair* pair = &image_pairs[p];
		int i = pair->pair_index.first,
		    j = pair->pair_index.second;

		if (pair -> R.empty()) continue;

		assocMat(i, j) = pair;
		assocMat(j, i) = pair;

		assert(assocMat(i, j) == assocMat(j, i));
		assert(assocMat(i, j)->pair_index.first  == i &&
		       assocMat(i, j)->pair_index.second == j);
	}

	Associativity tree;
	const int camera_num = build_spanning_tree(image_pairs, assocMat, tree);


	/**
	 * Stage 5: Compute global Rs and ts
	 */
	CameraPoses gCameraPoses;
	glo_cam_poses(images, gCameraPoses, image_pairs, tree);


	/**
	 * Stage 6: Find and cluster depth points from local camera frame to global camera frame
	 */
	PointClusters pointClusters;
	PointMap pointMap;
	find_clusters(assocMat, gCameraPoses, cam_Frames, pointClusters, pointMap);

	// Free some memory
	ImagePairs().swap(image_pairs);


	/**
	 * Stage 7: get center of mass from clusters
	 */
	PointCloud pointCloud(pointClusters.size());
	find_CoM(pointClusters, pointCloud);

	// Free pointClusters
	for (int i = 0; i < pointClusters.size(); i++)
		PointCluster().swap(pointClusters[i]);
	PointClusters().swap(pointClusters);


	// Save cloud before BA
	Viewer viewer("Before BA");
	auto cloud  = viewer.createPointCloud(images, gCameraPoses, cameraMatrix);
	int  n      = cloud->points.size();
	auto time   = ptime::second_clock::local_time();
	auto tstamp = ptime::to_iso_string(time);
	auto folder = fs::path(folder_path).filename().string();
	auto fname  = (fmt("%s_%s_%d_noBA.pcd") % folder % tstamp % n).str();

	if (save_clouds)
		viewer.saveCloud(cloud, fname);
	if (show_clouds)
		viewer.showCloudPoints(cloud, false);


	/**
	 * Stage 8: Bundle Adjustment
	 */
	bundle_adjustment(pointMap, cam_Frames, false, gCameraPoses, pointCloud);
	_log.tok();

	/**
	 * Show calculated point cloud
	 */
	Viewer viewer_ba("After BA no Depth");
	cloud = viewer_ba.createPointCloud(images, gCameraPoses, cameraMatrix);
	n     = cloud->points.size();
	fname = (fmt("%s_%s_%d_BA_noD.pcd") % folder % tstamp % n).str();


	if (save_clouds)
		viewer_ba.saveCloud(cloud, fname);
	if (show_clouds)
		viewer_ba.showCloudPoints(cloud,false);

	bundle_adjustment(pointMap, cam_Frames, true, gCameraPoses, pointCloud);

	// Free some memory
	PointMap().swap(pointMap);
	CamFrames().swap(cam_Frames);
	PointCloud().swap(pointCloud);

	/**
	 * Show calculated point cloud
	 */
	Viewer viewer_baD("After BA with Depth");
	cloud = viewer_baD.createPointCloud(images, gCameraPoses, cameraMatrix);
	n     = cloud->points.size();
	fname = (fmt("%s_%s_%d_BA_D.pcd") % folder % tstamp % n).str();

	// Free some memory
	Images().swap(images);
	CameraPoses().swap(gCameraPoses);

	if (save_clouds)
		viewer_baD.saveCloud(cloud, fname);
	if (show_clouds)
		viewer_baD.showCloudPoints(cloud);



}