void sdError_P(const char* str) { cout << pstr("error: "); cout << pgm(str) << endl; if (card.errorCode()) { cout << pstr("SD error: ") << hex << int(card.errorCode()); cout << ',' << int(card.errorData()) << dec << endl; } while (1); }
std::unique_ptr<output> preprocess(std::unique_ptr<input> in) { cv::Mat greyscale = cv::imread(config->input_filename(), CV_LOAD_IMAGE_GRAYSCALE); cv::medianBlur(greyscale, greyscale, 9); std::string workspace = config->workspace_path(); cv::imwrite(workspace + "smooth.pgm", greyscale); { // because OpenCV sucks at saving pbm // or I suck at OpenCV... qgar::PgmFile pgm((workspace + "smooth.pgm").c_str()); qgar::PbmFile pbm((workspace + "smooth.pbm").c_str()); pbm.write(qgar::ThresBinaryImage(pgm.read(), 170)); } try { executor::os_proxy().call("QAtextGraphicSeparation.exe", { "-in", workspace + "smooth.pbm", "-outxt", workspace + "text.pbm", "-outg", workspace + "graphics.pbm", "-outu", workspace + "unknown.pbm", "-area", "10", "-elbbox", "100", "-elmaer", "2", "-dmaer", ".5" }); } catch (executor::system_call_failure c) { config->log().error_log(std::string("System call failure: ") + c.what() + "\nContinuing, but expect crashes"); } auto skeleton = skeletize_zhang_suen(greyscale); cv::imwrite(config->workspace_path() + "whole_skeleton.pgm", skeleton); auto wide_lines = pick_wide_lines(workspace + "graphics.pbm"); cv::imwrite(config->workspace_path() + "wide_lines_only.pgm", wide_lines); auto wide_skeleton = skeletize_zhang_suen(wide_lines); cv::imwrite(config->workspace_path() + "wide_lines_only_skeleton.pgm", wide_skeleton); std::unique_ptr<output> out(std::make_unique<output>()); out->wide_lines_filename = config->workspace_path() + "wide_lines_only_skeleton.pgm"; out->original_filename = config->workspace_path() + "smooth.pgm"; return out; }
////
//// class ConsTracking
////
// Run conservation tracking over the traxel store: build the energy
// functions (detection/division/transition), construct the hypotheses
// graph, run the ConservationTracking reasoner, and (optionally) resolve
// mergers. Returns the per-timestep event lists describing the result.
vector<vector<Event> > ConsTracking::operator()(TraxelStore& ts) {
    cout << "-> building energy functions " << endl;
    double detection_weight = 10;
    Traxels empty;
    boost::function<double(const Traxel&, const size_t)> detection, division;
    boost::function<double(const double)> transition;

    // The classifier prior is used iff the first traxel already carries a
    // "detProb" feature (i.e. an external classifier supplied probabilities).
    bool use_classifier_prior = false;
    Traxel trax = *(ts.begin());
    FeatureMap::const_iterator it = trax.features.find("detProb");
    if(it != trax.features.end()) {
        use_classifier_prior = true;
    }
    if (use_classifier_prior) {
        LOG(logINFO) << "Using classifier prior";
        detection = NegLnDetection(detection_weight);
    } else if (use_size_dependent_detection_) {
        // Size-dependent prior: per-state means/variances either derived from
        // the average object size or taken from the supplied means_/sigmas_.
        LOG(logINFO) << "Using size dependent prior";
        vector<double> means;
        if (means_.size() == 0 ) {
            for(int i = 0; i<max_number_objects_+1; ++i) {
                means.push_back(i*avg_obj_size_);
                LOG(logINFO) << "mean[" << i << "] = " << means[i];
            }
        } else {
            assert(sigmas_.size() != 0);
            for(int i = 0; i<max_number_objects_+1; ++i) {
                means.push_back(means_[i]);
                LOG(logINFO) << "mean[" << i << "] = " << means[i];
            }
        }
        vector<double> sigma2;
        if (sigmas_.size() == 0) {
            // Default variance (avg_obj_size/2)^2, floored to stay positive.
            double s2 = (avg_obj_size_*avg_obj_size_)/4.0;
            if (s2 < 0.0001) {
                s2 = 0.0001;
            }
            for(int i = 0; i<max_number_objects_+1; ++i) {
                sigma2.push_back(s2);
                LOG(logINFO) << "sigma2[" << i << "] = " << sigma2[i];
            }
        } else {
            for (int i = 0; i<max_number_objects_+1; ++i) {
                sigma2.push_back(sigmas_[i]);
                LOG(logINFO) << "sigma2[" << i << "] = " << sigma2[i];
            }
        }
        // Compute and attach a "detProb" feature (from the "count" volume
        // feature) to every traxel in the store.
        for(TraxelStore::iterator tr = ts.begin(); tr != ts.end(); ++tr) {
            Traxel trax = *tr;
            FeatureMap::const_iterator it = trax.features.find("count");
            if(it == trax.features.end()) {
                throw runtime_error("get_detection_prob(): cellness feature not in traxel");
            }
            double vol = it->second[0];
            vector<double> detProb;
            detProb = computeDetProb(vol,means,sigma2);
            feature_array detProbFeat(feature_array::difference_type(max_number_objects_+1));
            for(int i = 0; i<=max_number_objects_; ++i) {
                double d = detProb[i];
                // Clamp to [0.01, 0.99] so -ln(p) stays finite on both ends.
                if (d < 0.01) {
                    d = 0.01;
                } else if (d > 0.99) {
                    d = 0.99;
                }
                LOG(logDEBUG2) << "detection probability for " << trax.Id << "[" << i << "] = " << d;
                detProbFeat[i] = d;
            }
            trax.features["detProb"] = detProbFeat;
            ts.replace(tr, trax);
        }
        detection = NegLnDetection(detection_weight); // weight 1
    } else {
        LOG(logINFO) << "Using hard prior";
        // assume a quasi geometric distribution
        vector<double> prob_vector;
        double p = 0.7; // e.g. for max_number_objects_=3, p=0.7: P(X=(0,1,2,3)) = (0.027, 0.7, 0.21, 0.063)
        double sum = 0;
        for(double state = 0; state < max_number_objects_; ++state) {
            double prob = p*pow(1-p,state);
            prob_vector.push_back(prob);
            sum += prob;
        }
        // Remaining probability mass is assigned to the zero-object state.
        prob_vector.insert(prob_vector.begin(), 1-sum);
        detection = bind<double>(NegLnConstant(detection_weight,prob_vector), _2);
    }
    LOG(logDEBUG1) << "division_weight_ = " << division_weight_;
    LOG(logDEBUG1) << "transition_weight_ = " << transition_weight_;
    division = NegLnDivision(division_weight_);
    transition = NegLnTransition(transition_weight_);

    cout << "-> building hypotheses" << endl;
    SingleTimestepTraxel_HypothesesBuilder::Options builder_opts(1, // max_nearest_neighbors
                                                                 max_dist_,
                                                                 true, // forward_backward
                                                                 with_divisions_, // consider_divisions
                                                                 division_threshold_
                                                                 );
    SingleTimestepTraxel_HypothesesBuilder hyp_builder(&ts, builder_opts);
    HypothesesGraph* graph = hyp_builder.build();

    LOG(logDEBUG1) << "ConsTracking(): adding distance property to edges";
    HypothesesGraph& g = *graph;
    g.add(arc_distance()).add(tracklet_intern_dist()).add(node_tracklet()).add(tracklet_intern_arc_ids()).add(traxel_arc_id());
    property_map<arc_distance, HypothesesGraph::base_graph>::type& arc_distances = g.get(arc_distance());
    property_map<node_traxel, HypothesesGraph::base_graph>::type& traxel_map = g.get(node_traxel());

    // Optical correction is enabled iff traxels carry a "com_corrected" feature.
    bool with_optical_correction = false;
    Traxel some_traxel = (*traxel_map.beginValue());
    if (some_traxel.features.find("com_corrected") != some_traxel.features.end()) {
        LOG(logINFO) << "optical correction enabled";
        with_optical_correction = true;
    }

    // Store the (possibly optically corrected) spatial distance on every arc.
    for(HypothesesGraph::ArcIt a(g); a!=lemon::INVALID; ++a) {
        HypothesesGraph::Node from = g.source(a);
        HypothesesGraph::Node to = g.target(a);
        Traxel from_tr = traxel_map[from];
        Traxel to_tr = traxel_map[to];
        if (with_optical_correction) {
            arc_distances.set(a, from_tr.distance_to_corr(to_tr));
        } else {
            arc_distances.set(a, from_tr.distance_to(to_tr));
        }
    }

    //border_width_ is given in normalized scale, 1 corresponds to a maximal distance of dim_range/2
    boost::function<double(const Traxel&)> appearance_cost_fn, disappearance_cost_fn;
    LOG(logINFO) << "using border-aware appearance and disappearance costs, with absolute margin: " << border_width_;
    appearance_cost_fn = SpatialBorderAwareWeight(appearance_cost_,
                                                  border_width_,
                                                  false, // true if relative margin to border
                                                  fov_);
    disappearance_cost_fn = SpatialBorderAwareWeight(disappearance_cost_,
                                                     border_width_,
                                                     false, // true if relative margin to border
                                                     fov_);

    cout << "-> init ConservationTracking reasoner" << endl;
    ConservationTracking pgm(
        max_number_objects_,
        detection,
        division,
        transition,
        forbidden_cost_,
        ep_gap_,
        with_tracklets_,
        with_divisions_,
        disappearance_cost_fn,
        appearance_cost_fn,
        true, // with_misdetections_allowed
        true, // with_appearance
        true, // with_disappearance
        transition_parameter_,
        with_constraints_
        );

    cout << "-> formulate ConservationTracking model" << endl;
    pgm.formulate(*graph);

    cout << "-> infer" << endl;
    pgm.infer();

    cout << "-> conclude" << endl;
    pgm.conclude(*graph);

    cout << "-> storing state of detection vars" << endl;
    last_detections_ = state_of_nodes(*graph);

    cout << "-> pruning inactive hypotheses" << endl;
    prune_inactive(*graph);

    cout << "-> constructing unresolved events" << endl;
    boost::shared_ptr<std::vector< std::vector<Event> > > ev = events(*graph);

    // Optionally split merged detections into individual objects and merge the
    // resulting multi-frame move events into the unresolved event list.
    if (max_number_objects_ > 1 && with_merger_resolution_ && all_true(ev->begin(), ev->end(), has_data<Event>)) {
        cout << "-> resolving mergers" << endl;
        MergerResolver m(graph);
        FeatureExtractorMCOMsFromMCOMs extractor;
        DistanceFromCOMs distance;
        FeatureHandlerFromTraxels handler(extractor, distance);
        calculate_gmm_beforehand(*graph, 1, number_of_dimensions_);
        m.resolve_mergers(handler);
        HypothesesGraph g_res;
        resolve_graph(*graph, g_res, transition, ep_gap_, with_tracklets_, transition_parameter_, with_constraints_);
        prune_inactive(*graph);

        cout << "-> constructing resolved events" << endl;
        boost::shared_ptr<std::vector< std::vector<Event> > > multi_frame_moves = multi_frame_move_events(*graph);

        cout << "-> merging unresolved and resolved events" << endl;
        return *merge_event_vectors(*ev, *multi_frame_moves);
    } else {
        return *ev;
    }
}
int main(int argc, char *argv[]) { float *data; float *out; char *mp3_file; unsigned int srate; unsigned int len = 0; unsigned int num_octaves; unsigned int out_len; char *cmd; TagLib_File *f; if(argc <= 1) { printf("USAGE: %s <MP3 File>\n", argv[0]); return 0; } mp3_file = argv[1]; f = taglib_file_new(mp3_file); if(f == NULL) { printf("Cannot open %s\n",mp3_file); exit(-1); } srate = taglib_audioproperties_samplerate(taglib_file_audioproperties(f)); taglib_file_free(f); if(srate == 0) { printf("Sample rate in tag == 0, cannot proceed\n"); exit(-1); } printf("Sampling rate = %d\n",srate); cmd = calloc(512, sizeof(char)); sprintf(cmd, "spectgen -r -o ./tmp.raw \"%s\"",mp3_file); if(system(cmd) != 0) { printf("spectgen failed!\n"); printf("Cmd = %s\n",cmd); exit(-1); } free(cmd); cmd = NULL; if(get_file(&data, &len, "./tmp.raw")) { printf("Getting file failed!\n"); printf("Len = %d\n",len); exit(-1); } if(len < srate) { printf("File too short!\n"); exit(-1); } printf("Number of samples = %d\n", len); printf("Doing gabor!\n"); if(cwt_gabor(&num_octaves, &out_len, &out, data, len, srate, 1.0/16.0)) { printf("Cwt failed!\n"); exit(-1); } free(data); unlink("./tmp.raw"); #if 0 { int i,j; for(i = 0; i < out_len; i++) { for(j = 0; j < 12 * (num_octaves - 1); j++) { printf("%.3f ", out[(j * (num_octaves - 1)) + i]); } printf("\n"); } } exit(-1); #endif pgm("test.pgm", out, out_len, 12 * (num_octaves - 2), BACKGROUND_WHITE, GREYSCALE, COL_NORMALIZATION); free(out); return 0; }