void ListKeyModel::setCurrentPage(int page)
{
    if (page == currentPage) {
        return;
    }

    clear();

    QStringList labels("Value");
    setHorizontalHeaderLabels(labels);

    currentPage = page;

    int size = rawData->size();

    setRowCount((itemsOnPageLimit > size) ? size : itemsOnPageLimit);

    int startShiftPosition = itemsOnPageLimit * (currentPage - 1);
    int limit = startShiftPosition + itemsOnPageLimit;

    for (int i = startShiftPosition, row = 0; i < limit && i < size; ++i, ++row) {
        QStandardItem *value = new QStandardItem(rawData->at(i));
        value->setData(QVariant(i), KeyModel::KEY_VALUE_TYPE_ROLE);
        setItem(row, 0, value);
    }
}
/* >>>>>>>>>> EXA_10 <<<<<<<<<< */
void exa_10 (void)
{
  int i;
  char cbuf[40], cstr[4];

  for (i = 0; i < 18; i++)
    xray[i] = 1.f;

  setpag ("da4p");
  disini ();
  setvlt ("small");
  pagera ();
  hwfont ();

  axspos (250, 2700);
  axslen (1600, 2200);
  titlin ("Shading Patterns (PIEGRF)", 3);
  height (50);

  legini (cbuf, 18, 2);
  for (i = 0; i < 18; i++)
  {
    sprintf (cstr, "%d", i);
    leglin (cbuf, cstr, i + 1);
  }

  chnpie ("both");
  labels ("none", "pie");
  piegrf (cbuf, 1, xray, 18);

  title ();
  disfin ();
}
/* >>>>>>>>>> EX13_1 <<<<<<<<<< */
void ex13_1 (void)
{
  setpag ("da4l");
  disini ();
  pagera ();
  hwfont ();

  frame (3);
  axspos (400, 1850);
  axslen (2400, 1400);

  name ("Longitude", "x");
  name ("Latitude", "y");
  titlin ("World Coastlines and Lakes", 3);

  labels ("map", "xy");
  grafmp (-180.f, 180.f, -180.f, 90.f, -90.f, 90.f, -90.f, 30.f);

  gridmp (1, 1);
  color ("green");
  world ();

  color ("fore");
  height (50);
  title ();
  disfin ();
}
void Compute(const ImageRGB<byte>& image, const ImageHSV<byte>& imagehsv) {
    const int& w = image.GetWidth();
    const int& h = image.GetHeight();
    label_map.Resize(h, w);

    // Compute pixel-wise features
    pixel_ftrs.Compute(image, imagehsv);

    // Run K-means to generate textons
    vector<VecD> points;
    const vector<VecD>& ftrs = pixel_ftrs.features;
    random_sample_n(ftrs.begin(), ftrs.end(), back_inserter(points), 5000);
    VecI labels(points.size());
    KMeans::Estimate(points, 20, textons, labels);

    // Label the pixels
    for (int r = 0; r < h; r++) {
        for (int c = 0; c < w; c++) {
            const VecD& ftr = (*pixel_ftrs.feature_map)(r, c);
            double mindist = INFINITY;
            for (int i = 0; i < textons.size(); i++) {
                const double dist = VectorSSD(textons[i], ftr);
                if (dist < mindist) {
                    mindist = dist;
                    label_map[r][c] = i;
                }
            }
        }
    }
}
void Geometry::draw()  //Display saved geometry (default uses display list)
{
   GL_Error_Check;
   //Default to no shaders
   if (glUseProgram) glUseProgram(0);

   if (geom.size())
   {
      if (redraw) update();
      GL_Error_Check;
      redraw = false;

      //Draw using display lists if available
      for (unsigned int i=0; i<geom.size(); i++)
      {
         //Because of quad surface sorting, have to check drawable when creating lists
         //When quads moved to triangle renderer can re-enable this and won't have to
         //recreate display lists when hiding/showing/switching viewports
         //if (drawable(i) && displaylists[i] && glIsList(displaylists[i]))
         if (displaylists[i] && glIsList(displaylists[i]))
            glCallList(displaylists[i]);
         GL_Error_Check;
      }
   }
   GL_Error_Check;

   labels();
}
void IntervalTier_changeLabels (I, long from, long to, const wchar_t *search, const wchar_t *replace,
    int use_regexp, long *nmatches, long *nstringmatches)
{
    iam (IntervalTier);
    try {
        if (from == 0) {
            from = 1;
        }
        if (to == 0) {
            to = my intervals -> size;
        }
        if (from > to || from < 1 || to > my intervals -> size) {
            Melder_throw ("Incorrect specification of where to act.");
        }
        if (use_regexp && wcslen (search) == 0)
            Melder_throw ("The regex search string cannot be empty.\n"
                "You may search for an empty string with the expression \"^$\"");

        long nlabels = to - from + 1;
        autoNUMvector<wchar_t *> labels (1, nlabels);
        for (long i = from; i <= to; i++) {
            TextInterval interval = (TextInterval) my intervals -> item[i];
            labels[i - from + 1] = interval -> text;   // Shallow copy.
        }
        autostringvector newlabels (strs_replace (labels.peek(), 1, nlabels, search, replace,
            0, nmatches, nstringmatches, use_regexp), 1, nlabels);

        for (long i = from; i <= to; i++) {
            TextInterval interval = (TextInterval) my intervals -> item[i];
            Melder_free (interval -> text);
            interval -> text = newlabels[i - from + 1];   // Transfer of ownership.
            newlabels[i - from + 1] = 0;
        }
    } catch (MelderError) {
        Melder_throw (me, ": labels not changed.");
    }
}
/**
 * Provide names of the quantities (column labels) of the force value(s) reported.
 */
OpenSim::Array<std::string> ElasticFoundationForce::getRecordLabels() const
{
    OpenSim::Array<std::string> labels("");

    const ContactParametersSet& contactParametersSet = get_contact_parameters();

    for (int i = 0; i < contactParametersSet.getSize(); ++i) {
        ContactParameters& params = contactParametersSet.get(i);
        for (int j = 0; j < params.getGeometry().size(); ++j) {
            ContactGeometry& geom = _model->updContactGeometrySet().get(params.getGeometry()[j]);
            std::string bodyName = geom.getBodyName();

            labels.append(getName()+"."+bodyName+".force.X");
            labels.append(getName()+"."+bodyName+".force.Y");
            labels.append(getName()+"."+bodyName+".force.Z");
            labels.append(getName()+"."+bodyName+".torque.X");
            labels.append(getName()+"."+bodyName+".torque.Y");
            labels.append(getName()+"."+bodyName+".torque.Z");
        }
    }

    return labels;
}
void test_lda() {
    printf("[test lda]\n");
    double feats[3*10] = {1,2,3,4,5,6,7,8,9,3,5,7,5,3,7,
                          11,12,13,14,15,16,17,18,19,
                          13,15,17,15,13,17};
    int lbls[10] = {-1, -1, 1, 1, -1, 1, 1, 1, -1, 1};

    //HFMatrix<double> features(feats, 3, 10);
    Loader loader("data/hello_matrix");
    HFMatrix<double> features(loader);

    //HFVector<int> labels(lbls, 10);
    Loader loader2("data/hello_label");
    HFVector<int> labels(loader2);

    //Saver saver("data/hello_label");
    //labels.save(saver);
    //Saver saver("data/hello_matrix");
    //features.save(saver);

    LDA lda;
    lda.train(&features, &labels);
}
bool ModelGeneral::Do4DRockPhysicsInversion(ModelSettings* model_settings)
{
  std::vector<FFTGrid*> predictions = state4d_.doRockPhysicsInversion(*time_line_,
                                                                      rock_distributions_.begin()->second,
                                                                      time_evolution_);
  int nParamOut = static_cast<int>(predictions.size());

  std::vector<std::string> labels(nParamOut);

  int i = 0;
  for (std::map<std::string, std::vector<DistributionWithTrend *> >::iterator it = reservoir_variables_.begin();
       it != reservoir_variables_.end(); it++) {
    labels[i] = it->first;
    i++;
  }

  std::string outPre = "mu_";

  for (int i = 0; i < nParamOut; i++) {
    std::string fileName;
    fileName = outPre + labels[i];

    WriteToFile(simbox_, time_depth_mapping_, model_settings, predictions[i], fileName, labels[i]);
  }

  for (size_t i = 0; i < predictions.size(); i++)
    delete predictions[i];

  return 0;
}
/**
 * Prepares to draw info about the best solver.
 * @param status shared level status
 */
SolverDrawer::SolverDrawer(LevelStatus *status)
{
    try {
        Font usedFont(Path::dataReadPath("font/font_menu.ttf"), 14);
        SDL_Color usedColor = {255, 255, 255, 255};
        Labels labels(Path::dataReadPath("script/labels.lua"));

        const char *labelName;
        switch (status->compareToBest()) {
            case 1:
                labelName = "solver_better";
                break;
            case 0:
                labelName = "solver_equals";
                break;
            default:
                labelName = "solver_worse";
        }

        StringTool::t_args args;
        args.push_back("");
        args.push_back(StringTool::toString(status->getBestMoves()));
        args.push_back(status->getBestAuthor());

        WiPara *para = new WiPara(
                labels.getFormatedLabel(labelName, args),
                usedFont, usedColor);
        para->enableCentered();
        para->recenter();
        addWidget(para);
    } catch (BaseException &e) {
        LOG_WARNING(e.info());
    }
}
/**
 * Provide names of the quantities (column labels) of the force value(s) reported.
 */
OpenSim::Array<std::string> HuntCrossleyForce::getRecordLabels() const
{
    OpenSim::Array<std::string> labels("");

    const ContactParametersSet& contactParametersSet = get_contact_parameters();

    for (int i = 0; i < contactParametersSet.getSize(); ++i) {
        ContactParameters& params = contactParametersSet.get(i);
        for (int j = 0; j < params.getGeometry().size(); ++j) {
            const ContactGeometry& geom =
                getModel().getComponent<ContactGeometry>(params.getGeometry()[j]);
            std::string frameName = geom.getFrame().getName();

            labels.append(getName()+"."+frameName+".force.X");
            labels.append(getName()+"."+frameName+".force.Y");
            labels.append(getName()+"."+frameName+".force.Z");
            labels.append(getName()+"."+frameName+".torque.X");
            labels.append(getName()+"."+frameName+".torque.Y");
            labels.append(getName()+"."+frameName+".torque.Z");
        }
    }

    return labels;
}
AplusFuncLabel::AplusFuncLabel(A a_, AplusLabelOut *alo_) : AplusLabelOut()
{
  if (alo_!=0 && alo_->outFunc()!=0)
   {
     outFunc(alo_->outFunc());
     v(alo_->v());
   }
  if (alo_!=0 && alo_->format()!=AplusFormatter::BadFormat)
   {
     format(alo_->format());
     precision(alo_->precision());
   }

  if (verify(a_)==MSTrue)
   {
     a((A) ic(a_));
   }
  else
   {
     MSStringVector emptyStringVector;
     a((A)0);
     tick((A)0);
     grid((A)0);
     value((A)0);
     labels(emptyStringVector);
   }
}
//-----------------------------------------------------------------
void MenuOptions::prepareMenu()
{
    if (m_container) {
        deregisterDrawable(m_container);
        delete m_container;
        m_container = NULL;
    }

    Labels labels(Path::dataReadPath("script/labels.lua"));

    IWidget *soundBox = createSoundPanel(labels);
    IWidget *musicBox = createMusicPanel(labels);

    VBox *vbox = new VBox();
    vbox->addWidget(soundBox);
    vbox->addWidget(new WiSpace(0, 10));
    vbox->addWidget(musicBox);
    vbox->addWidget(new WiSpace(0, 10));
    vbox->addWidget(createLangPanel(labels));
    vbox->addWidget(new WiSpace(0, 5));
    vbox->addWidget(createSpeechPanel(labels));
    vbox->addWidget(new WiSpace(0, 5));
    vbox->addWidget(createSubtitlesPanel(labels));

    IWidget *backButton = createBackButton(labels);
    m_statusBar = createStatusBar(musicBox->getW() - backButton->getW());

    HBox *backBox = new HBox();
    backBox->addWidget(m_statusBar);
    backBox->addWidget(backButton);
    vbox->addWidget(backBox);

    m_container = vbox;
    registerDrawable(m_container);
}
void Diffusion_MPI_Rewrite<GM,ACC>::computePhi(IndexType factorIndex, IndexType varIndex,
                                               uIterator begin, uIterator end)
{
    auto firstVarId = _gm[factorIndex].variableIndex(0);
    auto secondVarId = _gm[factorIndex].variableIndex(1);

    // dirty hack :)
    auto labelId = 0;
    if (secondVarId == varIndex)
    {
        labelId = 1;
    }
    IndexType secondLabelId;
    labelId == 0 ? secondLabelId = 1 : secondLabelId = 0;

    std::vector<LabelType> labels(2);
    labels[labelId] = 0;
    std::vector<LabelType> label(1);
    label[0] = 0;

    // update dual variable for each label
    for (auto it = begin; it != end; ++it)
    {
        labels[secondLabelId] = 0;
        // compute minimal factor value and subtract it from it (it = relaxed labels)
        auto mini = this->getFactorValue(factorIndex, varIndex, labels.begin());
        for (auto i = 1; i < _gm.numberOfLabels(secondVarId); ++i)
        {
            labels[secondLabelId] = i;
            auto temp = this->getFactorValue(factorIndex, varIndex, labels.begin());
            if (temp < mini)
                mini = temp;
        }
        *it -= mini;
        ++labels[labelId];
        *it += _weights[varIndex] * this->getVariableValue(varIndex, label[0]);
        ++label[0];
    }
}
vector<vector<DisambiguatedData> > Disambiguator::Disambiguate(
    const vector<Token>& tokens, int numberOfHypothesis
    , vector<double>* hypothesisDistribution)
{
    vector<PredisambiguatedData> predisambiguated
        = featureCalculator->CalculateFeatures(tokens);

    // Create chain
    size_t size = predisambiguated.size();
    vector<wstring> words(size);
    vector<vector<wstring> > features(size);
    vector<wstring> labels(size);
    for (size_t chainIndex = 0; chainIndex < size; ++chainIndex)
    {
        words[chainIndex] = predisambiguated[chainIndex].content;
        features[chainIndex] = predisambiguated[chainIndex].features;
    }
    LinearCRF::Chain chain(
        std::move(words)
        , std::move(features)
        , std::move(labels)
        , vector<vector<wstring> >());

    vector<vector<wstring> > bestSequences;
    vector<vector<double> > bestSequenceWeights;
    this->Apply(chain
        , numberOfHypothesis
        , &bestSequences
        , &bestSequenceWeights
        , hypothesisDistribution);

    vector<vector<DisambiguatedData> > topDisambiguatedSequences;
    for (size_t chainIndex = 0; chainIndex < bestSequences.size()
        ; ++chainIndex)
    {
        vector<DisambiguatedData> disambiguatedData;
        for (size_t tokenIndex = 0; tokenIndex < size; ++tokenIndex)
        {
            wstring& label = bestSequences[chainIndex][tokenIndex];
            shared_ptr<Morphology> grammInfo = getBestGrammInfo(
                predisambiguated[tokenIndex], label);
            applyPostprocessRules(&label, grammInfo);
            const wstring& lemma
                = dictionary == 0 ? DICT_IS_NULL
                : *(grammInfo->lemma) == NOT_FOUND_LEMMA
                    ? Tools::ToLower(predisambiguated[tokenIndex].content)
                    : *(grammInfo->lemma);
            disambiguatedData.emplace_back(
                predisambiguated[tokenIndex].content
                , predisambiguated[tokenIndex].punctuation
                , predisambiguated[tokenIndex].source
                , predisambiguated[tokenIndex].isNextSpace
                , lemma
                , label
                , bestSequenceWeights[chainIndex][tokenIndex]
                , grammInfo->lemma_id);
        }
        topDisambiguatedSequences.push_back(std::move(disambiguatedData));
    }
    return topDisambiguatedSequences;
}
void purge( const boost::program_options::variables_map & options )
{
    boost::filesystem::path rootPath( options[ "objectStoreRootPath" ].as< std::string >() );
    Osmosis::ObjectStore::Store store( rootPath );
    Osmosis::ObjectStore::Labels labels( rootPath, store );
    Osmosis::ObjectStore::Purge purge( store, labels );
    purge.purge();
}
Gura_ImplementMethod(model, get_labels)
{
    struct model *pModel = Object_model::GetObjectThis(arg)->GetEntity();
    int nClasses = ::get_nr_class(pModel);
    std::unique_ptr<int []> labels(new int [nClasses]);
    ::get_labels(pModel, labels.get());
    return Value::CreateList(env, labels.get(), nClasses);
}
void DragWidget::updateLabelPos( const QSize &newSize )
{
    QMap<DragLabel*,QPoint> sizemap;
    QList<DragLabel*> labels( findChildren<DragLabel*>() );
    foreach( DragLabel *label, labels ) {
        sizemap.insert( label, pos2grid( label->pos() ) );
    }
std::vector<unsigned> DbScan::labels() const
{
    std::vector<unsigned> labels(points_.size());
    for(unsigned i = 0; i < points_.size(); ++i)
    {
        labels[i] = points_[i].label;
    }
    return labels;
}
int main(int argc, char *argv[]) {
  // ULogger::setType(ULogger::kTypeConsole);
  // ULogger::setLevel(ULogger::kInfo);
  // ULogger::setLevel(ULogger::kDebug);

  std::vector<std::string> dbfiles;
  for (int i = 1; i < argc; i++) {
    dbfiles.emplace_back(argv[i]);
  }

  QCoreApplication app(argc, argv);

  std::unique_ptr<WordsKdTree> words(new WordsKdTree());
  std::unique_ptr<LabelsSimple> labels(new LabelsSimple());
  QThread identObjThread;

  std::cout << "Reading data" << std::endl;
  if (!RTABMapDBAdapter::readData(dbfiles, *words, *labels)) {
    qCritical() << "Reading data failed";
    return 1;
  }

  std::cout << "Initializing IdentificationObj Service" << std::endl;
  std::shared_ptr<IdentificationObj> identObj(
      new IdentificationObj(std::move(words), std::move(labels)));
  identObj->moveToThread(&identObjThread);
  identObjThread.start();

  // BWServer
  std::cout << "Initializing BW server" << std::endl;
  // TODO use shared_ptr
  unsigned int maxClients = 10;
  std::shared_ptr<BWFrontEndObj> bwFrontEndObj(new BWFrontEndObj());
  identObj->setBWFrontEndObj(bwFrontEndObj);
  bwFrontEndObj->setIdentificationObj(identObj);
  QThread bwThread;
  bwThread.start();
  bwFrontEndObj->moveToThread(&bwThread);
  QObject::connect(bwFrontEndObj.get(), &BWFrontEndObj::triggerInit,
                   bwFrontEndObj.get(), &BWFrontEndObj::init);
  emit bwFrontEndObj->triggerInit(maxClients);

  // HTTPFrontEndObj
  std::cout << "Initializing HTTP Front End" << std::endl;
  std::shared_ptr<HTTPFrontEndObj> httpFrontEndObj(new HTTPFrontEndObj());
  identObj->setHTTPFrontEndObj(httpFrontEndObj);
  httpFrontEndObj->setIdentificationObj(identObj);
  if (!httpFrontEndObj->init()) {
    qCritical() << "Starting HTTP Front End Failed";
    return 1;
  }

  std::cout << "Initialization Done" << std::endl;

  return app.exec();
}
std::shared_ptr< Dataset > Utility::ReadDataSet(const std::string& path)
{
    auto buffer = std::make_shared< Dataset >();
    //open the FANN-style data file
    FILE* file = fopen(path.data(), "rb");
    //read
    if(file)
    {
        unsigned int ninput=0, natt=0, nout=0;
        fscanf(file, "%u %u %u\n", &ninput, &natt, &nout);
        //allocate rows and labels
        buffer->mDatas.resize(ninput);
        buffer->mLabels.resize(ninput);
        //labels are stored as doubles in the file
        std::vector< double > labels(ninput);
        //allocate all attribute values
        for(auto& row : buffer->mDatas) row.resize(natt);
        //read rows
        for(size_t n=0; n!=ninput; ++n)
        {
            //get fields
            for(size_t a=0; a!=natt; ++a)
            {
                //get value
                double value = 0.0;
                fscanf(file, "%le ", &value);
                //put value
                buffer->mDatas[n][a] = value;
            }
            //get class
            fscanf(file, "\n%le \n", &labels[n]);
        }
        //convert the double labels into "integer" labels
        for(size_t n = 0; n != ninput; ++n)
        {
            buffer->mLabels[n] = (int)(labels[n] != 0.0 ? 1.0 / labels[n] : 0.0);
            //....
            if ( buffer->mLabels[n] < 0.0 ) buffer->mLabels[n] *= -1.0;
            else                            buffer->mLabels[n] *=  2.0;
        }
    }
    //count classes
    std::vector< int > classes;
    //count
    for(int id : buffer->mLabels)
    {
        auto it = std::find(classes.begin(), classes.end(), id);
        if( it == classes.end() ) classes.push_back(id);
    }
    //save count
    buffer->mCCount = classes.size();
    //return
    return buffer;
}
void test_for_GP()
{
    MatrixXd datasets(4,2);
    datasets << 1,1, -1,1, 1,-1, -1,-1;
    MatrixXd labels(1,4);
    labels << 1, 1, -1, -1;
    GuassianProcessExact gpe(datasets, labels);
    cout << gpe.result(datasets) << endl;
}
PersonRecognizer::PersonRecognizer(const vector<Mat> &imgs, int radius, int neighbors,
    int grid_x, int grid_y, double threshold)
{
    //all images are faces of the same person, so initialize the same label for all.
    vector<int> labels(imgs.size());
    for (vector<int>::iterator it = labels.begin(); it != labels.end(); *(it++) = PERSON_LABEL);

    _faceSize = Size(imgs[0].size().width, imgs[0].size().height);

    //build recognizer model:
    _model = createLBPHFaceRecognizer(radius, neighbors, grid_x, grid_y, threshold);
    _model->train(imgs, labels);
}
/*******************************************************************
 * Streaming implementation
 ******************************************************************/
void work(void)
{
    //handle input messages in the packet work method
    auto inPort0 = this->input(0);
    if (_channels.size() <= 1 and inPort0->hasMessage()) this->packetWork();

    int flags = 0;
    long long timeNs = 0;
    size_t numElems = this->workInfo().minInElements;
    if (numElems == 0) return;

    //parse labels (from input 0)
    for (const auto &label : inPort0->labels())
    {
        //skip out of range labels
        if (label.index >= numElems) break;

        //found a time label
        if (label.id == "txTime")
        {
            if (label.index == 0) //time for this packet
            {
                flags |= SOAPY_SDR_HAS_TIME;
                timeNs = label.data.convert<long long>();
            }
            else //time on the next packet
            {
                //truncate to not include this time label
                numElems = label.index;
                break;
            }
        }

        //found an end label
        if (label.id == "txEnd")
        {
            flags |= SOAPY_SDR_END_BURST;
            numElems = std::min<size_t>(label.index+label.width, numElems);
            break;
        }
    }

    //write the stream data
    const long timeoutUs = this->workInfo().maxTimeoutNs/1000;
    const auto &buffs = this->workInfo().inputPointers;
    const int ret = _device->writeStream(_stream, buffs.data(), numElems, flags, timeNs, timeoutUs);

    //handle result
    if (ret > 0) for (auto input : this->inputs()) input->consume(size_t(ret));
    else if (ret == SOAPY_SDR_TIMEOUT) return this->yield();
    else
    {
        for (auto input : this->inputs()) input->consume(numElems); //consume error region
        throw Pothos::Exception("SDRSink::work()", "writeStream "+std::string(SoapySDR::errToStr(ret)));
    }
}
const char *MSLabelOut::formatOutput(MSString &buffer_, double data_)
{
  if (data_<UINT_MAX)
   {
     unsigned index=unsigned(data_);
     if (index<labels().length())
      {
        buffer_=labels()(index);
        return buffer_.string();
      }
   }
  switch (format().formatType())
   {
   case MSFormat::Date:
     {
       MSDate aDate((MSJulian)data_);
       return aDate.format(buffer_,format());
     }
   case MSFormat::Money:
     {
       MSMoney aMoney(data_);
       return aMoney.format(buffer_,format());
     }
   case MSFormat::Rate:
     {
       MSRate aRate(data_);
       return aRate.format(buffer_,format());
     }
   case MSFormat::Time:
     {
       MSTime aTime((time_t)data_);
       return aTime.format(buffer_,format());
     }
   case MSFormat::Float:
   default:
     {
       MSFloat aFloat(data_);
       return aFloat.format(buffer_,format());
     }
   }
  /* return buffer_.string(); */
}
TEST_F( Labels, Remove )
{
    auto m = ml->addMedia( "media.avi" );
    auto l1 = ml->createLabel( "sea otter" );
    auto l2 = ml->createLabel( "cony the cone" );

    m->addLabel( l1 );
    m->addLabel( l2 );

    auto labels = m->labels()->all();
    ASSERT_EQ( labels.size(), 2u );

    bool res = m->removeLabel( l1 );
    ASSERT_TRUE( res );

    // Check the existing media instance first
    labels = m->labels()->all();
    ASSERT_EQ( labels.size(), 1u );
    ASSERT_EQ( labels[0]->name(), "cony the cone" );

    // Now fetch a clean instance of the media & check again for DB replication
    auto media = ml->media( m->id() );
    labels = media->labels()->all();
    ASSERT_EQ( labels.size(), 1u );
    ASSERT_EQ( labels[0]->name(), "cony the cone" );

    // Remove a non-linked label
    res = m->removeLabel( l1 );
    ASSERT_FALSE( res );

    // Remove the last label
    res = m->removeLabel( l2 );
    ASSERT_TRUE( res );
    labels = m->labels()->all();
    ASSERT_EQ( labels.size(), 0u );

    // Check again for DB replication
    media = ml->media( m->id() );
    labels = media->labels()->all();
    ASSERT_EQ( labels.size(), 0u );
}
void MemoryReservationDirective::enhanceSymbolTable(
    const EnhanceSymbolTableImmutableArguments& immutable,
    CompileErrorList& errors,
    SymbolGraph& graph) {
  // We calculate the absolute memory position and enhance our symbol table.
  _absolutePosition = immutable.allocator().absolutePosition(_relativePosition);
  auto absolutePositioned = PositionedString(std::to_string(_absolutePosition));
  for (const auto& label : labels()) {
    graph.addNode(Symbol(label, absolutePositioned));
  }
}
/**
 * @brief cwLinePlotLabelView::addCaves
 * @param begin - The index of the first cave that's added to the Region
 * @param end - The index of the last cave that's added to the region
 *
 * This adds each cave's labels to the 3d label view.
 */
void cwLinePlotLabelView::addCaves(int begin, int end)
{
    for(int i = begin; i <= end; i++) {
        cwCave* cave = Region->cave(i);
        connectCave(cave);

        cwLabel3dGroup* labelGroup = new cwLabel3dGroup(this);
        QList<cwLabel3dItem> caveLabels = labels(cave);
        labelGroup->setLabels(caveLabels);

        CaveLabelGroups.insert(i, labelGroup);
    }
}
void Classifier::init(const std::string& model_def, const std::string& trained_weights,
                      const std::string& mean_file, const std::string& label_file,
                      const int &gpu_id)
{
    if(gpu_id >= 0)
    {
        caffe::Caffe::set_mode(caffe::Caffe::GPU);
        caffe::Caffe::SetDevice(gpu_id);
    }
    else
        caffe::Caffe::set_mode(caffe::Caffe::CPU);

    /* Load the network. */
    net_.reset(new caffe::Net<float>(model_def, caffe::TEST));
    net_->CopyTrainedLayersFrom(trained_weights);

    CHECK_EQ(net_->num_inputs(), 1) << "Network should have exactly one input.";
    CHECK_EQ(net_->num_outputs(), 1) << "Network should have exactly one output.";

    caffe::Blob<float>* input_layer = net_->input_blobs()[0];
    num_channels_ = input_layer->channels();
    CHECK(num_channels_ == 3 || num_channels_ == 1)
        << "Input layer should have 1 or 3 channels.";
    input_geometry_ = cv::Size(input_layer->width(), input_layer->height());

    /* Load the binaryproto mean file. */
    if(mean_file != "")
        SetMean(mean_file);
    else
    {
        mean_ = cv::Mat::zeros(input_geometry_, CV_MAKE_TYPE(CV_32F, num_channels_));
    }

    /* Load labels. */
    caffe::Blob<float>* output_layer = net_->output_blobs()[0];
    if(label_file != "")
    {
        std::ifstream labels(label_file.c_str());
        CHECK(labels) << "Unable to open labels file " << label_file;
        std::string line;
        while (std::getline(labels, line))
            labels_.push_back(std::string(line));

        CHECK_EQ(labels_.size(), output_layer->channels())
            << "Number of labels is different from the output layer dimension.";
    }
    else
    {
        for (int i = 0; i < output_layer->channels(); ++i)
        {
            std::stringstream ss;
            ss << i;
            labels_.push_back(ss.str());
        }
    }

    is_ready = true;
}
void NormalBayes::predict(Data *data)
{
    int nSamples = data->getNSamples(), cntSamples;
    Mat predictSamples = dataExtractFeatureVectors(data), labels(nSamples, 1, CV_32FC1);

    normalBayes.predict(predictSamples, &labels);

    for(cntSamples = 0; cntSamples < nSamples; ++cntSamples){
        data->setClassificationLabel(cntSamples, (int)labels.at<float>(cntSamples, 0));
    }
}