/******************************************************************************
* Timer callback used during animation playback.
******************************************************************************/
void AnimationSettings::onPlaybackTimer()
{
	// Check if the animation playback has been deactivated in the meantime.
	if(!_isPlaybackActive)
		return;

	// Add one frame to the current time.
	int newFrame = timeToFrame(time()) + 1;
	TimePoint newTime = frameToTime(newFrame);

	// Loop back to first frame if end has been reached.
	if(newTime > animationInterval().end())
		newTime = animationInterval().start();

	// Set new time.
	setTime(newTime);

	// Wait until the scene is ready. Then jump to the next frame.
	dataset()->runWhenSceneIsReady([this]() {
		if(_isPlaybackActive) {
			_isPlaybackActive = false;
			startAnimationPlayback();
		}
	});
}
TEST(HDF5IO, ReadWrite1D) {
  const unsigned int nelements = 5;
  std::vector<float> a(nelements);
  for (unsigned int i=0; i < a.size(); i++)
    a[i] = 2 * i;

  // 1D array
  std::vector<unsigned int> rank(1);
  rank[0] = a.size();

  std::string filename("/tmp/sxmc_test_1d.hdf5");
  std::string dataset("/a");

  ASSERT_TRUE(write_float_vector_hdf5(filename, dataset, a, rank) >= 0);

  std::vector<float> test_a;
  std::vector<unsigned int> test_rank;
  ASSERT_TRUE(read_float_vector_hdf5(filename, dataset, test_a, test_rank) >= 0);

  ASSERT_EQ((unsigned) 1, test_rank.size());
  EXPECT_EQ(nelements, test_rank[0]);
  ASSERT_EQ(nelements, test_a.size());
  for (unsigned int i=0; i < test_a.size(); i++)
    EXPECT_EQ(2*i, test_a[i]);
}
double terrama2::core::RiscoDeFogo::XYLinhaCol(double x, double y, const std::string& path, const std::string& filename) const
{
  //const auto& prec = *(precipitacao.rbegin()+i);
  std::shared_ptr<te::da::DataSource> datasource(te::da::DataSourceFactory::make("GDAL", "file://"+path+filename));

  // RAII for opening/closing the datasource
  terrama2::core::OpenClose<std::shared_ptr<te::da::DataSource> > openClose(datasource);

  std::shared_ptr<te::da::DataSourceTransactor> transactor(datasource->getTransactor());
  std::shared_ptr<te::da::DataSet> dataset(transactor->getDataSet(filename));
  std::shared_ptr<te::rst::Raster> raster(dataset->getRaster(0));

  te::rst::Grid* grid = raster->getGrid();
  te::rst::Band* band = raster->getBand(0);

  // Convert the geographic coordinate to a grid (column/row) position.
  double colD, rowD;
  grid->geoToGrid(x, y, colD, rowD);

  int col = std::round(colD);
  int row = std::round(rowD);

  double value;
  band->getValue(col, row, value);

  return value;
}
static unsigned hdf5_read(ndio_t file, nd_t dst)
{
  ndio_hdf5_t self=(ndio_hdf5_t)ndioContext(file);
  HTRY(H5Dread(dataset(self),dtype(self),H5S_ALL,H5S_ALL,H5P_DEFAULT,nddata(dst)));
  return 1;
Error:
  return 0;
}
void ANNWrapper::SetPoints(const std::vector <openMVG::Vec3f> &P)
{
  dataset = openMVG::Matf(P.size(), dim);
  m_nnidx = Eigen::VectorXi(m_k);               // allocate near neighbor indices
  m_dists = new KFLANN::DistanceType[m_k];      // allocate near neighbor distances

  for (unsigned int i = 0; i < P.size(); i++) {
    dataset(i, 0) = P.at(i).x();
    dataset(i, 1) = P.at(i).y();
    dataset(i, 2) = P.at(i).z();
  }

  matcher.Build(dataset.data(), P.size(), dim);
}
/******************************************************************************
* Is called when the user presses the "Open Inspector" button.
******************************************************************************/
void DislocationNetworkEditor::onOpenInspector()
{
	DislocationNetwork* dislocationsObj = static_object_cast<DislocationNetwork>(editObject());
	if(!dislocationsObj) return;

	QMainWindow* inspectorWindow = new QMainWindow(container()->window(), (Qt::WindowFlags)(Qt::Tool | Qt::CustomizeWindowHint | Qt::WindowMaximizeButtonHint | Qt::WindowCloseButtonHint));
	inspectorWindow->setWindowTitle(tr("Dislocation Inspector"));

	PropertiesPanel* propertiesPanel = new PropertiesPanel(inspectorWindow);
	propertiesPanel->hide();

	QWidget* mainPanel = new QWidget(inspectorWindow);
	QVBoxLayout* mainPanelLayout = new QVBoxLayout(mainPanel);
	mainPanelLayout->setStretch(0,1);
	mainPanelLayout->setContentsMargins(0,0,0,0);
	inspectorWindow->setCentralWidget(mainPanel);

	ObjectNode* node = dynamic_object_cast<ObjectNode>(dataset()->selection()->front());
	DislocationInspector* inspector = new DislocationInspector(node);
	connect(inspector, &QObject::destroyed, inspectorWindow, &QMainWindow::close);
	inspector->setParent(propertiesPanel);
	inspector->initialize(propertiesPanel, mainWindow(), RolloutInsertionParameters().insertInto(mainPanel));
	inspector->setEditObject(dislocationsObj);

	inspectorWindow->setAttribute(Qt::WA_DeleteOnClose);
	inspectorWindow->resize(1000, 350);
	inspectorWindow->show();
}
template<typename Distance, typename IndexType>
void buildIndex_(void*& index, const Mat& data, const IndexParams& params, const Distance& dist = Distance())
{
    typedef typename Distance::ElementType ElementType;
    if(DataType<ElementType>::type != data.type())
        CV_Error_(Error::StsUnsupportedFormat, ("type=%d\n", data.type()));
    if(!data.isContinuous())
        CV_Error(Error::StsBadArg, "Only continuous arrays are supported");

    ::cvflann::Matrix<ElementType> dataset((ElementType*)data.data, data.rows, data.cols);
    IndexType* _index = new IndexType(dataset, get_params(params), dist);

    try
    {
        _index->buildIndex();
    }
    catch (...)
    {
        delete _index;
        _index = NULL;
        throw;
    }

    index = _index;
}
/******************************************************************************
* Is called when the user has selected a certain frame in the frame list box.
******************************************************************************/
void FileSourceEditor::onFrameSelected(int index)
{
	FileSource* obj = static_object_cast<FileSource>(editObject());
	if(!obj) return;

	dataset()->animationSettings()->setTime(obj->inputFrameToAnimationTime(index));
}
/**
 * @brief TestAlgorithm::run start the search
 * @return a list of data packets containing results
 */
QList<DataPacket*> TestAlgorithm::run()
{
    QList<DataPacket*> list;
    SearchResult* result = new SearchResult();
    list.append(dynamic_cast<DataPacket*>(result));

    for (QString& datasetPath : mQuery->getDatasets()) {
        Dataset dataset(datasetPath);

        for (Medium* medium : dataset.getMediaList()) {
            if (mCancel == true) {
                return list;
            }

            SearchObject* object = new SearchObject();
            object->setMedium(medium->getPath());
            object->setSourceDataset(dataset.getPath());

            // new result element
            SearchResultElement* resultElement = new SearchResultElement();
            resultElement->setSearchObject(*object);
            resultElement->setScore(std::rand() % 20);

            // add to result list
            result->addResultElement(*resultElement);
        }
    }

    QThread::msleep(1500);
    return list;
}
// Calculates log-loss of a dataset using ffm-native-ops C++ library (without GPU).
//
// Arguments:
//   --datasetPath <path> (--binModelPath | --textModelPath) <path> [--samplingFactor <float>]
int main(int argc, const char ** argv) {
    Options const & options = parseOptions(argvToArgs(argc, argv));

    Dataset dataset(options.datasetPath);

    Model model(dataset.numFields);
    if (!options.binModelPath.empty()) {
        model.binaryDeserialize(options.binModelPath);
    } else {
        model.importModel(options.textModelPath);
    }

    LogLossCalculator logLossCalc(options.samplingFactor);
    for (int64_t sampleIdx = 0; dataset.hasNext(); ++sampleIdx) {
        Dataset::Sample const & sample = dataset.next();
        float t = ffmPredict(model.weights.data(), dataset.numFields, sample.data());
        int y = sample.back() > 0 ? 1 : -1;
        logLossCalc.update(t, y);
        std::cout << sampleIdx << " " << y << " " << t << std::endl;
    }

    std::cout << "Log-loss: " << logLossCalc.get() << std::endl;
    return 0;
}
void hdf5_iprimitive::read_hdf5_dataset
(
    std::wstring* t,
    std::size_t data_count,
    std::size_t object_number
)
{
    BOOST_ASSERT(data_count == 1);

    std::string path = create_object_data_path(object_number);
    hdf5_dataset dataset(*file_, path);
    hdf5_datatype datatype(dataset);

    // If you can think of a better way to store wchar_t/wstring objects in HDF5, be my guest...
    size_t size = datatype.get_size();
    BOOST_ASSERT(size >= sizeof(wchar_t));
    std::size_t string_size = size / sizeof(wchar_t) - 1;

    t->resize(string_size);
    if(string_size) {
        std::vector<wchar_t> buffer(string_size + 1);
        dataset.read(datatype, &buffer[0]);
        t->replace(0, string_size, &buffer[0], string_size);
    }

    datatype.close();
    dataset.close();
}
void hdf5_iprimitive::read_hdf5_dataset
(
    std::string* t,
    std::size_t data_count,
    std::size_t object_number
)
{
    BOOST_ASSERT(data_count == 1);

    std::string path = create_object_data_path(object_number);
    hdf5_dataset dataset(*file_, path);
    hdf5_datatype datatype(dataset);

    if(datatype.is_variable_length_string()) {
        char* buffer;
        hdf5_dataspace dataspace(dataset);
        dataset.read(datatype, &buffer);
        *t = buffer;
        datatype.reclaim_buffer(dataspace, &buffer);
        dataspace.close();
    }
    else {
        size_t size = datatype.get_size();
        std::vector<char> buffer(size);
        dataset.read(datatype, &buffer[0]);
        t->resize(size);
        t->replace(0, size, &buffer[0], size);
    }

    datatype.close();
    dataset.close();
}
int main(int argc, char* argv[]) {

  if( argc != 4 && argc != 5 ) {
    std::cout << "USAGE: ./do2ndLevel_PhotonJet_batch [dataset] [inputFileList] [flags] [useGenJets=false]" << std::endl;
    exit(23);
  }

  std::string dataset(argv[1]);
  std::string inputFileList(argv[2]);
  std::string flags(argv[3]);

  bool useGenJets = false;
  if( argc == 5 ) {
    std::string useGenJets_str(argv[4]);
    if( useGenJets_str=="true" ) useGenJets = true;
  }

  TRegexp run2010("Run2010");
  TRegexp run2011("Run2011");
  TRegexp run2012("Run2012");

  TString dataset_str(dataset);
  if( dataset_str.Contains(run2010) || dataset_str.Contains(run2011) || dataset_str.Contains(run2012) ) {
    // then it's data
    doSingleLoop(inputFileList, dataset, flags, (bool)true, (bool)false);
  } else {
    doSingleLoop(inputFileList, dataset, flags, (bool)false, useGenJets);
  }
}
bool Print::print(const QStringList &commands, State &state)
{
    if (commands.isEmpty()) {
        std::cout << QObject::tr("print requires a dataset to be named").toStdString() << std::endl;
        return true;
    }

    const QString datasetName = commands.first();
    QDir project(state.projectPath());
    Dataset dataset(datasetName, state);
    if (!dataset.isValid()) {
        std::cout << QObject::tr("The dataset %1 could not be loaded; try checking it with the check command").arg(datasetName).toStdString() << std::endl;
        return true;
    }

    QStringList cols;
    if (commands.size() > 1) {
        cols = commands;
        cols.removeFirst();
    }

    std::cout << dataset.tableHeaders(cols).toStdString() << std::endl;
    dataset.eachRow([cols](const Dataset::Row &row) {
        std::cout << row.toString(cols).toStdString() << std::endl;
    });

    return true;
}
/******************************************************************************
* Replaces the particle selection.
******************************************************************************/
void ParticleSelectionSet::setParticleSelection(const PipelineFlowState& state, const QBitArray& selection, SelectionMode mode)
{
	// Make a backup of the old snapshot so it may be restored.
	if(dataset()->undoStack().isRecording())
		dataset()->undoStack().push(new ReplaceSelectionOperation(this));

	ParticlePropertyObject* identifierProperty = ParticlePropertyObject::findInState(state, ParticleProperty::IdentifierProperty);
	if(identifierProperty && useIdentifiers()) {
		OVITO_ASSERT(selection.size() == identifierProperty->size());
		_selection.clear();
		int index = 0;
		if(mode == SelectionReplace) {
			_selectedIdentifiers.clear();
			for(int id : identifierProperty->constIntRange()) {
				if(selection.testBit(index++))
					_selectedIdentifiers.insert(id);
			}
		}
		else if(mode == SelectionAdd) {
			for(int id : identifierProperty->constIntRange()) {
				if(selection.testBit(index++))
					_selectedIdentifiers.insert(id);
			}
		}
		else if(mode == SelectionSubtract) {
			for(int id : identifierProperty->constIntRange()) {
				if(selection.testBit(index++))
					_selectedIdentifiers.remove(id);
			}
		}
	}
	else {
		_selectedIdentifiers.clear();
		if(mode == SelectionReplace)
			_selection = selection;
		else if(mode == SelectionAdd) {
			_selection.resize(selection.size());
			_selection |= selection;
		}
		else if(mode == SelectionSubtract) {
			_selection.resize(selection.size());
			_selection &= ~selection;
		}
	}

	notifyDependents(ReferenceEvent::TargetChanged);
}
static void annotate_paint(SkPaint& paint, const char* key, SkData* value) {
    SkAutoTUnref<SkDataSet> dataset(SkNEW_ARGS(SkDataSet, (key, value)));
    SkAnnotation* ann = SkNEW_ARGS(SkAnnotation, (dataset, SkAnnotation::kNoDraw_Flag));

    paint.setAnnotation(ann)->unref();
    SkASSERT(paint.isNoDrawAnnotation());
}
virtual void start_document ()
{
    meta = dataset();
    ts.clear();
    temp_image = image();
    temp_box = box();
}
/*
 * Class:     org_moa_gpu_bridge_NativeDenseInstanceBatch
 * Method:    init
 * Signature: (Lweka/core/Instances;I)V
 */
JNIEXPORT void JNICALL Java_org_moa_gpu_bridge_NativeDenseInstanceBatch_init
  (JNIEnv * env, jobject instance_batch, jobject instances, jint num_rows)
{
	static jclass _class = env->FindClass(theClazz);
	static jfieldID _context_field = env->GetFieldID(_class, "m_native_context", "J");
	dataset_interface dataset(env, instances);
	dense_instance_batch* batch = new dense_instance_batch(num_rows, dataset.get_num_attributes()-1, get_global_context());
	env->SetLongField(instance_batch, _context_field, (jlong)batch);
}
dataobject::dataobject(administrator_basic * adb, administrator_pointer * adp,
                       const ST::string & n, ofstream * lo, istream * in)
    : statobject(adb, n, "dataset", lo, in)
{
    adminp_p = adp;
    d = dataset(n, adb);
    create();
}
TEST(utest_interface_optics, optics_algorithm) {
    std::shared_ptr<pyclustering_package> sample = pack(dataset({
        {  1.0,  1.0 }, {  1.1,  1.0 }, {  1.2,  1.4 },
        { 10.0, 10.3 }, { 10.1, 10.2 }, { 10.2, 10.4 }
    }));

    pyclustering_package * result = optics_algorithm(sample.get(), 4, 2, 2, 0);
    ASSERT_EQ((std::size_t) OPTICS_PACKAGE_SIZE, result->size);

    delete result;
}
TEST(utest_interface_dbscan, dbscan_algorithm) {
    std::shared_ptr<pyclustering_package> sample = pack(dataset({
        {  1.0,  1.0 }, {  1.1,  1.0 }, {  1.2,  1.4 },
        { 10.0, 10.3 }, { 10.1, 10.2 }, { 10.2, 10.4 }
    }));

    pyclustering_package * result = dbscan_algorithm(sample.get(), 4, 2, 0);
    ASSERT_EQ(3U, result->size);   /* allocated clusters + noise */

    delete result;
}
/******************************************************************************
* Is called when the current animation time has changed.
******************************************************************************/
void AnimationSettings::onTimeChanged(TimePoint newTime)
{
	_timeIsChanging++;
	dataset()->runWhenSceneIsReady([this]() {
		_timeIsChanging--;
		Q_EMIT timeChangeComplete();
	});
}
af::shared<dataset> crystal::datasets() const
{
    af::shared<dataset> result((af::reserve(n_datasets())));
    for(int i_dataset=0; i_dataset<n_datasets(); i_dataset++) {
        result.push_back(dataset(*this, i_dataset));
    }
    return result;
}
///
/// \brief Vespucci::SaveVespucciBinary
/// \param filename
/// \param spectra
/// \param x
/// \param y
/// \param abscissa
/// \return
///
bool Vespucci::SaveVespucciBinary(std::string filename, const arma::mat &spectra, const arma::vec &x, const arma::vec &y, const arma::vec &abscissa)
{
    bool success;
    try{
        arma::field<arma::mat> dataset(4);
        dataset(0) = spectra;
        dataset(1) = abscissa;
        dataset(2) = x;
        dataset(3) = y;
        success = dataset.save(filename, arma::arma_binary);
    }
    catch(const std::exception& e){
        std::cerr << "See armadillo exception" << std::endl;
        std::string str = "Vespucci::SaveVespucciBinary: " + std::string(e.what());
        throw std::runtime_error(str);
    }
    return success;
}
int main( int argc, char* argv[] ) {

  if( argc!=3 && argc!=4 && argc!=5 && argc!=6 ) {
    std::cout << "USAGE: ./finalize_TTZDilepton [dataset] [selectionType] [bTaggerType=\"SSVHE\"] [PUType=\"HR11_73pb\"] [leptType=\"ALL\"]" << std::endl;
    return 13;
  }

  std::string dataset(argv[1]);
  std::string selectionType(argv[2]);

  std::string bTaggerType="SSVHE";
  if( argc==4 ) {
    std::string bTaggerType_str(argv[3]);
    bTaggerType = bTaggerType_str;
  }

  std::string PUType="HR11_73pb";
  if( argc==5 ) {
    std::string PUType_str(argv[4]);
    PUType = PUType_str;
  }

  std::string leptType="ALL";
  if( argc==6 ) {
    std::string leptType_str(argv[5]);
    leptType = leptType_str;
  }

  Ntp1Finalizer_TTZDilepton* nf = new Ntp1Finalizer_TTZDilepton( dataset, selectionType, bTaggerType, PUType, leptType );
  nf->set_inputAnalyzerType("TTZ");

  if( dataset=="DATA_HR11_v2" ) {
    nf->addFile("DoubleMu_Run2011A_FULL");   // first muons! important!
    nf->addFile("DoubleMu_Run2011B_v2");     // first muons! important!
    nf->addFile("SingleMu_Run2011A_FULL");   // first muons! important!
    nf->addFile("SingleMu_Run2011B_v2");     // first muons! important!
    nf->addFile("DoubleElectron_Run2011A_FULL");
    nf->addFile("DoubleElectron_Run2011B_v2");
  } else {
    nf->addFile( dataset );
  }

  nf->finalize();

  return 0;
}
/******************************************************************************
* Selects all particles in the given particle data set.
******************************************************************************/
void ParticleSelectionSet::selectAll(const PipelineFlowState& state)
{
	// Make a backup of the old selection state so it may be restored.
	if(dataset()->undoStack().isRecording())
		dataset()->undoStack().push(new ReplaceSelectionOperation(this));

	ParticlePropertyObject* identifiers = ParticlePropertyObject::findInState(state, ParticleProperty::IdentifierProperty);
	if(useIdentifiers() && identifiers != nullptr) {
		_selection.clear();
		_selectedIdentifiers.clear();
		for(int id : identifiers->constIntRange())
			_selectedIdentifiers.insert(id);
	}
	else {
		_selection.fill(true, particleCount(state));
		_selectedIdentifiers.clear();
	}

	notifyDependents(ReferenceEvent::TargetChanged);
}
/******************************************************************************
* Returns the world space point around which the viewport camera orbits.
******************************************************************************/
Point3 ViewportConfiguration::orbitCenter()
{
	// Update orbiting center.
	if(orbitCenterMode() == ORBIT_SELECTION_CENTER) {
		Box3 selectionBoundingBox;
		for(SceneNode* node : dataset()->selection()->nodes()) {
			selectionBoundingBox.addBox(node->worldBoundingBox(dataset()->animationSettings()->time()));
		}
		if(!selectionBoundingBox.isEmpty())
			return selectionBoundingBox.center();
		else {
			Box3 sceneBoundingBox = dataset()->sceneRoot()->worldBoundingBox(dataset()->animationSettings()->time());
			if(!sceneBoundingBox.isEmpty())
				return sceneBoundingBox.center();
		}
	}
	else if(orbitCenterMode() == ORBIT_USER_DEFINED) {
		return _userOrbitCenter;
	}
	return Point3::Origin();
}
template <typename T, typename StoragePolicy>
inline typename boost::enable_if<is_multi_array<T>, dataset>::type
create_dataset(h5xxObject const& object, std::string const& name, T const& value, StoragePolicy const& storage_policy = StoragePolicy())
{
    typedef typename T::element value_type;
    hid_t type_id = ctype<value_type>::hid();   // this ID must not be closed
    enum { rank = T::dimensionality };

    // --- create a temporary dataspace based on the input array dimensions
    boost::array<hsize_t, rank> dims;
    std::copy(value.shape(), value.shape() + rank, dims.begin());

    return dataset(object, name, type_id, dataspace(dims), storage_policy);
}
/******************************************************************************
* Aligns the current viewing direction to the slicing plane.
******************************************************************************/
void SliceModifierEditor::onAlignViewToPlane()
{
	TimeInterval interval;

	Viewport* vp = dataset()->viewportConfig()->activeViewport();
	if(!vp) return;

	// Get the object to world transformation for the currently selected object.
	ObjectNode* node = dynamic_object_cast<ObjectNode>(dataset()->selection()->front());
	if(!node) return;
	const AffineTransformation& nodeTM = node->getWorldTransform(dataset()->animationSettings()->time(), interval);

	// Transform the current slicing plane to the world coordinate system.
	SliceModifier* mod = static_object_cast<SliceModifier>(editObject());
	if(!mod) return;
	Plane3 planeLocal = mod->slicingPlane(dataset()->animationSettings()->time(), interval);
	Plane3 planeWorld = nodeTM * planeLocal;

	// Calculate the intersection point of the current viewing direction with the current slicing plane.
	Ray3 viewportRay(vp->cameraPosition(), vp->cameraDirection());
	FloatType t = planeWorld.intersectionT(viewportRay);
	Point3 intersectionPoint;
	if(t != FLOATTYPE_MAX)
		intersectionPoint = viewportRay.point(t);
	else
		intersectionPoint = Point3::Origin() + nodeTM.translation();

	if(vp->isPerspectiveProjection()) {
		FloatType distance = (vp->cameraPosition() - intersectionPoint).length();
		vp->setViewType(Viewport::VIEW_PERSPECTIVE);
		vp->setCameraDirection(-planeWorld.normal);
		vp->setCameraPosition(intersectionPoint + planeWorld.normal * distance);
	}
	else {
		vp->setViewType(Viewport::VIEW_ORTHO);
		vp->setCameraDirection(-planeWorld.normal);
	}

	vp->zoomToSelectionExtents();
}
/******************************************************************************
* Adopts the selection state from the modifier's input.
******************************************************************************/
void ParticleSelectionSet::resetSelection(const PipelineFlowState& state)
{
	// Take a snapshot of the current selection.
	ParticlePropertyObject* selProperty = ParticlePropertyObject::findInState(state, ParticleProperty::SelectionProperty);
	if(selProperty) {

		// Make a backup of the old snapshot so it may be restored.
		if(dataset()->undoStack().isRecording())
			dataset()->undoStack().push(new ReplaceSelectionOperation(this));

		ParticlePropertyObject* identifierProperty = ParticlePropertyObject::findInState(state, ParticleProperty::IdentifierProperty);
		if(identifierProperty && useIdentifiers()) {
			OVITO_ASSERT(selProperty->size() == identifierProperty->size());
			_selectedIdentifiers.clear();
			_selection.clear();
			const int* s = selProperty->constDataInt();
			for(int id : identifierProperty->constIntRange()) {
				if(*s++)
					_selectedIdentifiers.insert(id);
			}
		}
		else {
			// Take a snapshot of the selection state.
			_selectedIdentifiers.clear();
			_selection.fill(false, selProperty->size());
			const int* s = selProperty->constDataInt();
			const int* s_end = s + selProperty->size();
			for(int index = 0; s != s_end; ++s, index++) {
				if(*s) _selection.setBit(index);
			}
		}

		notifyDependents(ReferenceEvent::TargetChanged);
	}
	else {
		// Reset selection snapshot if input doesn't contain a selection state.
		clearSelection(state);
	}
}