void Feature::data(const DataArray &data) {
    util::checkEntityInput(data);
    if (!data.isValidEntity()) {
        throw UninitializedEntity();
    }
    backend()->data(data.id());
}
template<int Rank>
typename Wavefunction<Rank>::Ptr Wavefunction<Rank>::CopyDeep() const {
    // Set up the representation of the new wavefunction.
    Ptr newPsi = Ptr(new Wavefunction());
    newPsi->SetRepresentation(this->Repr->Copy());

    // Allocate and copy each data buffer.
    for (size_t i = 0; i < this->WavefunctionData.size(); i++) {
        // Allocate data buffer in new wavefunction
        DataArray oldData(GetData(i));
        int bufferName = newPsi->AllocateData(oldData.shape());
        if (bufferName != (int)i) {
            throw std::runtime_error("Buffer name mismatch in Wavefunction::CopyDeep()");
        }
        // Copy data buffer to new wavefunction
        DataArray newData(newPsi->GetData(bufferName));
        newData = oldData;
    }

    // Set active buffer on the new wavefunction
    newPsi->SetActiveBuffer(this->GetActiveBufferName());
    return newPsi;
}
void PerconaFTEngine::Stats(Context& ctx, std::string& str) {
    str.append("perconaft_version:")
       .append(stringfromll(DB_VERSION_MAJOR)).append(".")
       .append(stringfromll(DB_VERSION_MINOR)).append(".")
       .append(stringfromll(DB_VERSION_PATCH)).append("\r\n");
    DataArray nss;
    ListNameSpaces(ctx, nss);
    PerconaFTLocalContext& local_ctx = g_local_ctx.GetValue();
    for (size_t i = 0; i < nss.size(); i++) {
        DB* db = GetFTDB(ctx, nss[i], false);
        if (NULL == db) continue;
        str.append("\r\nDB[").append(nss[i].AsString()).append("] Stats:\r\n");
        DB_BTREE_STAT64 st;
        memset(&st, 0, sizeof(st));
        db->stat64(db, local_ctx.transc.Peek(), &st);
        str.append("bt_nkeys:").append(stringfromll(st.bt_nkeys)).append("\r\n");
        str.append("bt_ndata:").append(stringfromll(st.bt_ndata)).append("\r\n");
        str.append("bt_fsize:").append(stringfromll(st.bt_fsize)).append("\r\n");
        str.append("bt_dsize:").append(stringfromll(st.bt_dsize)).append("\r\n");
        str.append("bt_create_time_sec:").append(stringfromll(st.bt_create_time_sec)).append("\r\n");
        str.append("bt_modify_time_sec:").append(stringfromll(st.bt_modify_time_sec)).append("\r\n");
        str.append("bt_verify_time_sec:").append(stringfromll(st.bt_verify_time_sec)).append("\r\n");
    }
}
bool Tag::hasReference(const DataArray &reference) const {
    if (!util::checkEntityInput(reference, false)) {
        return false;
    }
    DataArray da = backend()->getReference(reference.name());
    return da && da.id() == reference.id();
}
DataView retrieveFeatureData(const Tag &tag, size_t feature_index) {
    if (tag.featureCount() == 0) {
        throw nix::OutOfBounds("There are no features associated with this tag!", 0);
    }
    if (feature_index >= tag.featureCount()) {
        throw nix::OutOfBounds("Feature index out of bounds.", 0);
    }
    Feature feat = tag.getFeature(feature_index);
    DataArray data = feat.data();
    if (data == nix::none) {
        throw nix::UninitializedEntity();
    }
    if (feat.linkType() == nix::LinkType::Tagged) {
        NDSize offset, count;
        getOffsetAndCount(tag, data, offset, count);
        if (!positionAndExtentInData(data, offset, count)) {
            throw nix::OutOfBounds("Requested data slice out of the extent of the Feature!", 0);
        }
        DataView io = DataView(data, count, offset);
        return io;
    }
    // For untagged and indexed features, return the full data.
    NDSize offset(data.dataExtent().size(), 0);
    DataView io = DataView(data, data.dataExtent(), offset);
    return io;
}
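// A hedged, self-contained sketch of the kind of check positionAndExtentInData()
// performs in the Tagged branch above: a slice described by `offset` and `count`
// fits the data iff offset[i] + count[i] <= extent[i] on every axis. NDSize is
// replaced by std::vector<size_t>; sliceInExtent is an illustrative name, not NIX API.
#include <cstdio>
#include <vector>

bool sliceInExtent(const std::vector<size_t>& extent,
                   const std::vector<size_t>& offset,
                   const std::vector<size_t>& count) {
    if (offset.size() != extent.size() || count.size() != extent.size()) {
        return false;                       // rank mismatch
    }
    for (size_t i = 0; i < extent.size(); ++i) {
        if (offset[i] + count[i] > extent[i]) {
            return false;                   // slice runs past the end of this axis
        }
    }
    return true;
}

int main() {
    std::printf("%d\n", sliceInExtent({10, 10}, {2, 3}, {5, 7}));  // 1: fits
    std::printf("%d\n", sliceInExtent({10, 10}, {6, 0}, {5, 1}));  // 0: 6 + 5 > 10
    return 0;
}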
template<typename T>
void findHistogram(IDataArray::Pointer inputData, int32_t* ensembleArray, int32_t* eIds,
                   int NumberOfBins, bool removeBiasedFeatures, bool* biasedFeatures) {
    DataArray<T>* featureArray = DataArray<T>::SafePointerDownCast(inputData.get());
    if (NULL == featureArray) {
        return;
    }
    T* fPtr = featureArray->getPointer(0);
    size_t numfeatures = featureArray->getNumberOfTuples();

    int32_t bin;
    int32_t ensemble;
    float min = 1000000.0f;
    float max = 0.0f;
    float value;

    // Scan for the data range (feature 0 is skipped).
    for (size_t i = 1; i < numfeatures; i++) {
        value = fPtr[i];
        if (value > max) { max = value; }
        if (value < min) { min = value; }
    }

    float stepsize = (max - min) / NumberOfBins;
    for (size_t i = 1; i < numfeatures; i++) {
        if (removeBiasedFeatures == false || biasedFeatures[i] == false) {
            ensemble = eIds[i];
            bin = (fPtr[i] - min) / stepsize;
            if (bin >= NumberOfBins) { bin = NumberOfBins - 1; }
            ensembleArray[(NumberOfBins * ensemble) + bin]++;
        }
    }
}
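// The binning above is the standard fixed-width scheme: scan for the data range,
// compute (value - min) / stepsize, and clamp the maximum value into the top bin.
// A minimal sketch in plain C++ (no DREAM3D types and no biased-feature filtering),
// assuming max > min so the step size is non-zero:
#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
    const std::vector<float> values = {1.0f, 2.5f, 3.0f, 4.75f, 5.0f};
    const int numBins = 4;

    const float minV = *std::min_element(values.begin(), values.end());
    const float maxV = *std::max_element(values.begin(), values.end());
    const float step = (maxV - minV) / numBins;

    std::vector<int> counts(numBins, 0);
    for (float v : values) {
        int bin = static_cast<int>((v - minV) / step);
        if (bin >= numBins) { bin = numBins - 1; }  // v == maxV lands in the last bin
        ++counts[bin];
    }
    for (int i = 0; i < numBins; ++i) {
        std::printf("bin %d: %d\n", i, counts[i]);
    }
    return 0;
}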
Data& getElement(uint32_t idx) {
    if (elements.size() <= idx) {
        elements.resize(idx + 1);
    }
    return elements[idx];
}
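// The accessor above grows the container on demand so that any index becomes
// addressable. A self-contained sketch of the same pattern over std::vector with
// a hypothetical Data struct; resize() value-initializes the new slots. Note that
// a later growth may reallocate and invalidate previously returned references.
#include <cstdint>
#include <iostream>
#include <vector>

struct Data { int value = 0; };              // hypothetical element type

class ElementStore {
public:
    Data& getElement(uint32_t idx) {
        if (elements.size() <= idx) {
            elements.resize(idx + 1);        // grow so elements[idx] exists
        }
        return elements[idx];
    }
private:
    std::vector<Data> elements;
};

int main() {
    ElementStore store;
    store.getElement(5).value = 42;          // indices 0..5 are created on demand
    std::cout << store.getElement(5).value << '\n';
    return 0;
}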
int Engine::FlushAll(Context& ctx) {
    DataArray nss;
    ListNameSpaces(ctx, nss);
    for (size_t i = 0; i < nss.size(); i++) {
        Flush(ctx, nss[i]);
    }
    return 0;
}
void SortTimeFunction::f() {
    const int n = m_copy.size();
    const int elementSize = m_copy.getElementSize();
    DataArray data = m_copy;
    switch (elementSize) {
    case 1:
        m_sortMethod->getMethod()(data.getData(), n, elementSize, CountComparator<BYTE>(m_compareCount));
        break;
    case 2:
        m_sortMethod->getMethod()(data.getData(), n, elementSize, CountComparator<unsigned short>(m_compareCount));
        break;
    default:
        m_sortMethod->getMethod()(data.getData(), n, elementSize, CountComparator<unsigned int>(m_compareCount));
        break;
    }
}
// -----------------------------------------------------------------------------
//
// -----------------------------------------------------------------------------
void subtractVector3d(DataArray<double>::Pointer data, double* v) {
    size_t count = data->getNumberOfTuples();
    for (size_t i = 0; i < count; ++i) {
        double* ptr = data->getPointer(i * 3);
        ptr[0] = ptr[0] - v[0];
        ptr[1] = ptr[1] - v[1];
        ptr[2] = ptr[2] - v[2];
    }
}
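// The same interleaved-xyz translation in plain C++, with a std::vector standing
// in for DataArray<double>::Pointer; the names here are illustrative only:
#include <cstdio>
#include <vector>

void subtractVector3dFlat(std::vector<double>& data, const double v[3]) {
    // data holds contiguous (x, y, z) tuples, as getPointer(i * 3) implies above.
    for (size_t i = 0; i + 2 < data.size(); i += 3) {
        data[i]     -= v[0];
        data[i + 1] -= v[1];
        data[i + 2] -= v[2];
    }
}

int main() {
    std::vector<double> points = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0};
    const double shift[3] = {1.0, 1.0, 1.0};
    subtractVector3dFlat(points, shift);
    for (double p : points) { std::printf("%.1f ", p); }
    std::printf("\n");                        // prints: 0.0 1.0 2.0 3.0 4.0 5.0
    return 0;
}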
int Engine::CompactAll(Context& ctx) {
    DataArray nss;
    ListNameSpaces(ctx, nss);
    for (size_t i = 0; i < nss.size(); i++) {
        KeyObject start, end;
        start.SetNameSpace(nss[i]);
        Compact(ctx, start, end);
    }
    return 0;
}
/**
 * @brief Static constructor
 * @param numTuples The number of tuples in the array.
 * @param numComponents The number of components in each tuple
 * @param name The name of the array
 * @return Boost::Shared_Ptr wrapping an instance of DataArray<T>
 */
static Pointer CreateArray(size_t numTuples, int numComponents, const std::string &name) {
    DataArray<T>* d = new DataArray<T>(numTuples, numComponents, true);
    if (d->Allocate() < 0) {
        // Could not allocate enough memory; free the object and return a null pointer.
        delete d;
        return DataArray<T>::NullPointer();
    }
    d->SetName(name);
    Pointer ptr(d);
    return ptr;
}
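// The factory above follows a common pattern: allocate inside a static constructor
// and hand back a null smart pointer on failure, so callers only have one thing to
// check. A hedged stand-alone sketch of that pattern in standard C++ (SimpleArray
// and its members are hypothetical, not the DREAM3D API):
#include <iostream>
#include <memory>
#include <new>
#include <string>
#include <vector>

template<typename T>
class SimpleArray {
public:
    using Pointer = std::shared_ptr<SimpleArray<T>>;

    static Pointer CreateArray(size_t numTuples, int numComponents, const std::string& name) {
        Pointer p(new SimpleArray<T>(numTuples, numComponents, name));
        if (!p->allocate()) {
            return Pointer();                 // analogous to returning a null pointer above
        }
        return p;
    }

    const std::string& name() const { return m_name; }
    size_t size() const { return m_data.size(); }

private:
    SimpleArray(size_t t, int c, std::string n)
        : m_tuples(t), m_components(c), m_name(std::move(n)) {}

    bool allocate() {
        try {
            m_data.resize(m_tuples * static_cast<size_t>(m_components));
        } catch (const std::bad_alloc&) {
            return false;                     // mirrors the Allocate() < 0 branch
        }
        return true;
    }

    size_t m_tuples;
    int m_components;
    std::string m_name;
    std::vector<T> m_data;
};

int main() {
    auto arr = SimpleArray<float>::CreateArray(10, 3, "Normals");
    std::cout << (arr ? arr->name() : std::string("allocation failed"))
              << " size=" << (arr ? arr->size() : 0) << '\n';
    return 0;
}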
template<typename T>
DataArray createDataArray(const std::string &name, const std::string &type, const T &data) {
    const Hydra<const T> hydra(data);

    DataType dtype = hydra.element_data_type();
    const NDSize shape = hydra.shape();

    DataArray da = createDataArray(name, type, dtype, shape);

    const NDSize offset(shape.size(), 0);
    da.setData(data, offset);

    return da;
}
/**
 * Combine the input images according to the packer layout.
 */
static void combineImages() {
    // Select the output format.
    const int ch = getChannels(ofmt_type);
    const int type = (ch == 4) ? CV_8UC4 : CV_8UC3;
    const cv::Scalar scalar = (ch == 4) ? cv::Scalar(127, 127, 127, 255) : cv::Scalar(127, 127, 127); // initial fill color
    cv::Mat dst(cv::Size(packer.getW(), packer.getH()), type, scalar);

    int count = 0; // for debug
    DataArray::iterator it = inputs.begin();
    for (; it != inputs.end(); it++) {
        if (!it->fit) continue; // skip images that were not placed
        cv::Mat src = cv::imread(it->path, -1);
        if (src.channels() != ch) {
            adjustmentImage(src, ch);
        }
        cv::Mat roi = dst(cv::Rect(it->fit->rect.getX() + packer.getPadding(),
                                   it->fit->rect.getY() + packer.getPadding(),
                                   it->w, it->h));
        src.copyTo(roi);

        // for debug
#if 0
        cv::rectangle(dst, cv::Rect(it->fit->rect.getX(), it->fit->rect.getY(),
                                    it->fit->rect.getW(), it->fit->rect.getH()),
                      cv::Scalar(255, 255, 255), 1);
        std::ostringstream oss;
        oss << it->w << "x" << it->h << "(" << count << ")";
        count++;
        const std::string size = oss.str();
        cv::putText(dst, size, cv::Point(it->fit->rect.getX() + 1, it->fit->rect.getY() + 21),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(0, 0, 0), 1);
        cv::putText(dst, size, cv::Point(it->fit->rect.getX(), it->fit->rect.getY() + 20),
                    cv::FONT_HERSHEY_SIMPLEX, 0.5, cv::Scalar(255, 255, 255), 1);
#endif
    }

    // Output the packed image.
    if ((om_type == OutputMode_WriteOnly) || (om_type == OutputMode_Both)) {
        bool ret = cv::imwrite(output_path, dst, params);
    }
    if ((om_type == OutputMode_DisplayOnly) || (om_type == OutputMode_Both)) {
        std::ostringstream oss;
        oss << "packed image(" << packer.getW() << " x " << packer.getH() << ")";
        const std::string name = oss.str();
        cv::namedWindow(name, CV_WINDOW_AUTOSIZE | CV_WINDOW_KEEPRATIO);
        cv::imshow(name, dst);
        if (ch == 4) {
            std::vector<cv::Mat> mv;
            cv::split(dst, mv);
            cv::Mat dsta = mv[3].clone();
            cv::imshow("packed image(alpha)", dsta);
        }
    }
}
// -----------------------------------------------------------------------------
//
// -----------------------------------------------------------------------------
std::vector<int32_t> TriangleOps::findAdjacentTriangles(SurfaceMeshDataContainer* sm,
                                                        int32_t triangleIndex,
                                                        int32_t label) {
    std::vector<int32_t> adjacentTris;

    // Get the master list of triangles for the mesh
    DREAM3D::SurfaceMesh::FaceList_t::Pointer facesPtr = sm->getFaces();

    IDataArray::Pointer flPtr = sm->getFaceData(DREAM3D::FaceData::SurfaceMeshFaceLabels);
    DataArray<int32_t>* faceLabelsPtr = DataArray<int32_t>::SafePointerDownCast(flPtr.get());
    int32_t* faceLabels = faceLabelsPtr->GetPointer(0);

    // Get the Triangle Neighbor Structure
    MeshFaceNeighbors::Pointer triNeighbors = sm->getMeshFaceNeighborLists();

    // For the specific triangle that was passed, get its neighbor list
    uint16_t count = triNeighbors->getNumberOfFaces(triangleIndex);
    int32_t* nList = triNeighbors->getNeighborListPointer(triangleIndex);

    if (count < 3) {
        std::cout << "Triangle Neighbor List had only " << count << " neighbors. Must be at least 3." << std::endl;
        BOOST_ASSERT(false);
    }
    else if (count == 3) {
        // This triangle only has 3 neighbors, so all three are assumed to share the same label set.
        for (uint16_t n = 0; n < count; ++n) {
            adjacentTris.push_back(nList[n]);
        }
    }
    else {
        // Iterate over the indices to find triangles that match the label and are NOT the current triangle index
        for (uint16_t n = 0; n < count; ++n) {
            int32_t fl_0 = faceLabels[nList[n] * 2];
            int32_t fl_1 = faceLabels[nList[n] * 2 + 1];
            if ((fl_0 == label || fl_1 == label) && (nList[n] != triangleIndex)) {
                adjacentTris.push_back(nList[n]);
            }
        }
    }
    return adjacentTris;
}
int MMKVImpl::SAdd(DBID db, const Data& key, const DataArray& elements) {
    if (m_readonly) {
        return ERR_PERMISSION_DENIED;
    }
    int err = 0;
    RWLockGuard<MemorySegmentManager, WRITE_LOCK> keylock_guard(m_segment);
    EnsureWritableValueSpace();
    ObjectAllocator allocator = m_segment.MSpaceAllocator<Object>();
    StringSet* set = GetObject<StringSet>(db, key, V_TYPE_SET, true, err)(std::less<Object>(), allocator);
    if (0 != err) {
        return err;
    }
    int inserted = 0;
    for (size_t i = 0; i < elements.size(); i++) {
        std::pair<StringSet::iterator, bool> ret = set->insert(Object(elements[i], true));
        if (ret.second) {
            m_segment.AssignObjectValue(*(ret.first), elements[i], true);
            inserted++;
        }
    }
    return inserted;
}
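// The insert loop above relies on set-style semantics: insert() returns a pair
// whose .second is true only when the element was newly added, which yields the
// SADD-style "number of new members" return value. A minimal sketch with a plain
// std::set<std::string> (no MMKV shared-memory types):
#include <iostream>
#include <set>
#include <string>
#include <vector>

int addAll(std::set<std::string>& s, const std::vector<std::string>& elements) {
    int inserted = 0;
    for (const std::string& e : elements) {
        if (s.insert(e).second) {   // true only for members not already present
            ++inserted;
        }
    }
    return inserted;
}

int main() {
    std::set<std::string> s;
    std::cout << addAll(s, {"a", "b", "a"}) << '\n';  // prints 2
    std::cout << addAll(s, {"b", "c"}) << '\n';       // prints 1
    return 0;
}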
RangeDimension::RangeDimension(const DataArray &array)
    : ImplContainer() {
    if (array.dataExtent().size() > 1) {
        throw InvalidRank("Error creating RangeDimension with DataArray: array must be 1-D!");
    }
}
void Clear() {
    type = 0;
    ns.Clear();
    key.Clear();
    elements.clear();
}
void TestBlock::testMultiTagAccess() {
    vector<string> names = { "tag_a", "tag_b", "tag_c", "tag_d", "tag_e" };

    // Create a valid positions data array.
    typedef boost::multi_array<double, 3>::index index;
    DataArray positions = block.createDataArray("array_one", "testdata", DataType::Double, nix::NDSize({ 3, 4, 2 }));
    boost::multi_array<double, 3> A(boost::extents[3][4][2]);
    int values = 0;
    for (index i = 0; i != 3; ++i)
        for (index j = 0; j != 4; ++j)
            for (index k = 0; k != 2; ++k)
                A[i][j][k] = values++;
    positions.setData(A);

    CPPUNIT_ASSERT(block.multiTagCount() == 0);
    CPPUNIT_ASSERT(block.multiTags().size() == 0);
    CPPUNIT_ASSERT(block.getMultiTag("invalid_id") == false);

    vector<string> ids;
    for (auto it = names.begin(); it != names.end(); it++) {
        MultiTag tag = block.createMultiTag(*it, "segment", positions);
        CPPUNIT_ASSERT(tag.name() == *it);
        ids.push_back(tag.id());
    }
    CPPUNIT_ASSERT_THROW(block.createMultiTag(names[0], "segment", positions), DuplicateName);

    CPPUNIT_ASSERT(block.multiTagCount() == names.size());
    CPPUNIT_ASSERT(block.multiTags().size() == names.size());

    for (auto it = ids.begin(); it != ids.end(); it++) {
        MultiTag tag = block.getMultiTag(*it);
        CPPUNIT_ASSERT(block.hasMultiTag(*it) == true);
        CPPUNIT_ASSERT(tag.id() == *it);
        block.deleteMultiTag(*it);
    }

    CPPUNIT_ASSERT(block.multiTagCount() == 0);
    CPPUNIT_ASSERT(block.multiTags().size() == 0);
    CPPUNIT_ASSERT(block.getMultiTag("invalid_id") == false);
}
void MultiTag::extents(const DataArray &extents) {
    if (extents == none) {
        backend()->extents(none);
    } else {
        backend()->extents(extents.id());
    }
}
template<typename DataArray>
static void UpdateInstancingParams(bool allDataDirty, const DataArray& array,
                                   csRef<iRenderBuffer>& buffer, csShaderVariable* sv) {
    bool updateData = allDataDirty;
    if (!buffer || (buffer->GetElementCount() != array.Capacity())) {
        buffer = csRenderBuffer::CreateRenderBuffer(array.Capacity(), CS_BUF_STREAM, CS_BUFCOMP_FLOAT,
                                                    sizeof(typename DataArray::ValueType) / sizeof(float));
        sv->SetValue(buffer);
        updateData = true;
    }
    if (updateData)
        buffer->SetData(array.GetArray());
}
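// The helper above reallocates the render buffer only when the element count no
// longer matches, and any reallocation forces a full data upload. A hedged sketch
// of the same lazy-reallocation idea using std::vector in place of iRenderBuffer
// (syncBuffer is an illustrative name, not Crystal Space API):
#include <algorithm>
#include <cstdio>
#include <vector>

void syncBuffer(std::vector<float>& buffer, const std::vector<float>& source, bool& dataDirty) {
    if (buffer.size() != source.size()) {
        buffer.resize(source.size());    // new storage, so everything must be re-copied
        dataDirty = true;
    }
    if (dataDirty) {
        std::copy(source.begin(), source.end(), buffer.begin());
        dataDirty = false;
    }
}

int main() {
    std::vector<float> target;
    std::vector<float> source = {1.f, 2.f, 3.f};
    bool dirty = false;
    syncBuffer(target, source, dirty);   // size changed, so data is copied once
    syncBuffer(target, source, dirty);   // no change, nothing copied
    std::printf("%zu elements synced\n", target.size());
    return 0;
}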
void Feature::data(const DataArray &data) {
    if (data == none) {
        throw std::runtime_error("Empty data entity (DataArray) given");
    } else {
        backend()->data(data.id());
    }
}
FeatureHDF5::FeatureHDF5(const shared_ptr<IFile> &file, const shared_ptr<IBlock> &block,
                         const Group &group, const string &id, DataArray data, LinkType link_type, time_t time)
    : EntityHDF5(file, group, id, time), block(block) {
    linkType(link_type);
    // TODO: the line below currently throws an exception if the DataArray
    // is not in block - to consider if we prefer copying it to the block
    this->data(data.id());
}
FeatureFS::FeatureFS(const std::shared_ptr<base::IFile> &file, const std::shared_ptr<base::IBlock> &block,
                     const std::string &loc, const std::string &id, DataArray data, LinkType link_type, time_t time)
    : EntityFS(file, (bfs::path(loc) / bfs::path(id)).string(), id, time), block(block) {
    linkType(link_type);
    // TODO: the line below currently throws an exception if the DataArray
    // is not in block - to consider if we prefer copying it to the block
    this->data(data.id());
}
int MMKVImpl::SDiff(DBID db, const DataArray& keys, const StringArrayResult& diffs) {
    if (keys.size() < 2) {
        return ERR_INVALID_TYPE;
    }
    RWLockGuard<MemorySegmentManager, READ_LOCK> keylock_guard(m_segment);
    return GenericSInterDiffUnion(db, OP_DIFF, keys, NULL, &diffs);
}
/**
 * @brief Static constructor
 * @param numTuples The number of tuples in the array.
 * @param cDims The component dimensions of each tuple
 * @param name The name of the array
 * @param allocate Whether to allocate the backing memory now
 * @return Boost::Shared_Ptr wrapping an instance of DataArray<T>
 */
static Pointer CreateArray(size_t numTuples, QVector<size_t> cDims, const QString& name, bool allocate = true) {
    if (name.isEmpty() == true) {
        return NullPointer();
    }
    DataArray<T>* d = new DataArray<T>(numTuples, cDims, name, allocate);
    if (allocate) {
        if (d->allocate() < 0) {
            // Could not allocate enough memory; free the object and return a null pointer.
            delete d;
            return DataArray<T>::NullPointer();
        }
    }
    Pointer ptr(d);
    return ptr;
}
int PerconaFTEngine::ListNameSpaces(Context& ctx, DataArray& nss) {
    RWLockGuard<SpinRWLock> guard(m_lock, true);
    FTDBTable::iterator it = m_dbs.begin();
    while (it != m_dbs.end()) {
        nss.push_back(it->first);
        it++;
    }
    return 0;
}
EllMatrix(index_t rows, index_t cols, index_t nnz_per_row)
    : _rows(rows), _cols(cols), nnz_per_row(nnz_per_row) {
    static constexpr auto step = align_bytes / sizeof(scalar_t);
    // Round the row count up to the next multiple of `step` (the number of
    // scalars that fit in one aligned block) before sizing the ELL storage.
    auto aligned_rows = rows;
    while (aligned_rows % step != 0) {
        ++aligned_rows;
    }
    data.resize(aligned_rows, nnz_per_row);
    indices.resize(aligned_rows, nnz_per_row);
}
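// The while-loop above rounds the row count up to the next multiple of
// step = align_bytes / sizeof(scalar_t). The closed form below is equivalent for
// step > 0 and shows the arithmetic; align_bytes = 64 is only an assumed value here:
#include <cstddef>
#include <cstdio>

std::size_t alignRows(std::size_t rows, std::size_t step) {
    return ((rows + step - 1) / step) * step;   // round up to a multiple of step
}

int main() {
    const std::size_t align_bytes = 64;                      // assumption for the demo
    const std::size_t step = align_bytes / sizeof(double);   // 8 doubles per aligned block
    std::printf("%zu -> %zu\n", static_cast<std::size_t>(10), alignRows(10, step));  // 10 -> 16
    return 0;
}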
void getOffsetAndCount(const Tag &tag, const DataArray &array, NDSize &offset, NDSize &count) {
    vector<double> position = tag.position();
    vector<double> extent = tag.extent();
    vector<string> units = tag.units();

    NDSize temp_offset(position.size());
    NDSize temp_count(position.size(), 1);

    if (array.dimensionCount() != position.size() ||
        (extent.size() > 0 && extent.size() != array.dimensionCount())) {
        throw std::runtime_error("Dimensionality of position or extent vector does not match dimensionality of data!");
    }
    for (size_t i = 0; i < position.size(); ++i) {
        Dimension dim = array.getDimension(i + 1);
        temp_offset[i] = positionToIndex(position[i], i >= units.size() ? "none" : units[i], dim);
        if (i < extent.size()) {
            ndsize_t c = positionToIndex(position[i] + extent[i], i >= units.size() ? "none" : units[i], dim) - temp_offset[i];
            temp_count[i] = (c > 1) ? c : 1;
        }
    }
    offset = temp_offset;
    count = temp_count;
}
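// getOffsetAndCount() leans on positionToIndex() to turn a physical position plus
// unit into an index along one dimension. A hedged sketch of only the regularly
// sampled case (known start offset and sampling interval); the real NIX routine
// also handles unit scaling and the other dimension types:
#include <cmath>
#include <cstddef>
#include <cstdio>

std::size_t positionToIndexSampled(double position, double axisOffset, double samplingInterval) {
    // Nearest sample to the requested physical position.
    return static_cast<std::size_t>(std::round((position - axisOffset) / samplingInterval));
}

int main() {
    // A trace sampled every 0.5 ms starting at t = 0: position 2.0 ms -> index 4.
    std::printf("%zu\n", positionToIndexSampled(2.0, 0.0, 0.5));
    return 0;
}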
bool positionInData(const DataArray &data, const NDSize &position) {
    NDSize data_size = data.dataExtent();
    bool valid = true;

    if (data_size.size() != position.size()) {
        return false;
    }
    for (size_t i = 0; i < data_size.size(); i++) {
        valid &= position[i] < data_size[i];
    }
    return valid;
}
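// The same point-in-extent test with std::vector<size_t> in place of NDSize,
// written with an early exit instead of accumulating a flag; the result is the
// same because one out-of-range axis already decides it:
#include <cstdio>
#include <vector>

bool positionInExtent(const std::vector<size_t>& extent, const std::vector<size_t>& position) {
    if (extent.size() != position.size()) {
        return false;                        // rank mismatch
    }
    for (size_t i = 0; i < extent.size(); ++i) {
        if (position[i] >= extent[i]) {
            return false;                    // this axis is out of range
        }
    }
    return true;
}

int main() {
    std::printf("%d\n", positionInExtent({3, 4, 2}, {2, 3, 1}));  // 1
    std::printf("%d\n", positionInExtent({3, 4, 2}, {3, 0, 0}));  // 0
    return 0;
}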