void OsmAnd::ImageMapLayerProvider::AsyncImage::submit(const bool requestSucceeded, const QByteArray& image) const { std::shared_ptr<IMapDataProvider::Data> data; if (requestSucceeded && !image.isNull()) { // Decode image data std::shared_ptr<SkBitmap> bitmap(new SkBitmap()); SkMemoryStream imageStream(image.constData(), image.length(), false); if (SkImageDecoder::DecodeStream(&imageStream, bitmap.get(), SkColorType::kUnknown_SkColorType, SkImageDecoder::kDecodePixels_Mode)) { data.reset(new IRasterMapLayerProvider::Data( request->tileId, request->zoom, provider->getAlphaChannelPresence(), provider->getTileDensityFactor(), bitmap)); } else { callback(provider, false, data, nullptr); delete this; return; } } callback(provider, requestSucceeded, data, nullptr); delete this; }
bool OsmAnd::ImageMapLayerProvider::obtainData(
    const IMapDataProvider::Request& request_,
    std::shared_ptr<IMapDataProvider::Data>& outData,
    std::shared_ptr<Metric>* const pOutMetric /*= nullptr*/)
{
    const auto& request = MapDataProviderHelpers::castRequest<Request>(request_);

    if (pOutMetric)
        pOutMetric->reset();

    // Providers without natural data support go through the generic fallback.
    if (!supportsNaturalObtainData())
        return MapDataProviderHelpers::nonNaturalObtainData(this, request, outData, pOutMetric);

    // Fetch the raw encoded image for the requested tile
    const auto encodedImage = obtainImage(request);
    if (encodedImage.isNull())
    {
        // No image available for this tile: report success with empty data
        outData.reset();
        return true;
    }

    // Decode image data
    std::shared_ptr<SkBitmap> bitmap(new SkBitmap());
    SkMemoryStream encodedStream(encodedImage.constData(), encodedImage.length(), false);
    const bool decoded = SkImageDecoder::DecodeStream(
        &encodedStream,
        bitmap.get(),
        SkColorType::kUnknown_SkColorType,
        SkImageDecoder::kDecodePixels_Mode);
    if (!decoded)
        return false;

    // Return tile
    outData.reset(new IRasterMapLayerProvider::Data(
        request.tileId,
        request.zoom,
        getAlphaChannelPresence(),
        getTileDensityFactor(),
        bitmap));
    return true;
}
/* Protected functions */

/// Builds an osg::Image from the embedded health-and-safety TGA byte array.
/// Returns a heap-allocated image the caller takes ownership of, or nullptr
/// if no TGA reader is registered or the bytes do not form a valid image.
osg::Image* OculusHealthAndSafetyWarning::buildImageFromByteStream()
{
    // Wrap the embedded byte array in a stream so the OSG reader can consume
    // it without touching the filesystem. reinterpret_cast replaces the
    // original C-style cast; the byte count is taken from the array itself.
    std::string imageString(reinterpret_cast<const char*>(healthAndSafety_tga),
                            sizeof(healthAndSafety_tga));
    std::stringstream imageStream(imageString);

    osgDB::ReaderWriter* rw =
        osgDB::Registry::instance()->getReaderWriterForExtension("tga");
    if (!rw)
    {
        std::cerr << "Error: could not find a suitable reader to load the specified data" << std::endl;
        return nullptr;
    }

    osgDB::ReaderWriter::ReadResult rr = rw->readImage(imageStream);
    if (!rr.validImage())
    {
        std::cerr << "Error: Cannot build a valid image from data" << std::endl;
        return nullptr;
    }

    // takeImage() releases ownership from the ReadResult to the caller.
    return rr.takeImage();
}
void windowManager::openProject() { //const QString fileString = "test.out"; const QString fileString = QFileDialog::getOpenFileName(activeWindow(), tr("Open project"), ".", "Cstitch files (*.xst)\nAll files (*.*)"); if (fileString.isEmpty()) { return; } QFile inFile(fileString); inFile.open(QIODevice::ReadOnly); // the save file starts with xml text, so read that first QTextStream textInStream(&inFile); QString inString; inString = textInStream.readLine() + "\n"; // (A day after the initial release I changed the program name from // stitch to cstitch, so check for either...) QRegExp rx("^<(cstitch|stitch) version="); rx.indexIn(inString); const QString programName = rx.cap(1); if (programName != "cstitch" && programName != "stitch") { // uh oh QMessageBox::critical(NULL, tr("Bad project file"), tr("Sorry, ") + fileString + tr(" is not a valid project file ") + tr("(diagnostic: wrong first line)")); return; } int lineCount = 0; const int maxLineCount = 100000; const QString endTag = "</" + programName + ">"; QString thisString = ""; do { thisString = textInStream.readLine(); inString += thisString + "\n"; ++lineCount; if (lineCount > maxLineCount) { QMessageBox::critical(NULL, tr("Bad project file"), tr("Sorry, ") + fileString + tr(" appears to be corrupted ") + tr("(diagnostic: bad separator)")); return; } } while (thisString != endTag); QDomDocument doc; const bool xmlLoadSuccess = doc.setContent(inString); if (!xmlLoadSuccess) { QMessageBox::critical(NULL, tr("Bad project file"), tr("Sorry, ") + fileString + tr(" appears to be corrupted ") + tr("(diagnostic: parse failed)")); return; } // a blank line between the xml and the image inString += textInStream.readLine() + "\n"; // hide everything except progress meters while we regenerate this project hideWindows_ = true; hideWindows(); // how many total images are we restoring? 
int imageCount = 1; // one colorChooser image imageCount += doc.elementsByTagName("color_compare_image").size(); imageCount += doc.elementsByTagName("square_window_image").size(); imageCount += doc.elementsByTagName("pattern_window_image").size(); groupProgressDialog progressMeter(imageCount); progressMeter.setMinimumDuration(2000); progressMeter.setWindowModality(Qt::WindowModal); progressMeter.show(); progressMeter.bumpCount(); altMeter::setGroupMeter(&progressMeter); // read the image file name const QString fileName = ::getElementText(doc, "image_name"); // read the project version number setProjectVersion(::getElementAttribute(doc, "cstitch", "version")); // now read the binary data image inFile.seek(inString.length()); QDataStream imageStream(&inFile); QImage newImage; imageStream >> newImage; inFile.seek(inString.length()); QDataStream imageData(&inFile); QByteArray imageByteArray; imageData >> imageByteArray; reset(newImage, imageByteArray, fileName); //// colorChooser colorChooser_.window()->setNewImage(newImage); progressMeter.bumpCount(); //// colorCompare QDomElement colorCompareElement = doc.elementsByTagName("color_compare").item(0).toElement(); if (!colorCompareElement.isNull()) { colorChooser* colorChooserObject = colorChooser_.window(); QDomNodeList compareImagesList(colorCompareElement. elementsByTagName("color_compare_image")); for (int i = 0, size = compareImagesList.size(); i < size; ++i) { QDomElement thisCompareElement(compareImagesList.item(i).toElement()); const int hiddenColorCompareIndex = colorChooserObject-> recreateImage(colorCompareSaver(thisCompareElement)); progressMeter.bumpCount(); //// squareWindow colorCompare* colorCompareObject = colorCompareWindow_.window(); QDomNodeList squareImagesList(thisCompareElement. 
elementsByTagName("square_window_image")); if (!squareImagesList.isEmpty()) { for (int ii = 0, iiSize = squareImagesList.size(); ii < iiSize; ++ii) { QDomElement thisSquareElement(squareImagesList.item(ii).toElement()); const int hiddenSquareImageIndex = colorCompareObject-> recreateImage(squareWindowSaver(thisSquareElement)); progressMeter.bumpCount(); squareWindow* squareWindowObject = squareWindow_.window(); //// patternWindow // restore this square image's children before restoring its history // (the children may have their own different histories) QDomNodeList patternImageList(thisSquareElement. elementsByTagName("pattern_window_image")); if (!patternImageList.isEmpty()) { for (int iii = 0, iiiSize = patternImageList.size(); iii < iiiSize; ++iii) { QDomElement thisPatternElement(patternImageList.item(iii). toElement()); squareWindowObject-> recreatePatternImage(patternWindowSaver(thisPatternElement)); patternWindow_.window()->updateHistory(thisPatternElement); progressMeter.bumpCount(); } } squareWindowObject->updateImageHistory(thisSquareElement); // if this image was only created so that one of its children could be // recreated, remove it (its data has already been stored away) if (hiddenSquareImageIndex != -1) { squareWindowObject->removeImage(hiddenSquareImageIndex); } } } // if this image was only created so that one of its children could be // recreated, remove it (its data has already been stored away) if (hiddenColorCompareIndex != -1) { colorCompareObject->removeImage(hiddenColorCompareIndex); } } } //// restore window wide settings that are independent of a particular image QDomElement windowGlobals(doc.elementsByTagName("global_settings"). 
item(0).toElement()); if (colorChooserAction_->isEnabled() && colorChooser_.window()) { colorChooser_.window()->updateCurrentSettings(windowGlobals); } if (colorCompareAction_->isEnabled() && colorCompareWindow_.window()) { colorCompareWindow_.window()->updateCurrentSettings(windowGlobals); } if (squareWindowAction_->isEnabled() && squareWindow_.window()) { squareWindow_.window()->updateCurrentSettings(windowGlobals); squareWindow_.window()->checkAllColorLists(); } if (patternWindowAction_->isEnabled() && patternWindow_.window()) { patternWindow_.window()->updateCurrentSettings(windowGlobals); } // read the image number of colors (before the reset) const int colorCount = ::getElementText(doc, "color_count").toInt(); originalImageColorCount_ = colorCount; // call while hideWindows_ (if colorCount wasn't 0 it won't be recalculated) startOriginalImageColorCount(); hideWindows_ = false; altMeter::setGroupMeter(NULL); projectFilename_ = fileString; setWindowTitles(QFileInfo(projectFilename_).fileName()); setNewWidgetGeometryAndRaise(colorChooser_.window()); }
int main(int argc, char** argv) { /*Polynomial2 poly2; poly2.kuu = -1; poly2.kuv = 1; poly2.kvv= -1; poly2.ku = 0.25; poly2.kv = 0.25; poly2.k1 = 5; CurveRasterizer<Polynomial2> raster(1, 1, -100, 100, poly2); CurveRasterizer2<Polynomial2> raster2(1, 1, -100, 100, poly2); auto tr0 = clock(); int x1 = 0; int x2 = 0; for (int i = 0; i < 10000000; i++) { raster.step(); x1 += raster.x; } auto tr1 = clock(); for (int i = 0; i < 10000000; i++) { raster2.step(); x2 += raster2.x; } auto tr2 = clock(); cout << "optimized " << double(tr1 - tr0) / CLOCKS_PER_SEC << endl; cout << "simple " << double(tr2 - tr1) / CLOCKS_PER_SEC << endl; cout << x1 << " " << x2 << endl; return 0;*/ ifstream paramFile(argv[1]); if (not paramFile.is_open()) { cout << argv[1] << " : ERROR, file is not found" << endl; return 0; } array<double, 6> params; cout << "EU Camera model parameters :" << endl; for (auto & p: params) { paramFile >> p; cout << setw(10) << p; } cout << endl; paramFile.ignore(); array<double, 6> cameraPose; cout << "Camera pose wrt the robot :" << endl; for (auto & e: cameraPose) { paramFile >> e; cout << setw(10) << e; } cout << endl; paramFile.ignore(); Transformation<double> TbaseCamera(cameraPose.data()); array<double, 6> robotPose1, robotPose2; SGMParameters stereoParams; stereoParams.verbosity = 3; stereoParams.salientPoints = false; paramFile >> stereoParams.u0; paramFile >> stereoParams.v0; paramFile >> stereoParams.dispMax; paramFile >> stereoParams.scale; paramFile.ignore(); string imageDir; getline(paramFile, imageDir); string imageInfo, imageName; getline(paramFile, imageInfo); istringstream imageStream(imageInfo); imageStream >> imageName; for (auto & x : robotPose1) imageStream >> x; Mat8u img1 = imread(imageDir + imageName, 0); stereoParams.uMax = img1.cols; stereoParams.vMax = img1.rows; stereoParams.setEqualMargin(); int counter = 2; EnhancedCamera camera(params.data()); while (getline(paramFile, imageInfo)) { istringstream imageStream(imageInfo); imageStream >> 
imageName; for (auto & x : robotPose2) imageStream >> x; Transformation<double> T01(robotPose1.data()), T02(robotPose2.data()); Transformation<double> TleftRight = T01.compose(TbaseCamera).inverseCompose(T02.compose(TbaseCamera)); Mat8u img2 = imread(imageDir + imageName, 0); EnhancedSGM stereo(TleftRight, &camera, &camera, stereoParams); DepthMap depth; auto t2 = clock(); stereo.computeStereo(img1, img2, depth); auto t3 = clock(); cout << double(t3 - t2) / CLOCKS_PER_SEC << endl; Mat32f dMat; depth.toInverseMat(dMat); // imwrite(imageDir + "res" + to_string(counter++) + ".png", depth*200); imwrite(imageDir + "res" + to_string(counter++) + ".png", dMat*30); } return 0; }
int main(int argc, char** argv) { /*Polynomial2 poly2; poly2.kuu = -1; poly2.kuv = 1; poly2.kvv= -1; poly2.ku = 0.25; poly2.kv = 0.25; poly2.k1 = 5; CurveRasterizer<Polynomial2> raster(1, 1, -100, 100, poly2); CurveRasterizer2<Polynomial2> raster2(1, 1, -100, 100, poly2); auto tr0 = clock(); int x1 = 0; int x2 = 0; for (int i = 0; i < 10000000; i++) { raster.step(); x1 += raster.x; } auto tr1 = clock(); for (int i = 0; i < 10000000; i++) { raster2.step(); x2 += raster2.x; } auto tr2 = clock(); cout << "optimized " << double(tr1 - tr0) / CLOCKS_PER_SEC << endl; cout << "simple " << double(tr2 - tr1) / CLOCKS_PER_SEC << endl; cout << x1 << " " << x2 << endl; return 0;*/ ifstream paramFile(argv[1]); if (not paramFile.is_open()) { cout << argv[1] << " : ERROR, file is not found" << endl; return 0; } array<double, 6> params; cout << "EU Camera model parameters :" << endl; for (auto & p: params) { paramFile >> p; cout << setw(10) << p; } cout << endl; paramFile.ignore(); array<double, 6> cameraPose; cout << "Camera pose wrt the robot :" << endl; for (auto & e: cameraPose) { paramFile >> e; cout << setw(10) << e; } cout << endl; paramFile.ignore(); Transformation<double> TbaseCamera(cameraPose.data()); array<double, 6> robotPose1, robotPose2; SGMParameters stereoParams2; stereoParams2.salientPoints = false; stereoParams2.verbosity = 3; stereoParams2.hypMax = 1; // stereoParams.salientPoints = false; paramFile >> stereoParams2.u0; paramFile >> stereoParams2.v0; paramFile >> stereoParams2.dispMax; paramFile >> stereoParams2.scale; paramFile.ignore(); string imageDir; getline(paramFile, imageDir); string imageInfo, imageName; getline(paramFile, imageInfo); istringstream imageStream(imageInfo); imageStream >> imageName; for (auto & x : robotPose1) imageStream >> x; Mat8u img1 = imread(imageDir + imageName, 0); cout << "Image name: "<< imageDir + imageName << endl; cout << "Image size: "<<img1.size()<<endl;; stereoParams2.u0 = 50; stereoParams2.v0 = 50; stereoParams2.uMax = 
img1.cols; stereoParams2.vMax = img1.rows; stereoParams2.setEqualMargin(); // stereoParams2.salientPoints = true; int counter = 2; EnhancedCamera camera(params.data()); DepthMap depth; depth.setDefault(); // stereoParams2.dispMax = 40; // stereoParams2.descRespThresh = 2; // stereoParams2.scaleVec = vector<int>{1}; MotionStereoParameters stereoParams(stereoParams2); stereoParams.verbosity = 1; stereoParams.descLength = 5; // stereoParams.descRespThresh = 2; // stereoParams.scaleVec = vector<int>{1}; MotionStereo stereo(&camera, &camera, stereoParams); stereo.setBaseImage(img1); //do SGM to init the depth getline(paramFile, imageInfo); getline(paramFile, imageInfo); getline(paramFile, imageInfo); imageStream.str(imageInfo); imageStream.clear(); imageStream >> imageName; for (auto & x : robotPose2) imageStream >> x; Mat8u img2 = imread(imageDir + imageName, 0); Transformation<double> T01(robotPose1.data()), T02(robotPose2.data()); Transformation<double> TleftRight = T01.compose(TbaseCamera).inverseCompose(T02.compose(TbaseCamera)); EnhancedSGM stereoSG(TleftRight, &camera, &camera, stereoParams2); Timer timer; stereoSG.computeStereo(img1, img2, depth); depth.filterNoise(); cout << timer.elapsed() << endl; Mat32f res, sigmaRes; Mat32f res2, sigmaRes2; depth.toInverseMat(res2); depth.sigmaToMat(sigmaRes2); imshow("res" + to_string(counter), res2 *0.12); imshow("sigma " + to_string(counter), sigmaRes2*20); cv::waitKey(0); counter++; while (getline(paramFile, imageInfo)) { istringstream imageStream(imageInfo); imageStream >> imageName; for (auto & x : robotPose2) imageStream >> x; Transformation<double> T01(robotPose1.data()), T02(robotPose2.data()); Transformation<double> TleftRight = T01.compose(TbaseCamera).inverseCompose(T02.compose(TbaseCamera)); Mat8u img2 = imread(imageDir + imageName, 0); // depth.setDefault(); timer.reset(); cout << TleftRight << endl; DepthMap depth2 = stereo.compute(TleftRight, img2, depth, counter - 3); depth = depth2; depth.filterNoise(); 
cout << timer.elapsed() << endl; depth.toInverseMat(res); depth.sigmaToMat(sigmaRes); // imwrite(imageDir + "res" + to_string(counter++) + ".png", depth*200); imshow("sigma " + to_string(counter), sigmaRes*20); imshow("d sigma " + to_string(counter), (sigmaRes - sigmaRes2)*20 + 0.5); // cout << (sigmaRes - sigmaRes2)(Rect(150, 150, 15, 15)) << endl; imshow("res " + to_string(counter), res *0.12); counter++; waitKey(); } waitKey(); return 0; }