Code example #1
FDiff2D CalculateFOV::calcFOV(const PanoramaData& panorama)
{
    if (panorama.getNrOfImages() == 0) {
        // no change
        return FDiff2D(panorama.getOptions().getHFOV(), panorama.getOptions().getVFOV());
    }

    vigra::Size2D panoSize(360*2,180*2);
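    // 2 pixels per degree; the pixel coordinates are divided by 2 further down to convert back to degrees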

    // remap into a miniature pano.
    PanoramaOptions opts;
    opts.setHFOV(360);
    opts.setProjection(PanoramaOptions::EQUIRECTANGULAR);
    opts.setWidth(panoSize.x);
    opts.setHeight(panoSize.y);

    // remap image
    // DGSW - make sure the type is correct
    vigra::BImage panoAlpha(panoSize.x, panoSize.y, static_cast<unsigned char>(0));
    Nona::RemappedPanoImage<vigra::BImage, vigra::BImage> remapped;
    UIntSet activeImgs = panorama.getActiveImages();
    for (UIntSet::iterator it = activeImgs.begin(); it != activeImgs.end(); ++it) {
        remapped.setPanoImage(panorama.getSrcImage(*it), opts, vigra::Rect2D(0,0,panoSize.x,panoSize.y));
        // calculate alpha channel
        remapped.calcAlpha();
        // copy into global alpha channel.
        vigra::copyImageIf(vigra_ext::applyRect(remapped.boundingBox(),
                                                vigra_ext::srcMaskRange(remapped)),
                            vigra_ext::applyRect(remapped.boundingBox(),
                                                vigra_ext::srcMask(remapped)),
                            vigra_ext::applyRect(remapped.boundingBox(),
                                                destImage(panoAlpha)));
    }

    // get field of view
    FDiff2D ul,lr;
    bool found = false;
    ul.x = DBL_MAX;
    ul.y = DBL_MAX;
    lr.x = -DBL_MAX;
    lr.y = -DBL_MAX;
    for (int v=0; v< panoSize.y; v++) {
        for (int h=0; h < panoSize.x; h++) {
            if (panoAlpha(h,v)) {
                // pixel is valid
                if ( ul.x > h ) {
                    found=true;
                    ul.x = h;
                }
                if ( ul.y > v ) {
                    found=true;
                    ul.y = v;
                }
                if ( lr.x < h) {
                    found=true;
                    lr.x = h;
                }
                if ( lr.y < v) {
                    found=true;
                    lr.y = v;
                }
            }
        }
    }
    if (!found) {
        // if nothing found, return current fov
        return FDiff2D(panorama.getOptions().getHFOV(), panorama.getOptions().getVFOV());
    }
    ul=ul/2.0;
    lr=lr/2.0;
    ul.x = ul.x - 180;
    ul.y = ul.y - 90;
    lr.x = lr.x - 180;
    lr.y = lr.y - 90;
    FDiff2D fov (2*std::max(fabs(ul.x), fabs(lr.x)), 2*std::max(fabs(ul.y), fabs(lr.y)));
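    // give very narrow horizontal fields of view an extra degree of margin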
    if(fov.x<40)
    {
        fov.x+=1;
    };
    return fov;
}
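
A minimal usage sketch, not part of the original source: assuming the same headers and using-declarations as the listing above and an already loaded Panorama object `pano`, the result of calcFOV can be read back and stored in the panorama options like this.

    // hypothetical caller; `pano` is assumed to be a populated Panorama
    FDiff2D fov = CalculateFOV::calcFOV(pano);   // horizontal/vertical FOV in degrees
    std::cout << "HFOV: " << fov.x << ", VFOV: " << fov.y << std::endl;

    PanoramaOptions opts = pano.getOptions();
    opts.setHFOV(fov.x);   // same setter the miniature-pano remap above uses
    pano.setOptions(opts);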
Code example #2
int main(int argc, char *argv[])
{
    // parse arguments
    const char * optstring = "alho:npqsv:m";
    int c;
    string output;
    bool doPairwise = false;
    bool doAutoOpt = false;
    bool doNormalOpt = false;
    bool doLevel = false;
    bool chooseProj = false;
    bool quiet = false;
    bool doPhotometric = false;
    double hfov = 0.0;
    while ((c = getopt (argc, argv, optstring)) != -1)
    {
        switch (c) {
        case 'o':
            output = optarg;
            break;
        case 'h':
            usage(argv[0]);
            return 0;
        case 'p':
            doPairwise = true;
            break;
        case 'a':
            doAutoOpt = true;
            break;
        case 'n':
            doNormalOpt = true;
            break;
        case 'l':
            doLevel = true;
            break;
        case 's':
            chooseProj = true;
            break;
        case 'q':
            quiet = true;
            break;
        case 'v':
            hfov = atof(optarg);
            break;
        case 'm':
            doPhotometric = true;
            break;
        default:
            abort ();
        }
    }

    if (argc - optind != 1) {
        usage(argv[0]);
        return 1;
    }

    const char * scriptFile = argv[optind];

    Panorama pano;
    if (scriptFile[0] == '-') {
        DocumentData::ReadWriteError err = pano.readData(std::cin);
        if (err != DocumentData::SUCCESSFUL) {
            cerr << "error while reading script file from stdin." << endl;
            cerr << "DocumentData::ReadWriteError code: " << err << endl;
            return 1;
        }
    } else {
        ifstream prjfile(scriptFile);
        if (!prjfile.good()) {
            cerr << "could not open script : " << scriptFile << endl;
            return 1;
        }
        pano.setFilePrefix(hugin_utils::getPathPrefix(scriptFile));
        DocumentData::ReadWriteError err = pano.readData(prjfile);
        if (err != DocumentData::SUCCESSFUL) {
            cerr << "error while parsing panos tool script: " << scriptFile << endl;
            cerr << "DocumentData::ReadWriteError code: " << err << endl;
            return 1;
        }
    }

    if (pano.getNrOfImages() == 0) {
        cerr << "Panorama should consist of at least one image" << endl;
        return 1;
    }

    // fix up bad HFOV values (e.g. written by autopano-SIFT)
    for (unsigned i=0; i < pano.getNrOfImages(); i++) {
        SrcPanoImage img = pano.getSrcImage(i);
        if (img.getProjection() == SrcPanoImage::RECTILINEAR
            && img.getHFOV() >= 180)
        {
            // something is wrong here, try to read from exif data
            double focalLength = 0;
            double cropFactor = 0;
            cerr << "HFOV of image " << img.getFilename() << " invalid, trying to read EXIF tags" << endl;
            bool ok = img.readEXIF(focalLength, cropFactor, true, false);
            if (! ok) {
                if (hfov) {
                    img.setHFOV(hfov);
                } else {
                    cerr << "EXIF reading failed, please specify HFOV with -v" << endl;
                    return 1;
                }
            }
            pano.setSrcImage(i, img);
        }
    }

    if(pano.getNrOfCtrlPoints()==0 && (doPairwise || doAutoOpt || doNormalOpt))
    {
        cerr << "Panorama have to have control points to optimise positions" << endl;
        return 1;
    };

    if (doPairwise && ! doAutoOpt) {
        // do pairwise optimisation
        set<string> optvars;
        optvars.insert("r");
        optvars.insert("p");
        optvars.insert("y");
        AutoOptimise::autoOptimise(pano);

        // do global optimisation
        if (!quiet) std::cerr << "*** Pairwise position optimisation" << endl;
        PTools::optimize(pano);
    } else if (doAutoOpt) {
        if (!quiet) std::cerr << "*** Adaptive geometric optimisation" << endl;
        SmartOptimise::smartOptimize(pano);
    } else if (doNormalOpt) {
        if (!quiet) std::cerr << "*** Optimising parameters specified in PTO file" << endl;
        PTools::optimize(pano);
    } else {
        if (!quiet) std::cerr << "*** Geometric parameters not optimized" << endl;
    }

    if (doLevel)
    {
        bool hasVerticalLines=false;
        CPVector allCP=pano.getCtrlPoints();
        if(allCP.size()>0 && (doPairwise || doAutoOpt || doNormalOpt))
        {
            for(size_t i=0;i<allCP.size() && !hasVerticalLines;i++)
            {
                hasVerticalLines=(allCP[i].mode==ControlPoint::X);
            };
        };
        // straighten only if there are no vertical control points
        if(hasVerticalLines)
        {
            cout << "Skipping automatic leveling because of existing vertical control points." << endl;
        }
        else
        {
            StraightenPanorama(pano).run();
            CenterHorizontally(pano).run();
        };
    }

    if (chooseProj) {
        PanoramaOptions opts = pano.getOptions();
        double hfov, vfov;
        CalculateFitPanorama fitPano = CalculateFitPanorama(pano);
        fitPano.run();
        opts.setHFOV(fitPano.getResultHorizontalFOV());
        opts.setHeight(roundi(fitPano.getResultHeight()));
        vfov = opts.getVFOV();
        hfov = opts.getHFOV();
        // avoid perspective projection if field of view > 100 deg
        double mf = 100;
        if (vfov < mf) {
            // cylindrical or rectilinear
            if (hfov < mf) {
                opts.setProjection(PanoramaOptions::RECTILINEAR);
            } else {
                opts.setProjection(PanoramaOptions::CYLINDRICAL);
            }
        }

        // downscale pano a little
        double sizeFactor = 0.7;

        pano.setOptions(opts);
        double w = CalculateOptimalScale::calcOptimalScale(pano);
        opts.setWidth(roundi(opts.getWidth()*w*sizeFactor), true);
        pano.setOptions(opts);
    }

    if(doPhotometric)
    {
        // photometric estimation
        PanoramaOptions opts = pano.getOptions();
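        // sampling setup: 200 random points per image, extracted from pyramid level 3 (downscaled) copies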
        int nPoints = 200;
        int pyrLevel=3;
        bool randomPoints = true;
        nPoints = nPoints * pano.getNrOfImages();
 
        std::vector<vigra_ext::PointPairRGB> points;
        ProgressDisplay *progressDisplay;
        if(!quiet)
            progressDisplay=new StreamProgressDisplay(std::cout);
        else
            progressDisplay=new DummyProgressDisplay();
        try 
        {
            loadImgsAndExtractPoints(pano, nPoints, pyrLevel, randomPoints, *progressDisplay, points, !quiet);
        } 
        catch (std::exception & e)
        {
            cerr << "caught exception: " << e.what() << endl;
            return 1;
        };
        if(!quiet)
            cout << "\rSelected " << points.size() << " points" << endl;

        if (points.size() == 0)
        {
            cerr << "Error: no overlapping points found, exiting" << endl;
            return 1;
        }

        progressDisplay->startSubtask("Photometric Optimization", 0.0);
        // first, ensure that vignetting and response coefficients are linked
        const HuginBase::ImageVariableGroup::ImageVariableEnum vars[] = {
                HuginBase::ImageVariableGroup::IVE_EMoRParams,
                HuginBase::ImageVariableGroup::IVE_ResponseType,
                HuginBase::ImageVariableGroup::IVE_VigCorrMode,
                HuginBase::ImageVariableGroup::IVE_RadialVigCorrCoeff,
                HuginBase::ImageVariableGroup::IVE_RadialVigCorrCenterShift
        };
        HuginBase::StandardImageVariableGroups variable_groups(pano);
        HuginBase::ImageVariableGroup & lenses = variable_groups.getLenses();
        for (size_t i = 0; i < lenses.getNumberOfParts(); i++)
        {
            std::set<HuginBase::ImageVariableGroup::ImageVariableEnum> links_needed;
            links_needed.clear();
            for (int v = 0; v < 5; v++)
            {
                if (!lenses.getVarLinkedInPart(vars[v], i))
                {
                    links_needed.insert(vars[v]);
                }
            };
            if (!links_needed.empty())
            {
                std::set<HuginBase::ImageVariableGroup::ImageVariableEnum>::iterator it;
                for (it = links_needed.begin(); it != links_needed.end(); it++)
                {
                    lenses.linkVariablePart(*it, i);
                }
            }
        }

        HuginBase::SmartPhotometricOptimizer::PhotometricOptimizeMode optmode = 
            HuginBase::SmartPhotometricOptimizer::OPT_PHOTOMETRIC_LDR;
        if (opts.outputMode == PanoramaOptions::OUTPUT_HDR)
        {
            optmode = HuginBase::SmartPhotometricOptimizer::OPT_PHOTOMETRIC_HDR;
        }
        SmartPhotometricOptimizer photoOpt(pano, progressDisplay, pano.getOptimizeVector(), points, optmode);
        photoOpt.run();

        // calculate the mean exposure.
        opts.outputExposureValue = CalculateMeanExposure::calcMeanExposure(pano);
        pano.setOptions(opts);
        progressDisplay->finishSubtask();
        delete progressDisplay;
    };

    // write result
    OptimizeVector optvec = pano.getOptimizeVector();
    UIntSet imgs;
    fill_set(imgs,0, pano.getNrOfImages()-1);
    if (output != "") {
        ofstream of(output.c_str());
        pano.printPanoramaScript(of, optvec, pano.getOptions(), imgs, false, hugin_utils::getPathPrefix(scriptFile));
    } else {
        pano.printPanoramaScript(cout, optvec, pano.getOptions(), imgs, false, hugin_utils::getPathPrefix(scriptFile));
    }
    return 0;
}
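
For reference, not part of the original source: the getopt string "alho:npqsv:m" above accepts -o <file> (write the result), -p (pairwise pre-optimisation), -a (adaptive/auto optimisation), -n (optimise the variables specified in the PTO file), -l (level the panorama), -s (select projection and size), -q (quiet), -v <deg> (fallback HFOV for images with invalid HFOV), -m (photometric optimisation) and -h (usage); the single remaining argument is the project file, or a name starting with "-" to read the script from stdin. Assuming the binary is built under Hugin's usual name autooptimiser, a hypothetical invocation would be:

    autooptimiser -a -l -s -m -o optimised.pto project.pto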
Code example #3
void CenterHorizontally::centerHorizontically(PanoramaData& panorama)
{
    vigra::Size2D panoSize(360,180);
    
    // remap into a miniature pano.
    PanoramaOptions opts;
    opts.setHFOV(360);
    opts.setProjection(PanoramaOptions::EQUIRECTANGULAR);
    opts.setWidth(360);
    opts.setHeight(180);
    
    // remap image
    vigra::BImage panoAlpha(panoSize);
    Nona::RemappedPanoImage<vigra::BImage, vigra::BImage> remapped;
    
    // use selected images.
    const UIntSet allActiveImgs(panorama.getActiveImages());

    if (allActiveImgs.empty())
    {
        // do nothing if there are no images
        return;
    }
    
    // consider only one image per group of yaw-linked images
    UIntSet activeImgs;
    for (UIntSet::const_iterator it = allActiveImgs.begin(); it!= allActiveImgs.end(); ++it)
    {
        const SrcPanoImage & img=panorama.getImage(*it);
        bool consider=true;
        if(img.YawisLinked())
        {
            for(UIntSet::const_iterator it2=activeImgs.begin(); it2!=activeImgs.end(); ++it2)
            {
                if(img.YawisLinkedWith(panorama.getSrcImage(*it2)))
                {
                    consider=false;
                    break;
                };
            };
        };
        if(consider)
            activeImgs.insert(*it);
    };

    for (UIntSet::iterator it = activeImgs.begin(); it != activeImgs.end(); ++it)
    {
        remapped.setPanoImage(panorama.getSrcImage(*it), opts, vigra::Rect2D(0,0,360,180));
        // calculate alpha channel
        remapped.calcAlpha();
        // copy into global alpha channel.
        vigra::copyImageIf(vigra_ext::applyRect(remapped.boundingBox(),
                                                vigra_ext::srcMaskRange(remapped)),
                           vigra_ext::applyRect(remapped.boundingBox(),
                                                vigra_ext::srcMask(remapped)),
                           vigra_ext::applyRect(remapped.boundingBox(),
                                                destImage(panoAlpha)));
    }
    
    // find the left and right borders of the image coverage by scanning columns
    std::vector<int> borders;
    bool colOccupied = false;
    for (int h=0; h < 360; h++) {
        bool curColOccupied = false;
        for (int v=0; v< 180; v++) {
            if (panoAlpha(h,v)) {
                // pixel is valid
                curColOccupied = true;
            }
        }
        if ((colOccupied && !curColOccupied) ||
            (!colOccupied && curColOccupied))
        {
            // change in position, save point.
            borders.push_back(h-180);
            colOccupied = curColOccupied;
        }
    }
    
    
    if (borders.empty()) {
        // empty pano
        return;
    }
    const int lastidx = static_cast<int>(borders.size()) - 1;
    
    if (colOccupied) {
        // we have reached the right border, and the pano is still valid
        // shift right fragments by 360 deg
        // |11    2222|  -> |      222211     |
        std::vector<int> newBorders;
        newBorders.push_back(borders[lastidx]);
        for (int i = 0; i < lastidx; i++) {
            newBorders.push_back(borders[i]+360);
        }
        borders = newBorders;
    }
    
    // shift by the midpoint of the leftmost and rightmost covered columns
    const double dYaw = (borders[0] + borders[lastidx]) / 2.0;
    
    // apply yaw shift, takes also translation parameters into account
    RotatePanorama(panorama, -dYaw, 0, 0).run();
}
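
A minimal usage sketch, not part of the original source: the algorithm is driven through run(), exactly as code example #2 does after straightening; `pano` is assumed to be an already loaded Panorama.

    // hypothetical caller; recentres the covered yaw range around 0 degrees
    CenterHorizontally(pano).run();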