// Creates and prepares a blender for the compositing stage.
//
// blenderType   - Blender::NO, Blender::FEATHER or Blender::MULTI_BAND.
// corners/sizes - per-image top-left corners and sizes in the warped panorama;
//                 used both to derive the blend width and to prepare the blender.
// blendStrength - blend width as a percentage of the panorama "diagonal"
//                 (sqrt of the destination area), typically ~5.
//
// Returns the prepared blender. Falls back to a simple (NO) blender when the
// computed blend width is below one pixel.
static Ptr<detail::Blender> buildBlender(int blenderType, vector<Point> corners, vector<Size> sizes, int blendStrength)
{
    Ptr<Blender> blender = Blender::createDefault(blenderType, TRY_GPU);
    Size destinationsz = resultRoi(corners, sizes).size();
    // Keep the blend width as float: the previous `int` declaration truncated
    // the value, distorting both the multi-band band count (log of a truncated
    // value) and the feather sharpness (1/width).
    float blendWidth = sqrt(static_cast<float>(destinationsz.area())) * blendStrength / 100.f;
    if (blendWidth < 1.f)
        blender = Blender::createDefault(Blender::NO, TRY_GPU);
    else if (blenderType == Blender::MULTI_BAND)
    {
        MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender));
        if (mb) // guard: only configure if createDefault really built a multi-band blender
        {
            mb->setNumBands(static_cast<int>(ceil(log(blendWidth) / log(2.)) - 1.));
            __android_log_print(ANDROID_LOG_INFO, TAG, "Multi-band blender, number of bands: %d", mb->numBands());
        }
    }
    else if (blenderType == Blender::FEATHER)
    {
        FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender));
        if (fb) // guard against a mismatched concrete blender type
        {
            fb->setSharpness(1.f / blendWidth);
            __android_log_print(ANDROID_LOG_INFO, TAG, "Feather blender, sharpness: %f", fb->sharpness());
        }
    }
    blender->prepare(corners, sizes);
    return blender;
}
int main(int argc, char* argv[]) { int64 app_start_time = getTickCount(); cv::setBreakOnError(true); int retval = parseCmdArgs(argc, argv); if (retval) return retval; // Check if have enough images int num_images = static_cast<int>(img_names.size()); if (num_images < 2) { LOGLN("Need more images"); return -1; } double work_scale = 1, seam_scale = 1, compose_scale = 1; bool is_work_scale_set = false, is_seam_scale_set = false, is_compose_scale_set = false; LOGLN("Finding features..."); int64 t = getTickCount(); vector<ImageFeatures> features(num_images); SurfFeaturesFinder finder(try_gpu); Mat full_img, img; vector<Mat> images(num_images); vector<Size> full_img_sizes(num_images); double seam_work_aspect = 1; for (int i = 0; i < num_images; ++i) { full_img = imread(img_names[i]); full_img_sizes[i] = full_img.size(); if (full_img.empty()) { LOGLN("Can't open image " << img_names[i]); return -1; } if (work_megapix < 0) { img = full_img; work_scale = 1; is_work_scale_set = true; } else { if (!is_work_scale_set) { work_scale = min(1.0, sqrt(work_megapix * 1e6 / full_img.size().area())); is_work_scale_set = true; } resize(full_img, img, Size(), work_scale, work_scale); } if (!is_seam_scale_set) { seam_scale = min(1.0, sqrt(seam_megapix * 1e6 / full_img.size().area())); seam_work_aspect = seam_scale / work_scale; is_seam_scale_set = true; } finder(img, features[i]); features[i].img_idx = i; LOGLN("Features in image #" << i+1 << ": " << features[i].keypoints.size()); resize(full_img, img, Size(), seam_scale, seam_scale); images[i] = img.clone(); } full_img.release(); img.release(); LOGLN("Finding features, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); LOG("Pairwise matching"); t = getTickCount(); vector<MatchesInfo> pairwise_matches; BestOf2NearestMatcher matcher(try_gpu, match_conf); matcher(features, pairwise_matches); LOGLN("Pairwise matching, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); // Leave only images we are sure are from 
the same panorama vector<int> indices = leaveBiggestComponent(features, pairwise_matches, conf_thresh); vector<Mat> img_subset; vector<string> img_names_subset; vector<Size> full_img_sizes_subset; for (size_t i = 0; i < indices.size(); ++i) { img_names_subset.push_back(img_names[indices[i]]); img_subset.push_back(images[indices[i]]); full_img_sizes_subset.push_back(full_img_sizes[indices[i]]); } images = img_subset; img_names = img_names_subset; full_img_sizes = full_img_sizes_subset; // Check if we still have enough images num_images = static_cast<int>(img_names.size()); if (num_images < 2) { LOGLN("Need more images"); return -1; } LOGLN("Estimating rotations..."); t = getTickCount(); HomographyBasedEstimator estimator; vector<CameraParams> cameras; estimator(features, pairwise_matches, cameras); LOGLN("Estimating rotations, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); for (size_t i = 0; i < cameras.size(); ++i) { Mat R; cameras[i].R.convertTo(R, CV_32F); cameras[i].R = R; LOGLN("Initial focal length #" << indices[i]+1 << ": " << cameras[i].focal); } LOG("Bundle adjustment"); t = getTickCount(); BundleAdjuster adjuster(ba_space, conf_thresh); adjuster(features, pairwise_matches, cameras); LOGLN("Bundle adjustment, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); // Find median focal length vector<double> focals; for (size_t i = 0; i < cameras.size(); ++i) { LOGLN("Camera #" << indices[i]+1 << " focal length: " << cameras[i].focal); focals.push_back(cameras[i].focal); } nth_element(focals.begin(), focals.begin() + focals.size()/2, focals.end()); float warped_image_scale = static_cast<float>(focals[focals.size() / 2]); if (wave_correct) { LOGLN("Wave correcting..."); t = getTickCount(); vector<Mat> rmats; for (size_t i = 0; i < cameras.size(); ++i) rmats.push_back(cameras[i].R); waveCorrect(rmats); for (size_t i = 0; i < cameras.size(); ++i) cameras[i].R = rmats[i]; LOGLN("Wave correcting, time: " << ((getTickCount() - t) / 
getTickFrequency()) << " sec"); } LOGLN("Warping images (auxiliary)... "); t = getTickCount(); vector<Point> corners(num_images); vector<Mat> masks_warped(num_images); vector<Mat> images_warped(num_images); vector<Size> sizes(num_images); vector<Mat> masks(num_images); // Preapre images masks for (int i = 0; i < num_images; ++i) { masks[i].create(images[i].size(), CV_8U); masks[i].setTo(Scalar::all(255)); } // Warp images and their masks Ptr<Warper> warper = Warper::createByCameraFocal(static_cast<float>(warped_image_scale * seam_work_aspect), warp_type); for (int i = 0; i < num_images; ++i) { corners[i] = warper->warp(images[i], static_cast<float>(cameras[i].focal * seam_work_aspect), cameras[i].R, images_warped[i]); sizes[i] = images_warped[i].size(); warper->warp(masks[i], static_cast<float>(cameras[i].focal * seam_work_aspect), cameras[i].R, masks_warped[i], INTER_NEAREST, BORDER_CONSTANT); } vector<Mat> images_warped_f(num_images); for (int i = 0; i < num_images; ++i) images_warped[i].convertTo(images_warped_f[i], CV_32F); LOGLN("Warping images, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); LOGLN("Exposure compensation (feed)..."); t = getTickCount(); Ptr<ExposureCompensator> compensator = ExposureCompensator::createDefault(expos_comp_type); compensator->feed(corners, images_warped, masks_warped); LOGLN("Exposure compensation (feed), time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); LOGLN("Finding seams..."); t = getTickCount(); Ptr<SeamFinder> seam_finder = SeamFinder::createDefault(seam_find_type); seam_finder->find(images_warped_f, corners, masks_warped); LOGLN("Finding seams, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); // Release unused memory images.clear(); images_warped.clear(); images_warped_f.clear(); masks.clear(); LOGLN("Compositing..."); t = getTickCount(); Mat img_warped, img_warped_s; Mat dilated_mask, seam_mask, mask, mask_warped; Ptr<Blender> blender; double compose_seam_aspect = 1; 
double compose_work_aspect = 1; for (int img_idx = 0; img_idx < num_images; ++img_idx) { LOGLN("Compositing image #" << indices[img_idx]+1); // Read image and resize it if necessary full_img = imread(img_names[img_idx]); if (!is_compose_scale_set) { if (compose_megapix > 0) compose_scale = min(1.0, sqrt(compose_megapix * 1e6 / full_img.size().area())); is_compose_scale_set = true; // Compute relative scales compose_seam_aspect = compose_scale / seam_scale; compose_work_aspect = compose_scale / work_scale; // Update warped image scale warped_image_scale *= static_cast<float>(compose_work_aspect); warper = Warper::createByCameraFocal(warped_image_scale, warp_type); // Update corners and sizes for (int i = 0; i < num_images; ++i) { // Update camera focal cameras[i].focal *= compose_work_aspect; // Update corner and size Size sz = full_img_sizes[i]; if (abs(compose_scale - 1) > 1e-1) { sz.width = cvRound(full_img_sizes[i].width * compose_scale); sz.height = cvRound(full_img_sizes[i].height * compose_scale); } Rect roi = warper->warpRoi(sz, static_cast<float>(cameras[i].focal), cameras[i].R); corners[i] = roi.tl(); sizes[i] = roi.size(); } } if (abs(compose_scale - 1) > 1e-1) resize(full_img, img, Size(), compose_scale, compose_scale); else img = full_img; full_img.release(); Size img_size = img.size(); // Warp the current image warper->warp(img, static_cast<float>(cameras[img_idx].focal), cameras[img_idx].R, img_warped); // Warp the current image mask mask.create(img_size, CV_8U); mask.setTo(Scalar::all(255)); warper->warp(mask, static_cast<float>(cameras[img_idx].focal), cameras[img_idx].R, mask_warped, INTER_NEAREST, BORDER_CONSTANT); // Compensate exposure compensator->apply(img_idx, corners[img_idx], img_warped, mask_warped); img_warped.convertTo(img_warped_s, CV_16S); img_warped.release(); img.release(); mask.release(); dilate(masks_warped[img_idx], dilated_mask, Mat()); resize(dilated_mask, seam_mask, mask_warped.size()); mask_warped = seam_mask & mask_warped; if 
(static_cast<Blender*>(blender) == 0) { blender = Blender::createDefault(blend_type); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) * blend_strength / 100.f; if (blend_width < 1.f) blender = Blender::createDefault(Blender::NO); else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender)); mb->setNumBands(static_cast<int>(ceil(log(blend_width)/log(2.)) - 1.)); LOGLN("Multi-band blender, number of bands: " << mb->numBands()); } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender)); fb->setSharpness(1.f/blend_width); LOGLN("Feather blender, number of bands: " << fb->sharpness()); } blender->prepare(corners, sizes); } // Blend the current image blender->feed(img_warped_s, mask_warped, corners[img_idx]); } Mat result, result_mask; blender->blend(result, result_mask); LOGLN("Compositing, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); imwrite(result_name, result); LOGLN("Finished, total time: " << ((getTickCount() - app_start_time) / getTickFrequency()) << " sec"); return 0; }
int main(int argc, char* argv[]) { #if ENABLE_LOG int64 app_start_time = getTickCount(); #endif cv::setBreakOnError(true); int retval = parseCmdArgs(argc, argv); if (retval) return retval; // Check if have enough images int num_images = static_cast<int>(img_names.size()); if (num_images < 2) { LOGLN("Need more images"); return -1; } double work_scale = 1, seam_scale = 1, compose_scale = 1; bool is_work_scale_set = false, is_seam_scale_set = false, is_compose_scale_set = false; LOGLN("Finding features..."); #if ENABLE_LOG int64 t = getTickCount(); #endif Ptr<FeaturesFinder> finder; if (features_type == "surf") { #if defined(HAVE_OPENCV_NONFREE) && defined(HAVE_OPENCV_GPU) if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0) finder = new SurfFeaturesFinderGpu(); else #endif finder = new SurfFeaturesFinder(); } else if (features_type == "orb") { finder = new OrbFeaturesFinder(); } else { cout << "Unknown 2D features type: '" << features_type << "'.\n"; return -1; } Mat full_img, img; vector<ImageFeatures> features(num_images); vector<Mat> images(num_images); vector<Size> full_img_sizes(num_images); double seam_work_aspect = 1; for (int i = 0; i < num_images; ++i) { full_img = imread(img_names[i]); full_img_sizes[i] = full_img.size(); if (full_img.empty()) { LOGLN("Can't open image " << img_names[i]); return -1; } if (work_megapix < 0) { img = full_img; work_scale = 1; is_work_scale_set = true; } else { if (!is_work_scale_set) { work_scale = min(1.0, sqrt(work_megapix * 1e6 / full_img.size().area())); is_work_scale_set = true; } resize(full_img, img, Size(), work_scale, work_scale); } if (!is_seam_scale_set) { seam_scale = min(1.0, sqrt(seam_megapix * 1e6 / full_img.size().area())); seam_work_aspect = seam_scale / work_scale; is_seam_scale_set = true; } (*finder)(img, features[i]); features[i].img_idx = i; LOGLN("Features in image #" << i+1 << ": " << features[i].keypoints.size()); resize(full_img, img, Size(), seam_scale, seam_scale); images[i] = img.clone(); } 
finder->collectGarbage(); full_img.release(); img.release(); LOGLN("Finding features, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); LOG("Pairwise matching"); #if ENABLE_LOG t = getTickCount(); #endif vector<MatchesInfo> pairwise_matches; BestOf2NearestMatcher matcher(try_gpu, match_conf); matcher(features, pairwise_matches); matcher.collectGarbage(); LOGLN("Pairwise matching, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); // Check if we should save matches graph if (save_graph) { LOGLN("Saving matches graph..."); ofstream f(save_graph_to.c_str()); f << matchesGraphAsString(img_names, pairwise_matches, conf_thresh); } // Leave only images we are sure are from the same panorama vector<int> indices = leaveBiggestComponent(features, pairwise_matches, conf_thresh); vector<Mat> img_subset; vector<String> img_names_subset; vector<Size> full_img_sizes_subset; for (size_t i = 0; i < indices.size(); ++i) { img_names_subset.push_back(img_names[indices[i]]); img_subset.push_back(images[indices[i]]); full_img_sizes_subset.push_back(full_img_sizes[indices[i]]); } images = img_subset; img_names = img_names_subset; full_img_sizes = full_img_sizes_subset; // Check if we still have enough images num_images = static_cast<int>(img_names.size()); if (num_images < 2) { LOGLN("Need more images"); return -1; } HomographyBasedEstimator estimator; vector<CameraParams> cameras; estimator(features, pairwise_matches, cameras); for (size_t i = 0; i < cameras.size(); ++i) { Mat R; cameras[i].R.convertTo(R, CV_32F); cameras[i].R = R; LOGLN("Initial intrinsics #" << indices[i]+1 << ":\n" << cameras[i].K()); } Ptr<detail::BundleAdjusterBase> adjuster; if (ba_cost_func == "reproj") adjuster = new detail::BundleAdjusterReproj(); else if (ba_cost_func == "ray") adjuster = new detail::BundleAdjusterRay(); else { cout << "Unknown bundle adjustment cost function: '" << ba_cost_func << "'.\n"; return -1; } adjuster->setConfThresh(conf_thresh); Mat_<uchar> 
refine_mask = Mat::zeros(3, 3, CV_8U); if (ba_refine_mask[0] == 'x') refine_mask(0,0) = 1; if (ba_refine_mask[1] == 'x') refine_mask(0,1) = 1; if (ba_refine_mask[2] == 'x') refine_mask(0,2) = 1; if (ba_refine_mask[3] == 'x') refine_mask(1,1) = 1; if (ba_refine_mask[4] == 'x') refine_mask(1,2) = 1; adjuster->setRefinementMask(refine_mask); (*adjuster)(features, pairwise_matches, cameras); // Find median focal length vector<double> focals; for (size_t i = 0; i < cameras.size(); ++i) { LOGLN("Camera #" << indices[i]+1 << ":\n" << cameras[i].K()); focals.push_back(cameras[i].focal); } sort(focals.begin(), focals.end()); float warped_image_scale; if (focals.size() % 2 == 1) warped_image_scale = static_cast<float>(focals[focals.size() / 2]); else warped_image_scale = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f; if (do_wave_correct) { vector<Mat> rmats; for (size_t i = 0; i < cameras.size(); ++i) rmats.push_back(cameras[i].R); waveCorrect(rmats, wave_correct); for (size_t i = 0; i < cameras.size(); ++i) cameras[i].R = rmats[i]; } LOGLN("Warping images (auxiliary)... 
"); #if ENABLE_LOG t = getTickCount(); #endif vector<Point> corners(num_images); vector<Mat> masks_warped(num_images); vector<Mat> images_warped(num_images); vector<Size> sizes(num_images); vector<Mat> masks(num_images); // Preapre images masks for (int i = 0; i < num_images; ++i) { masks[i].create(images[i].size(), CV_8U); masks[i].setTo(Scalar::all(255)); } // Warp images and their masks Ptr<WarperCreator> warper_creator; #ifdef HAVE_OPENCV_GPU if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0) { if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu(); else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu(); else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu(); } else #endif { if (warp_type == "plane") warper_creator = new cv::PlaneWarper(); else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper(); else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper(); else if (warp_type == "fisheye") warper_creator = new cv::FisheyeWarper(); else if (warp_type == "stereographic") warper_creator = new cv::StereographicWarper(); else if (warp_type == "compressedPlaneA2B1") warper_creator = new cv::CompressedRectilinearWarper(2, 1); else if (warp_type == "compressedPlaneA1.5B1") warper_creator = new cv::CompressedRectilinearWarper(1.5, 1); else if (warp_type == "compressedPlanePortraitA2B1") warper_creator = new cv::CompressedRectilinearPortraitWarper(2, 1); else if (warp_type == "compressedPlanePortraitA1.5B1") warper_creator = new cv::CompressedRectilinearPortraitWarper(1.5, 1); else if (warp_type == "paniniA2B1") warper_creator = new cv::PaniniWarper(2, 1); else if (warp_type == "paniniA1.5B1") warper_creator = new cv::PaniniWarper(1.5, 1); else if (warp_type == "paniniPortraitA2B1") warper_creator = new cv::PaniniPortraitWarper(2, 1); else if (warp_type == "paniniPortraitA1.5B1") warper_creator = new cv::PaniniPortraitWarper(1.5, 1); else if (warp_type == 
"mercator") warper_creator = new cv::MercatorWarper(); else if (warp_type == "transverseMercator") warper_creator = new cv::TransverseMercatorWarper(); } if (warper_creator.empty()) { cout << "Can't create the following warper '" << warp_type << "'\n"; return 1; } Ptr<RotationWarper> warper = warper_creator->create(static_cast<float>(warped_image_scale * seam_work_aspect)); for (int i = 0; i < num_images; ++i) { Mat_<float> K; cameras[i].K().convertTo(K, CV_32F); float swa = (float)seam_work_aspect; K(0,0) *= swa; K(0,2) *= swa; K(1,1) *= swa; K(1,2) *= swa; corners[i] = warper->warp(images[i], K, cameras[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]); sizes[i] = images_warped[i].size(); warper->warp(masks[i], K, cameras[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]); } vector<Mat> images_warped_f(num_images); for (int i = 0; i < num_images; ++i) images_warped[i].convertTo(images_warped_f[i], CV_32F); LOGLN("Warping images, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); Ptr<ExposureCompensator> compensator = ExposureCompensator::createDefault(expos_comp_type); compensator->feed(corners, images_warped, masks_warped); Ptr<SeamFinder> seam_finder; if (seam_find_type == "no") seam_finder = new detail::NoSeamFinder(); else if (seam_find_type == "voronoi") seam_finder = new detail::VoronoiSeamFinder(); else if (seam_find_type == "gc_color") { #ifdef HAVE_OPENCV_GPU if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0) seam_finder = new detail::GraphCutSeamFinderGpu(GraphCutSeamFinderBase::COST_COLOR); else #endif seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR); } else if (seam_find_type == "gc_colorgrad") { #ifdef HAVE_OPENCV_GPU if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0) seam_finder = new detail::GraphCutSeamFinderGpu(GraphCutSeamFinderBase::COST_COLOR_GRAD); else #endif seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR_GRAD); } else if (seam_find_type == 
"dp_color") seam_finder = new detail::DpSeamFinder(DpSeamFinder::COLOR); else if (seam_find_type == "dp_colorgrad") seam_finder = new detail::DpSeamFinder(DpSeamFinder::COLOR_GRAD); if (seam_finder.empty()) { cout << "Can't create the following seam finder '" << seam_find_type << "'\n"; return 1; } seam_finder->find(images_warped_f, corners, masks_warped); // Release unused memory images.clear(); images_warped.clear(); images_warped_f.clear(); masks.clear(); LOGLN("Compositing..."); #if ENABLE_LOG t = getTickCount(); #endif Mat img_warped, img_warped_s; Mat dilated_mask, seam_mask, mask, mask_warped; Ptr<Blender> blender; //double compose_seam_aspect = 1; double compose_work_aspect = 1; for (int img_idx = 0; img_idx < num_images; ++img_idx) { LOGLN("Compositing image #" << indices[img_idx]+1); // Read image and resize it if necessary full_img = imread(img_names[img_idx]); if (!is_compose_scale_set) { if (compose_megapix > 0) compose_scale = min(1.0, sqrt(compose_megapix * 1e6 / full_img.size().area())); is_compose_scale_set = true; // Compute relative scales //compose_seam_aspect = compose_scale / seam_scale; compose_work_aspect = compose_scale / work_scale; // Update warped image scale warped_image_scale *= static_cast<float>(compose_work_aspect); warper = warper_creator->create(warped_image_scale); // Update corners and sizes for (int i = 0; i < num_images; ++i) { // Update intrinsics cameras[i].focal *= compose_work_aspect; cameras[i].ppx *= compose_work_aspect; cameras[i].ppy *= compose_work_aspect; // Update corner and size Size sz = full_img_sizes[i]; if (std::abs(compose_scale - 1) > 1e-1) { sz.width = cvRound(full_img_sizes[i].width * compose_scale); sz.height = cvRound(full_img_sizes[i].height * compose_scale); } Mat K; cameras[i].K().convertTo(K, CV_32F); Rect roi = warper->warpRoi(sz, K, cameras[i].R); corners[i] = roi.tl(); sizes[i] = roi.size(); } } if (abs(compose_scale - 1) > 1e-1) resize(full_img, img, Size(), compose_scale, compose_scale); else img 
= full_img; full_img.release(); Size img_size = img.size(); Mat K; cameras[img_idx].K().convertTo(K, CV_32F); // Warp the current image warper->warp(img, K, cameras[img_idx].R, INTER_LINEAR, BORDER_REFLECT, img_warped); // Warp the current image mask mask.create(img_size, CV_8U); mask.setTo(Scalar::all(255)); warper->warp(mask, K, cameras[img_idx].R, INTER_NEAREST, BORDER_CONSTANT, mask_warped); // Compensate exposure compensator->apply(img_idx, corners[img_idx], img_warped, mask_warped); img_warped.convertTo(img_warped_s, CV_16S); img_warped.release(); img.release(); mask.release(); dilate(masks_warped[img_idx], dilated_mask, Mat()); resize(dilated_mask, seam_mask, mask_warped.size()); mask_warped = seam_mask & mask_warped; if (blender.empty()) { blender = Blender::createDefault(blend_type, try_gpu); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) * blend_strength / 100.f; if (blend_width < 1.f) blender = Blender::createDefault(Blender::NO, try_gpu); else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender)); mb->setNumBands(static_cast<int>(ceil(log(blend_width)/log(2.)) - 1.)); LOGLN("Multi-band blender, number of bands: " << mb->numBands()); } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender)); fb->setSharpness(1.f/blend_width); LOGLN("Feather blender, sharpness: " << fb->sharpness()); } blender->prepare(corners, sizes); } // Blend the current image blender->feed(img_warped_s, mask_warped, corners[img_idx]); } Mat result, result_mask; blender->blend(result, result_mask); LOGLN("Compositing, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); imwrite(result_name, result); LOGLN("Finished, total time: " << ((getTickCount() - app_start_time) / getTickFrequency()) << " sec"); return 0; }
int stitch_pair(const vector<Mat> &input_imgs, std::vector<StitchData> &vdata) { /*imwrite("a.jpg", input_imgs[0]); imwrite("b.jpg", input_imgs[1]); imshow("a", input_imgs[0]); imshow("b", input_imgs[1]); waitKey(0);*/ vector<CameraParams> cameras; //for(size_t i=0; i<_input_imgs.size(); ++i) //{ // vector<Mat> input_imgs; // input_imgs.push_back(_input_imgs[0]); // input_imgs.push_back(_input_imgs[i]); cv::setBreakOnError(true); // Check if have enough images int num_images = static_cast<int>(input_imgs.size()); if (num_images < 2) { cout << "Need more images" << endl; return -1; } double work_scale = 1, seam_scale = 1, compose_scale = 1; bool is_work_scale_set = false, is_seam_scale_set = false, is_compose_scale_set = false; cout << "Finding features..." << endl; Ptr<FeaturesFinder> finder; if (features_type == "surf") { #if defined(HAVE_OPENCV_NONFREE) && defined(HAVE_OPENCV_) if (try_ && ::getCudaEnabledDeviceCount() > 0) finder = new SurfFeaturesFinderGpu(); else #endif finder = new SurfFeaturesFinder(); } else if (features_type == "orb") { finder = new OrbFeaturesFinder(); } else { cout << "Unknown 2D features type: '" << features_type << "'.\n" << endl; return -1; } Mat full_img, img; vector<ImageFeatures> features(num_images); vector<Mat> images(num_images); vector<Size> full_img_sizes(num_images); double seam_work_aspect = 1; for (int i = 0; i < num_images; ++i) { full_img = input_imgs[i].clone(); full_img_sizes[i] = full_img.size(); if (full_img.empty()) { cout<< "Can't open image " << i << endl; return -1; } if (work_megapix < 0) { img = full_img; work_scale = 1; is_work_scale_set = true; } else { if (!is_work_scale_set) { work_scale = min(1.0, sqrt(work_megapix * 1e6 / full_img.size().area())); is_work_scale_set = true; } resize(full_img, img, Size(), work_scale, work_scale); } if (!is_seam_scale_set) { seam_scale = min(1.0, sqrt(seam_megapix * 1e6 / full_img.size().area())); seam_work_aspect = seam_scale / work_scale; is_seam_scale_set = true; } 
(*finder)(img, features[i]); features[i].img_idx = i; cout << "Features in image #" << i+1 << ": " << features[i].keypoints.size() << endl; resize(full_img, img, Size(), seam_scale, seam_scale); images[i] = img.clone(); } finder->collectGarbage(); full_img.release(); img.release(); cout << endl <<"Pairwise matching..." << endl; vector<MatchesInfo> pairwise_matches; BestOf2NearestMatcher matcher(try_, match_conf); matcher(features, pairwise_matches); matcher.collectGarbage(); cout<<pairwise_matches[1].H<<endl; showRegist(input_imgs[0],input_imgs[1],pairwise_matches[1].H); // Check if we should save matches graph /*if (save_graph) { cout << endl << "Saving matches graph..." << endl; ofstream f(save_graph_to.c_str()); f << matchesGraphAsString(img_names, pairwise_matches, conf_thresh); }*/ // Leave only images we are sure are from the same panorama vector<int> indices = leaveBiggestComponent(features, pairwise_matches, conf_thresh); vector<Mat> img_subset; // vector<string> img_names_subset; vector<Size> full_img_sizes_subset; for (size_t i = 0; i < indices.size(); ++i) { // img_names_subset.push_back(img_names[indices[i]]); img_subset.push_back(images[indices[i]]); full_img_sizes_subset.push_back(full_img_sizes[indices[i]]); } images = img_subset; // img_names = img_names_subset; full_img_sizes = full_img_sizes_subset; // Check if we still have enough images num_images = static_cast<int>(img_subset.size()); if(num_images < 2) { cout << "Need more images" << endl; return -1; } cout << endl << "Calibrating..." 
<< endl; HomographyBasedEstimator estimator; estimator(features, pairwise_matches, cameras); for (size_t i = 0; i < cameras.size(); ++i) { Mat R; cameras[i].R.convertTo(R, CV_32F); cameras[i].R = R; cout << "Initial intrinsics #" << indices[i]+1 << ":\n" << cameras[i].K() << endl; } Ptr<detail::BundleAdjusterBase> adjuster; if (ba_cost_func == "reproj") adjuster = new detail::BundleAdjusterReproj(); else if (ba_cost_func == "ray") adjuster = new detail::BundleAdjusterRay(); else { cout << "Unknown bundle adjustment cost function: '" << ba_cost_func << "'.\n"; return -1; } adjuster->setConfThresh(conf_thresh); Mat_<uchar> refine_mask = Mat::zeros(3, 3, CV_8U); if (ba_refine_mask[0] == 'x') refine_mask(0,0) = 1; if (ba_refine_mask[1] == 'x') refine_mask(0,1) = 1; if (ba_refine_mask[2] == 'x') refine_mask(0,2) = 1; if (ba_refine_mask[3] == 'x') refine_mask(1,1) = 1; if (ba_refine_mask[4] == 'x') refine_mask(1,2) = 1; adjuster->setRefinementMask(refine_mask); (*adjuster)(features, pairwise_matches, cameras); for(int i=0; i<cameras.size(); ++i) { // cameras[i].R=cameras[0].R.inv()*cameras[i].R; } // Find median focal length vector<double> focals; for (size_t i = 0; i < cameras.size(); ++i) { cout << "Camera #" << indices[i]+1 << ":\n" << cameras[i].K() << endl; focals.push_back(cameras[i].focal); if(!(0 < cameras[i].focal && cameras[i].focal < 1e6)) { cout << "bundle ajuster failed." << endl << endl; return -2; } } sort(focals.begin(), focals.end()); float warped_image_scale; if (focals.size() % 2 == 1) warped_image_scale = static_cast<float>(focals[focals.size() / 2]); else warped_image_scale = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f; if (do_wave_correct) { vector<Mat> rmats; for (size_t i = 0; i < cameras.size(); ++i) rmats.push_back(cameras[i].R); waveCorrect(rmats, wave_correct); for (size_t i = 0; i < cameras.size(); ++i) cameras[i].R = rmats[i]; } cout << endl << "Warping images (auxiliary)... 
" << endl; vector<Point> corners(num_images); vector<Mat> masks_warped(num_images); //only this is used vector<Mat> images_warped(num_images); vector<Size> sizes(num_images); vector<Mat> masks(num_images); // Preapre images masks for (int i = 0; i < num_images; ++i) { masks[i].create(images[i].size(), CV_8U); masks[i].setTo(Scalar::all(255)); } // Warp images and their masks Ptr<WarperCreator> warper_creator; #ifdef HAVE_OPENCV_ if (try_ && ::getCudaEnabledDeviceCount() > 0) { if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu(); else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu(); else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu(); } else #endif { if (warp_type == "plane") warper_creator = new cv::PlaneWarper(); else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper(); else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper(); else if (warp_type == "fisheye") warper_creator = new cv::FisheyeWarper(); else if (warp_type == "stereographic") warper_creator = new cv::StereographicWarper(); else if (warp_type == "compressedPlaneA2B1") warper_creator = new cv::CompressedRectilinearWarper(2, 1); else if (warp_type == "compressedPlaneA1.5B1") warper_creator = new cv::CompressedRectilinearWarper(1.5, 1); else if (warp_type == "compressedPlanePortraitA2B1") warper_creator = new cv::CompressedRectilinearPortraitWarper(2, 1); else if (warp_type == "compressedPlanePortraitA1.5B1") warper_creator = new cv::CompressedRectilinearPortraitWarper(1.5, 1); else if (warp_type == "paniniA2B1") warper_creator = new cv::PaniniWarper(2, 1); else if (warp_type == "paniniA1.5B1") warper_creator = new cv::PaniniWarper(1.5, 1); else if (warp_type == "paniniPortraitA2B1") warper_creator = new cv::PaniniPortraitWarper(2, 1); else if (warp_type == "paniniPortraitA1.5B1") warper_creator = new cv::PaniniPortraitWarper(1.5, 1); else if (warp_type == "mercator") warper_creator = 
new cv::MercatorWarper(); else if (warp_type == "transverseMercator") warper_creator = new cv::TransverseMercatorWarper(); } if (warper_creator.empty()) { cout << "Can't create the following warper '" << warp_type << "'\n"; return 1; } Ptr<RotationWarper> warper = warper_creator->create(static_cast<float>(warped_image_scale * seam_work_aspect)); for (int i = 0; i < num_images; ++i) { Mat_<float> K; cameras[i].K().convertTo(K, CV_32F); float swa = (float)seam_work_aspect; K(0,0) *= swa; K(0,2) *= swa; K(1,1) *= swa; K(1,2) *= swa; corners[i] = warper->warp(images[i], K, cameras[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]); sizes[i] = images_warped[i].size(); warper->warp(masks[i], K, cameras[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]); } vector<Mat> images_warped_f(num_images); for (int i = 0; i < num_images; ++i) images_warped[i].convertTo(images_warped_f[i], CV_32F); Ptr<ExposureCompensator> compensator = ExposureCompensator::createDefault(expos_comp_type); compensator->feed(corners, images_warped, masks_warped); Ptr<SeamFinder> seam_finder; if (seam_find_type == "no") seam_finder = new detail::NoSeamFinder(); else if (seam_find_type == "voronoi") seam_finder = new detail::VoronoiSeamFinder(); else if (seam_find_type == "gc_color") { #ifdef HAVE_OPENCV_ if (try_ && ::getCudaEnabledDeviceCount() > 0) seam_finder = new detail::GraphCutSeamFinderGpu(GraphCutSeamFinderBase::COST_COLOR); else #endif seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR); } else if (seam_find_type == "gc_colorgrad") { #ifdef HAVE_OPENCV_ if (try_ && ::getCudaEnabledDeviceCount() > 0) seam_finder = new detail::GraphCutSeamFinderGpu(GraphCutSeamFinderBase::COST_COLOR_GRAD); else #endif seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR_GRAD); } /* else if (seam_find_type == "dp_color") seam_finder = new detail::DpSeamFinder(DpSeamFinder::COLOR); else if (seam_find_type == "dp_colorgrad") seam_finder = new 
detail::DpSeamFinder(DpSeamFinder::COLOR_GRAD);*/ if (seam_finder.empty()) { cout << "Can't create the following seam finder '" << seam_find_type << "'\n"; return 1; } seam_finder->find(images_warped_f, corners, masks_warped); // Release unused memory images.clear(); images_warped.clear(); images_warped_f.clear(); masks.clear(); cout << endl <<"Compositing..." << endl; Mat img_warped, img_warped_s; Mat dilated_mask, seam_mask, mask, mask_warped; Ptr<Blender> blender; //double compose_seam_aspect = 1; double compose_work_aspect = 1; Mat mask0; vdata.resize(num_images); for (int img_idx = 0; img_idx < num_images; ++img_idx) { cout << endl << "Compositing image #" << indices[img_idx]+1 << endl; // Read image and resize it if necessary full_img = input_imgs[img_idx].clone(); if (!is_compose_scale_set) { if (compose_megapix > 0) compose_scale = min(1.0, sqrt(compose_megapix * 1e6 / full_img.size().area())); is_compose_scale_set = true; // Compute relative scales //compose_seam_aspect = compose_scale / seam_scale; compose_work_aspect = compose_scale / work_scale; // Update warped image scale warped_image_scale *= static_cast<float>(compose_work_aspect); warper = warper_creator->create(warped_image_scale); // Update corners and sizes for (int i = 0; i < num_images; ++i) { // Update intrinsics cameras[i].focal *= compose_work_aspect; cameras[i].ppx *= compose_work_aspect; cameras[i].ppy *= compose_work_aspect; // Update corner and size Size sz = full_img_sizes[i]; if (std::abs(compose_scale - 1) > 1e-1) { sz.width = cvRound(full_img_sizes[i].width * compose_scale); sz.height = cvRound(full_img_sizes[i].height * compose_scale); } Mat K; cameras[i].K().convertTo(K, CV_32F); Rect roi = warper->warpRoi(sz, K, cameras[i].R); corners[i] = roi.tl(); sizes[i] = roi.size(); } } if (abs(compose_scale - 1) > 1e-1) resize(full_img, img, Size(), compose_scale, compose_scale); else img = full_img; full_img.release(); Size img_size = img.size(); Mat K; cameras[img_idx].K().convertTo(K, 
CV_32F); // Warp the current image warper->warp(img, K, cameras[img_idx].R, INTER_LINEAR, BORDER_REFLECT, img_warped); // Warp the current image mask mask.create(img_size, CV_8U); mask.setTo(Scalar::all(255)); warper->warp(mask, K, cameras[img_idx].R, INTER_NEAREST, BORDER_CONSTANT, mask_warped); compensator->apply(img_idx, corners[img_idx], img_warped, mask_warped); // Compensate exposure img_warped.convertTo(img_warped_s, CV_16S); img_warped.release(); img.release(); mask.release(); dilate(masks_warped[img_idx], dilated_mask, Mat()); resize(dilated_mask, seam_mask, mask_warped.size()); mask_warped = seam_mask & mask_warped; if (blender.empty()) { blender = Blender::createDefault(blend_type, try_); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) * blend_strength / 100.f; if (blend_width < 1.f) blender = Blender::createDefault(Blender::NO, try_); else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender)); mb->setNumBands(static_cast<int>(ceil(log(blend_width)/log(2.)) - 1.)); cout << "Multi-band blender, number of bands: " << mb->numBands() << endl; } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender)); fb->setSharpness(1.f/blend_width); cout << "Feather blender, sharpness: " << fb->sharpness() << endl; } // blender->prepare(corners, sizes); } //if(img_idx==0) //{ // mask0=mask_warped.clone(); //} //else //{ // int dx=corners[0].x-corners[img_idx].x, dy=corners[0].y-corners[img_idx].y; // for(int yi=0; yi<mask_warped.rows; ++yi) // { // for(int xi=0; xi<mask_warped.cols; ++xi) // { // int x=xi-dx, y=yi-dy; // if(unsigned(y)<mask0.rows&&unsigned(x)<mask0.cols && mask0.at<uchar>(y,x)!=0) // mask_warped.at<uchar>(yi,xi)=0; // } // } //} vdata[img_idx].m_wapredAlpha=mask_warped; vdata[img_idx].m_warpedImg=img_warped_s; vdata[img_idx].m_corner=corners[img_idx]; 
vdata[img_idx].m_size=img_warped_s.size(); } return 0; }
int stitch(const vector<Mat> &input_imgs, Mat &result_img, Mat &result_mask, cv::Point &refCorner) { std::vector<std::vector<StitchData> > vdata(input_imgs.size()); std::vector<Mat> ii(2); ii[0]=(input_imgs[0]); int K=0; for(int i=1; i<input_imgs.size(); ++i) { ii[1]=input_imgs[i]; vdata[K].resize(2); if(stitch_pair(ii,vdata[K])==0) ++K; } if(K==0) return -1; vdata.resize(K); refCorner=vdata[0][0].m_corner; std::vector<cv::Point> corners; std::vector<cv::Size> sizes; for(int i=0; i<vdata.size(); ++i) { for(size_t j=0; j<vdata[i].size(); ++j) { vdata[i][j].m_corner+=vdata[0][0].m_corner-vdata[i][0].m_corner; } } corners.push_back(vdata[0][0].m_corner); sizes.push_back(vdata[0][0].m_size); for(int i=0; i<vdata.size(); ++i) { corners.push_back(vdata[i][1].m_corner); sizes.push_back(vdata[i][1].m_size); } Ptr<Blender> blender; if (blender.empty()) { blender = Blender::createDefault(blend_type, try_); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) * blend_strength / 100.f; if (blend_width < 1.f) blender = Blender::createDefault(Blender::NO, try_); else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender)); mb->setNumBands(static_cast<int>(ceil(log(blend_width)/log(2.)) - 1.)); cout << "Multi-band blender, number of bands: " << mb->numBands() << endl; } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender)); fb->setSharpness(1.f/blend_width); cout << "Feather blender, sharpness: " << fb->sharpness() << endl; } } blender->prepare(corners, sizes); blender->feed(vdata[0][0].m_warpedImg, vdata[0][0].m_wapredAlpha, vdata[0][0].m_corner); for(int i=0; i<vdata.size(); ++i) { blender->feed(vdata[i][1].m_warpedImg, vdata[i][1].m_wapredAlpha, vdata[i][1].m_corner); } blender->blend(result_img, result_mask); return 0; }
int main(int argc, char** argv) { // Program initialization clock_t timerOverall; timerOverall = clock(); string input; vector <Image> images; vector <Mat> images_scale; double work_scale = 1, seam_scale = 1, compose_scale = 1; bool is_work_scale_set = false, is_seam_scale_set = false, is_compose_scale_set = false; double seam_work_aspect = 1; double compose_work_aspect = 1; if (argc < 2) { cout << "No input images file provided." << endl; system("PAUSE"); return (0); } // Open input file ifstream file(argv[1]); // Check if input is empty or the file is empty if ((!file.is_open()) || (file.peek() == ifstream::traits_type::eof())) { cout << "Provide input images file with .txt extension and " << endl; cout << "make sure it's not empty: "; system("PAUSE"); return (0); } // Extract image paths line by line string str; while (getline(file, str)) { Mat input_img; input_img = (imread(str, IMREAD_UNCHANGED)); // Check for invalid input if (input_img.empty()) { cout << "Could not open or find the image!!!" << endl; cout << "Program terminated!!!" 
<< endl; return (0); } Image tmp_img(input_img); images.push_back(tmp_img); if (work_megapix < 0) { work_scale = 1; is_work_scale_set = true; } else { Mat tmp; if (!is_work_scale_set) { work_scale = min(1.0, sqrt(work_megapix * 1e6 / tmp_img.getImg().size().area())); is_work_scale_set = true; } resize(tmp_img.getImg(), tmp, Size(), work_scale, work_scale); } if (!is_seam_scale_set) { seam_scale = min(1.0, sqrt(seam_megapix * 1e6 / tmp_img.getImg().size().area())); seam_work_aspect = seam_scale / work_scale; is_seam_scale_set = true; } Mat tmp; resize(tmp_img.getImg(), tmp, Size(), seam_scale, seam_scale); images_scale.push_back(tmp); } // Global Histogram Equalization // Images have been processed if (hist == 1) { for (int i = 0; i < images.size(); i++) images[i].setImg(hist_equalize(images[i].getImg())); } // Feature Detection and Descirption // Timing clock_t timerFDD; timerFDD = clock(); cout << "Feature Detection and Descirption..." << endl; det_desc_features(images, hist); //show_image(images, CV_8UC3, "Keypoints"); timerFDD = clock() - timerFDD; cout << "Feature Detection and Description time: " << ((float)timerFDD / CLOCKS_PER_SEC) << " seconds." << endl; cout << endl; // Feature Matching // Timing clock_t timerFM; timerFM = clock(); vector <MatchesInfo> pairwise_matches; cout << "Pairwise image matching..." << endl; match_features(pairwise_matches, images, hist); timerFM = clock() - timerFM; cout << "Pairwise image matching time: " << ((float)timerFM / CLOCKS_PER_SEC) << " seconds." << endl; cout << endl; // Reject noise images which match to no other images cout << "Reject noise images which match to no other images..." 
<< endl; bool flag = imageValidate(pairwise_matches, images, images_scale); if (flag) { system("PAUSE"); return (0); } cout << "Images that belong to panorama are: "; for (size_t i = 0; i < images.size(); i++) { cout << " #" << images[i].getID(); } cout << endl; // Estimate Homography and bundle adjustement clock_t timerHBA; timerHBA = clock(); cout << "Estimating Homography and bundle adjust camera intrinsics..." << endl; vector <double> focals; focals = homogr_ba(images, pairwise_matches); timerHBA = clock() - timerHBA; cout << "Estimating Homography and bundle adjust camera intrinsics time: " << ((float)timerHBA / CLOCKS_PER_SEC) << " seconds." << endl; cout << endl; // Find median focal length sort(focals.begin(), focals.end()); float warped_image_scale; if (focals.size() % 2 == 1) warped_image_scale = static_cast<float>(focals[focals.size() / 2]); else warped_image_scale = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f; // Wave correction wave_correct(images); // Warping images clock_t timerWarp; timerWarp = clock(); cout << "Warping images (auxiliary)... 
" << endl; vector<Point> corners(images.size()); // top left corner of image vector<Mat> masks_warped(images.size()); // mask for warping vector<Mat> images_warped(images.size()); // warped images used in gain conmpesation vector<Size> sizes(images.size()); vector<Mat> masks(images.size()); // used to create masks_warped // Preapre images masks for (int i = 0; i < images.size(); ++i) { masks[i].create(images_scale[i].size(), CV_8U); masks[i].setTo(Scalar::all(255)); } // Warp images and their masks Ptr<WarperCreator> warper_creator; // Warping type if (warp_type == "plane") warper_creator = new cv::PlaneWarper(); else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper(); else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper(); if (warper_creator.empty()) { cout << "Can't create the following warper '" << warp_type << "'\n"; return (0); } // Find corners and estimate masks_warped Ptr<RotationWarper> warper = warper_creator->create(static_cast<float>(warped_image_scale * seam_work_aspect)); for (int i = 0; i < images.size(); ++i) { Mat_<float> K; images[i].getIntrinsics().K().convertTo(K, CV_32F); float swa = (float)seam_work_aspect; K(0, 0) *= swa; K(0, 2) *= swa; K(1, 1) *= swa; K(1, 2) *= swa; corners[i] = warper->warp(images_scale[i], K, images[i].getIntrinsics().R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]); sizes[i] = images_warped[i].size(); warper->warp(masks[i], K, images[i].getIntrinsics().R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]); } vector<Mat> images_warped_f(images.size()); for (size_t i = 0; i < images.size(); ++i) { images_warped[i].convertTo(images_warped_f[i], CV_32F); } timerWarp = clock() - timerWarp; cout << "Image warping time: " << ((float)timerWarp / CLOCKS_PER_SEC) << " seconds." 
<< endl; // Gain compensation Ptr<ExposureCompensator> compensator = ExposureCompensator::createDefault(expos_comp_type); compensator->feed(corners, images_warped, masks_warped); // Seam estimation type Ptr<SeamFinder> seam_finder; if (seam_find_type == "no") seam_finder = new detail::NoSeamFinder(); else if (seam_find_type == "voronoi") seam_finder = new detail::VoronoiSeamFinder(); else if (seam_find_type == "gc_color") { seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR); } else if (seam_find_type == "gc_colorgrad") { seam_finder = new detail::GraphCutSeamFinder(GraphCutSeamFinderBase::COST_COLOR_GRAD); } else if (seam_find_type == "dp_color") seam_finder = new detail::DpSeamFinder(DpSeamFinder::COLOR); else if (seam_find_type == "dp_colorgrad") seam_finder = new detail::DpSeamFinder(DpSeamFinder::COLOR_GRAD); if (seam_finder.empty()) { cout << "Can't create the following seam finder '" << seam_find_type << "'\n"; return (0); } // Find seams between images seam_finder->find(images_warped_f, corners, masks_warped); // Release unused memory images_warped.clear(); images_warped_f.clear(); masks.clear(); // Compose panorama clock_t timerCompose; timerCompose = clock(); cout << "Compositing..." 
<< endl; Mat img_warped, img_warped_s; Mat dilated_mask, seam_mask, mask, mask_warped; Ptr<Blender> blender; for (size_t i = 0; i < images.size(); i++) { cout << "Compositing image #" << images[i].getID() << endl; // Read image and resize it if necessary if (!is_compose_scale_set) { is_compose_scale_set = true; // Compute relative scales //compose_seam_aspect = compose_scale / seam_scale; compose_work_aspect = compose_scale / work_scale; // Update warped image scale warped_image_scale *= static_cast<float>(compose_work_aspect); warper = warper_creator->create(warped_image_scale); // Update corners and sizes for (int i = 0; i < images.size(); ++i) { // Update intrinsics CameraParams camera = images[i].getIntrinsics(); camera.focal *= compose_work_aspect; camera.ppx *= compose_work_aspect; camera.ppy *= compose_work_aspect; // Update corner and size Size sz = images[i].getImg().size(); if (std::abs(compose_scale - 1) > 1e-1) { sz.width = cvRound(images[i].getImg().rows * compose_scale); sz.height = cvRound(images[i].getImg().cols * compose_scale); } Mat K; camera.K().convertTo(K, CV_32F); Rect roi = warper->warpRoi(sz, K, camera.R); corners[i] = roi.tl(); sizes[i] = roi.size(); } } if (abs(compose_scale - 1) > 1e-1) { resize(images[i].getImg(), images_scale[i], Size(), compose_scale, compose_scale); } else { images_scale[i] = images[i].getImg(); } Mat K; images[i].getIntrinsics().K().convertTo(K, CV_32F); // Warp the current image warper->warp(images[i].getImg(), K, images[i].getIntrinsics().R, INTER_LINEAR, BORDER_REFLECT, img_warped); // Warp the current image mask mask.create(images_scale[i].size(), CV_8U); mask.setTo(Scalar::all(255)); warper->warp(mask, K, images[i].getIntrinsics().R, INTER_NEAREST, BORDER_CONSTANT, mask_warped); // Compensate exposure compensator->apply(static_cast <int> (i), corners[i], img_warped, mask_warped); img_warped.convertTo(img_warped_s, CV_16S); img_warped.release(); mask.release(); dilate(masks_warped[i], dilated_mask, Mat()); 
resize(dilated_mask, seam_mask, mask_warped.size()); mask_warped = seam_mask & mask_warped; // Blender type if (blender.empty()) { blender = Blender::createDefault(blend_type, false); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) * blend_strength / 100.f; if (blend_width < 1.f) { blender = Blender::createDefault(Blender::NO, false); } else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(static_cast<Blender*>(blender)); mb->setNumBands(static_cast<int>(ceil(log(blend_width) / log(2.)) - 1.)); cout << "Multi-band blender, number of bands: " << mb->numBands() << endl; } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(static_cast<Blender*>(blender)); fb->setSharpness(1.f / blend_width); cout << "Feather blender, sharpness: " << fb->sharpness() << endl; } blender->prepare(corners, sizes); } // Blend the current image blender->feed(img_warped_s, mask_warped, corners[i]); } Mat result, result_mask; blender->blend(result, result_mask); timerCompose = clock() - timerCompose; cout << "Image compose time: " << ((float)timerCompose / CLOCKS_PER_SEC) << " seconds." << endl; imwrite(result_name, result); timerOverall = clock() - timerOverall; cout << "Overall time: " << ((float)timerOverall / CLOCKS_PER_SEC) << " seconds." << endl; images.clear(); system("PAUSE"); return(0); }
int main(int argc, char* argv[]) { // Default parameters vector<String> img_names; double scale = 1; string features_type = "orb";//"surf" or "orb" features type float match_conf = 0.3f; float conf_thresh = 0.2f; string adjuster_method = "ray";//"reproj" or "ray" adjuster method bool do_wave_correct = true; WaveCorrectKind wave_correct_type = WAVE_CORRECT_HORIZ; string warp_type = "spherical"; int expos_comp_type = ExposureCompensator::GAIN_BLOCKS; string seam_find_type = "gc_color"; float blend_strength = 5; int blend_type = Blender::MULTI_BAND; string result_name = "panorama_result.jpg"; double start_time = getTickCount(); // 1-Input images if(argc > 1) { for(int i=1; i < argc; i++) img_names.push_back(argv[i]); } else { img_names.push_back("./panorama_image1.jpg"); img_names.push_back("./panorama_image2.jpg"); } // Check if have enough images int num_images = static_cast<int>(img_names.size()); if (num_images < 2) {cout << "Need more images" << endl; return -1; } // 2- Resize images and find features steps cout << "Finding features..." 
<< endl; double t = getTickCount(); Ptr<FeaturesFinder> finder; if (features_type == "surf") finder = makePtr<SurfFeaturesFinder>(); else if (features_type == "orb") finder = makePtr<OrbFeaturesFinder>(); else {cout << "Unknown 2D features type: '" << features_type <<endl; return -1; } Mat full_img, img; vector<ImageFeatures> features(num_images); vector<Mat> images(num_images); vector<Size> full_img_sizes(num_images); for (int i = 0; i < num_images; ++i) { full_img = imread(img_names[i]); full_img_sizes[i] = full_img.size(); if (full_img.empty()) {cout << "Can't open image " << img_names[i] << endl; return -1; } resize(full_img, img, Size(), scale, scale); images[i] = img.clone(); (*finder)(img, features[i]); features[i].img_idx = i; cout << "Features in image #" << i+1 << " are : " <<features[i].keypoints.size() << endl; } finder->collectGarbage(); full_img.release(); img.release(); cout << "Finding features, time: " << ((getTickCount() - t) /getTickFrequency()) << " sec" << endl; // 3- Match features cout << "Pairwise matching" << endl; t = getTickCount(); vector<MatchesInfo> pairwise_matches; BestOf2NearestMatcher matcher(false, match_conf); matcher(features, pairwise_matches); matcher.collectGarbage(); cout << "Pairwise matching, time: " << ((getTickCount() - t) /getTickFrequency()) << " sec" << endl; // 4- Select images and matches subset to build panorama vector<int> indices = leaveBiggestComponent(features, pairwise_matches, conf_thresh); vector<Mat> img_subset; vector<String> img_names_subset; vector<Size> full_img_sizes_subset; for (size_t i = 0; i < indices.size(); ++i) { img_names_subset.push_back(img_names[indices[i]]); img_subset.push_back(images[indices[i]]); full_img_sizes_subset.push_back(full_img_sizes[indices[i]]); } images = img_subset; img_names = img_names_subset; full_img_sizes = full_img_sizes_subset; // Estimate camera parameters rough HomographyBasedEstimator estimator; vector<CameraParams> cameras; if (!estimator(features, 
pairwise_matches, cameras)){cout <<"Homography estimation failed." << endl; return -1; } for (size_t i = 0; i < cameras.size(); ++i) { Mat R; cameras[i].R.convertTo(R, CV_32F); cameras[i].R = R; cout << "Initial intrinsic #" << indices[i]+1 << ":\n" <<cameras[i].K() << endl; } // 5- Refine camera parameters globally Ptr<BundleAdjusterBase> adjuster; if (adjuster_method == "reproj") // "reproj" method adjuster = makePtr<BundleAdjusterReproj>(); else // "ray" method adjuster = makePtr<BundleAdjusterRay>(); adjuster->setConfThresh(conf_thresh); if (!(*adjuster)(features, pairwise_matches, cameras)) {cout <<"Camera parameters adjusting failed." << endl; return -1; } // Find median focal length vector<double> focals; for (size_t i = 0; i < cameras.size(); ++i) { cout << "Camera #" << indices[i]+1 << ":\n" << cameras[i].K()<< endl; focals.push_back(cameras[i].focal); } sort(focals.begin(), focals.end()); float warped_image_scale; if (focals.size() % 2 == 1) warped_image_scale = static_cast<float>(focals[focals.size() /2]); else warped_image_scale = static_cast<float>(focals[focals.size() /2 - 1] + focals[focals.size() / 2]) * 0.5f; // 6- Wave correlation (optional) if (do_wave_correct) { vector<Mat> rmats; for (size_t i = 0; i < cameras.size(); ++i) rmats.push_back(cameras[i].R.clone()); waveCorrect(rmats, wave_correct_type); for (size_t i = 0; i < cameras.size(); ++i) cameras[i].R = rmats[i]; } // 7- Warp images cout << "Warping images (auxiliary)... 
" << endl; t = getTickCount(); vector<Point> corners(num_images); vector<UMat> masks_warped(num_images); vector<UMat> images_warped(num_images); vector<Size> sizes(num_images); vector<UMat> masks(num_images); // Prepare images masks for (int i = 0; i < num_images; ++i) { masks[i].create(images[i].size(), CV_8U); masks[i].setTo(Scalar::all(255)); } // Map projections Ptr<WarperCreator> warper_creator; if (warp_type == "rectilinear") warper_creator = makePtr<cv::CompressedRectilinearWarper>(2.0f, 1.0f); else if (warp_type == "cylindrical") warper_creator = makePtr<cv::CylindricalWarper>(); else if (warp_type == "spherical") warper_creator = makePtr<cv::SphericalWarper>(); else if (warp_type == "stereographic") warper_creator = makePtr<cv::StereographicWarper>(); else if (warp_type == "panini") warper_creator = makePtr<cv::PaniniWarper>(2.0f, 1.0f); if (!warper_creator){ cout << "Can't create the following warper'" << warp_type << endl; return 1; } Ptr<RotationWarper> warper = warper_creator->create(static_cast<float>(warped_image_scale * scale)); for (int i = 0; i < num_images; ++i) { Mat_<float> K; cameras[i].K().convertTo(K, CV_32F); float swa = (float)scale; K(0,0) *= swa; K(0,2) *= swa; K(1,1) *= swa; K(1,2) *= swa; corners[i] = warper->warp(images[i], K, cameras[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]); sizes[i] = images_warped[i].size(); warper->warp(masks[i], K, cameras[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]); } vector<UMat> images_warped_f(num_images); for (int i = 0; i < num_images; ++i) images_warped[i].convertTo(images_warped_f[i], CV_32F); cout << "Warping images, time: " << ((getTickCount() - t) /getTickFrequency()) << " sec" << endl; // 8- Compensate exposure errors Ptr<ExposureCompensator> compensator = ExposureCompensator::createDefault(expos_comp_type); compensator->feed(corners, images_warped, masks_warped); // 9- Find seam masks Ptr<SeamFinder> seam_finder; if (seam_find_type == "no") seam_finder = 
makePtr<NoSeamFinder>(); else if (seam_find_type == "voronoi") seam_finder = makePtr<VoronoiSeamFinder>(); else if (seam_find_type == "gc_color") seam_finder = makePtr<GraphCutSeamFinder>(GraphCutSeamFinderBase::COST_COLOR); else if (seam_find_type == "gc_colorgrad") seam_finder = makePtr<GraphCutSeamFinder>(GraphCutSeamFinderBase::COST_COLOR_GRAD); else if (seam_find_type == "dp_color") seam_finder = makePtr<DpSeamFinder>(DpSeamFinder::COLOR); else if (seam_find_type == "dp_colorgrad") seam_finder = makePtr<DpSeamFinder>(DpSeamFinder::COLOR_GRAD); if (!seam_finder){cout << "Can't create the following seam finder'" << seam_find_type << endl; return 1; } seam_finder->find(images_warped_f, corners, masks_warped); // Release unused memory images.clear(); images_warped.clear(); images_warped_f.clear(); masks.clear(); // 10- Create a blender Ptr<Blender> blender = Blender::createDefault(blend_type, false); Size dst_sz = resultRoi(corners, sizes).size(); float blend_width = sqrt(static_cast<float>(dst_sz.area())) *blend_strength / 100.f; if (blend_width < 1.f) blender = Blender::createDefault(Blender::NO, false); else if (blend_type == Blender::MULTI_BAND) { MultiBandBlender* mb = dynamic_cast<MultiBandBlender*>(blender.get()); mb->setNumBands(static_cast<int>(ceil(log(blend_width)/log(2.)) - 1.)); cout << "Multi-band blender, number of bands: " << mb->numBands() << endl; } else if (blend_type == Blender::FEATHER) { FeatherBlender* fb = dynamic_cast<FeatherBlender*>(blender.get()); fb->setSharpness(1.f/blend_width); cout << "Feather blender, sharpness: " << fb->sharpness() <<endl; } blender->prepare(corners, sizes); // 11- Compositing step cout << "Compositing..." 
<< endl; t = getTickCount(); Mat img_warped, img_warped_s; Mat dilated_mask, seam_mask, mask, mask_warped; for (int img_idx = 0; img_idx < num_images; ++img_idx) { cout << "Compositing image #" << indices[img_idx]+1<< endl; // 11.1- Read image and resize it if necessary full_img = imread(img_names[img_idx]); if (abs(scale - 1) > 1e-1) resize(full_img, img, Size(), scale, scale); else img = full_img; full_img.release(); Size img_size = img.size(); Mat K; cameras[img_idx].K().convertTo(K, CV_32F); // 11.2- Warp the current image warper->warp(img, K, cameras[img_idx].R, INTER_LINEAR, BORDER_REFLECT,img_warped); // Warp the current image mask mask.create(img_size, CV_8U); mask.setTo(Scalar::all(255)); warper->warp(mask, K, cameras[img_idx].R, INTER_NEAREST,BORDER_CONSTANT, mask_warped); // 11.3- Compensate exposure error step compensator->apply(img_idx, corners[img_idx], img_warped, mask_warped); img_warped.convertTo(img_warped_s, CV_16S); img_warped.release(); img.release(); mask.release(); dilate(masks_warped[img_idx], dilated_mask, Mat()); resize(dilated_mask, seam_mask, mask_warped.size()); mask_warped = seam_mask & mask_warped; // 11.4- Blending images step blender->feed(img_warped_s, mask_warped, corners[img_idx]); } Mat result, result_mask; blender->blend(result, result_mask); cout << "Compositing, time: " << ((getTickCount() - t) /getTickFrequency()) << " sec" << endl; imwrite(result_name, result); cout << "Finished, total time: " << ((getTickCount() - start_time)/ getTickFrequency()) << " sec" << endl; return 0; }