// Apply the rotation transformMat to every image of the panorama:
// orientation (yaw/pitch/roll), translation vector (X/Y/Z) and the
// translation remap plane are all rotated consistently.
// NOTE: conditional_set is a macro that presumably skips variables linked
// to an already-updated image (see comment below) — it references the
// locals `image`/`copy` by name, so keep those names intact.
void RotatePanorama::rotatePano(PanoramaData& panorama, const Matrix3& transformMat)
{
    for (unsigned int i = 0; i < panorama.getNrOfImages(); i++)
    {
        const SrcPanoImage & image = panorama.getImage(i);
        // work on a copy; committed at the end via setImage()
        SrcPanoImage copy = image;
        double y = image.getYaw();
        double p = image.getPitch();
        double r = image.getRoll();
        // build the image's current rotation matrix (PanoTools angle convention)
        Matrix3 mat;
        mat.SetRotationPT(DEG_TO_RAD(y), DEG_TO_RAD(p), DEG_TO_RAD(r));
        DEBUG_DEBUG("rotation matrix (PT) for img " << i << " << ypr:" << y << " " << p << " " << r << std::endl << mat);
        // compose with the requested transformation, then convert back to angles
        Matrix3 rotated;
        rotated = transformMat * mat;
        DEBUG_DEBUG("rotation matrix after transform: " << rotated);
        rotated.GetRotationPT(y,p,r);
        y = RAD_TO_DEG(y);
        p = RAD_TO_DEG(p);
        r = RAD_TO_DEG(r);
        DEBUG_DEBUG("rotated angles of img " << i << ": " << y << " " << p << " " << r);
        
        // Don't update a variable linked to a variable we already updated.
        conditional_set(Yaw, y);
        conditional_set(Pitch, p);
        conditional_set(Roll, r);
        // only images with a non-zero mosaic translation need the extra work below
        if(image.getX()!=0.0 || image.getY()!=0.0 || image.getZ()!=0.0)
        {
            // rotate translation vector
            // note the axis permutation: pano (Z,X,Y) <-> vector (x,y,z);
            // the inverse transform is applied to the translation
            Vector3 vecRot=transformMat.Inverse().TransformVector(Vector3(image.getZ(), image.getX(), image.getY()));
            conditional_set(X, vecRot.y);
            conditional_set(Y, vecRot.z);
            conditional_set(Z, vecRot.x);
            // rotate translation plane
            // (plane has only yaw/pitch; roll is fixed at 0)
            mat.SetRotationPT(DEG_TO_RAD(image.getTranslationPlaneYaw()), DEG_TO_RAD(image.getTranslationPlanePitch()), 0.0);
            rotated = transformMat * mat;
            // y/p are reused here for the plane angles; r is ignored
            rotated.GetRotationPT(y,p,r);
            conditional_set(TranslationPlaneYaw, RAD_TO_DEG(y));
            conditional_set(TranslationPlanePitch, RAD_TO_DEG(p));
        };
        // commit the modified copy and notify observers
        panorama.setImage(i, copy);
        panorama.imageChanged(i);
    }
}
// Fetch the value of the bound variable (m_name) from the bound image
// (m_img) of the given panorama.
double get(PanoramaData & pano) const
{
    const SrcPanoImage & boundImage = pano.getImage(m_img);
    return boundImage.getVar(m_name);
}
// Compute a rotation matrix that levels the panorama's horizon.
// The "up" direction is estimated as the eigenvector with the smallest
// eigenvalue of the covariance matrix of each image's sideways axis;
// the returned matrix rotates that estimate onto the world z axis.
// Returns the identity rotation when no usable (unlinked) image exists.
Matrix3 StraightenPanorama::calcStraighteningRotation(const PanoramaData& panorama)
{
    // landscape/non rotated portrait detection is not working correctly
    // should use the exif rotation tag but thats not stored anywhere currently...
    // 1: use y axis (image x axis), for normal image
    // 0: use z axis (image y axis), for non rotated portrait images
    // (usually rotation is just stored in EXIF tag)
    std::vector<int> coord_idx;
    for (unsigned int i = 0; i < panorama.getNrOfImages(); i++)
    {
        SrcPanoImage img = panorama.getSrcImage(i);
        // BUG: need to read exif data here, since exif orientation is not
        // stored in Panorama data model
        double fl = 0;
        double crop = 0;
        img.readEXIF(fl, crop, false, false);
        double roll = img.getExifOrientation();
        if (roll == 90 || roll == 270)
        {
            coord_idx.push_back(2);
        }
        else
        {
            coord_idx.push_back(1);
        }
    }

    // build covariance matrix of X
    // NOTE(review): relies on Matrix3's default-constructed state before the
    // += accumulation below — confirm it zero-initializes.
    Matrix3 cov;
    unsigned int nrOfVariableImages = 0;
    for (unsigned int i = 0; i < panorama.getNrOfImages(); i++)
    {
        const SrcPanoImage & img = panorama.getImage(i);
        if (img.YawisLinked())
        {
            // only consider images which are not linked with the previous ones
            bool consider = true;
            for (unsigned int j = 0; j < i; j++)
            {
                if (img.YawisLinkedWith(panorama.getImage(j)))
                {
                    consider = false;
                    break;
                }
            }
            if (!consider)
            {
                continue;
            }
        }
        double y = const_map_get(panorama.getImageVariables(i), "y").getValue();
        double p = const_map_get(panorama.getImageVariables(i), "p").getValue();
        double r = const_map_get(panorama.getImageVariables(i), "r").getValue();
        Matrix3 mat;
        mat.SetRotationPT(DEG_TO_RAD(y), DEG_TO_RAD(p), DEG_TO_RAD(r));
        nrOfVariableImages++;
        DEBUG_DEBUG("mat = " << mat);
        // accumulate outer product of the chosen axis column
        for (int j = 0; j < 3; j++)
        {
            for (int k = 0; k < 3; k++)
            {
                cov.m[j][k] += mat.m[j][coord_idx[i]] * mat.m[k][coord_idx[i]];
            }
        }
    }
    if (nrOfVariableImages == 0)
    {
        // no images, or every image is yaw-linked to an earlier one:
        // nothing to straighten; also avoids a division by zero (NaN matrix) below
        Matrix3 ident;
        ident.SetIdentity();
        return ident;
    }
    cov /= nrOfVariableImages;
    DEBUG_DEBUG("cov = " << cov);

    // calculate eigenvalues and vectors
    Matrix3 eigvectors;
    double eigval[3];
    int eigvalIdx[3];
    int maxsweep = 100;
    int maxannil = 0;
    double eps = 1e-16;
    hugin_utils::eig_jacobi(3, cov.m, eigvectors.m, eigval, eigvalIdx, &maxsweep, &maxannil, &eps);

    DEBUG_DEBUG("Eigenvectors & eigenvalues:" << std::endl
                << "V = " << eigvectors << std::endl
                << "D = [" << eigval[0] << ", " << eigval[1] << ", " << eigval[2] << " ]"
                << "idx = [" << eigvalIdx[0] << ", " << eigvalIdx[1] << ", " << eigvalIdx[2] << " ]");

    // get up vector, eigenvector with smallest eigenvalue
    Vector3 up;
    up.x = eigvectors.m[eigvalIdx[2]][0];
    up.y = eigvectors.m[eigvalIdx[2]][1];
    up.z = eigvectors.m[eigvalIdx[2]][2];
    // normalize vector
    up.Normalize();
    DEBUG_DEBUG("Up vector: up = " << up);

    double rotAngle = acos(up.Dot(Vector3(0, 0, 1)));
    if (rotAngle > M_PI / 2)
    {
        // turn in shorter direction
        up *= -1;
        rotAngle = acos(up.Dot(Vector3(0, 0, 1)));
    }
    DEBUG_DEBUG("rotation Angle: " << rotAngle);

    // get rotation axis
    Vector3 rotAxis = up.Cross(Vector3(0, 0, 1));
    // fixed: previously this logged rotAngle instead of the axis
    DEBUG_DEBUG("rotAxis = " << rotAxis);

    // calculate rotation matrix
    Matrix3 rotMat = GetRotationAroundU(rotAxis, -rotAngle);
    DEBUG_DEBUG("rotMat = " << rotMat);
    return rotMat;
}
// Shift the panorama's yaw so the occupied field of view is horizontally
// centered. Works by remapping all active (unlinked) images into a tiny
// 360x180 equirectangular alpha pano (1 pixel per degree), locating the
// occupied yaw range, and rotating the pano by minus its midpoint.
void CenterHorizontally::centerHorizontically(PanoramaData& panorama)
{
    vigra::Size2D panoSize(360, 180);

    // remap into minature pano.
    PanoramaOptions opts;
    opts.setHFOV(360);
    opts.setProjection(PanoramaOptions::EQUIRECTANGULAR);
    opts.setWidth(360);
    opts.setHeight(180);

    // remap image
    vigra::BImage panoAlpha(panoSize);
    Nona::RemappedPanoImage<vigra::BImage, vigra::BImage> remapped;

    // use selected images.
    const UIntSet allActiveImgs(panorama.getActiveImages());
    if (allActiveImgs.empty())
    {
        // do nothing if there are no images
        return;
    }

    // only check unlinked images: one representative per yaw-link group
    UIntSet activeImgs;
    for (UIntSet::const_iterator it = allActiveImgs.begin(); it != allActiveImgs.end(); ++it)
    {
        const SrcPanoImage & img = panorama.getImage(*it);
        bool consider = true;
        if (img.YawisLinked())
        {
            for (UIntSet::const_iterator it2 = activeImgs.begin(); it2 != activeImgs.end(); ++it2)
            {
                if (img.YawisLinkedWith(panorama.getSrcImage(*it2)))
                {
                    consider = false;
                    break;
                }
            }
        }
        if (consider)
        {
            activeImgs.insert(*it);
        }
    }

    for (UIntSet::iterator it = activeImgs.begin(); it != activeImgs.end(); ++it)
    {
        remapped.setPanoImage(panorama.getSrcImage(*it), opts, vigra::Rect2D(0, 0, 360, 180));
        // calculate alpha channel
        remapped.calcAlpha();
        // copy into global alpha channel.
        vigra::copyImageIf(vigra_ext::applyRect(remapped.boundingBox(), vigra_ext::srcMaskRange(remapped)),
                           vigra_ext::applyRect(remapped.boundingBox(), vigra_ext::srcMask(remapped)),
                           vigra_ext::applyRect(remapped.boundingBox(), destImage(panoAlpha)));
    }

    // get field of view: record the yaw (h - 180) of every transition
    // between empty and occupied columns
    std::vector<int> borders;
    bool colOccupied = false;
    for (int h = 0; h < 360; h++)
    {
        bool curColOccupied = false;
        for (int v = 0; v < 180; v++)
        {
            if (panoAlpha(h, v))
            {
                // pixel is valid
                curColOccupied = true;
            }
        }
        if (colOccupied != curColOccupied)
        {
            // change in position, save point.
            borders.push_back(h - 180);
            colOccupied = curColOccupied;
        }
    }
    if (borders.empty())
    {
        // empty pano (explicit check avoids the unsigned size()-1 underflow
        // of the previous int conversion)
        return;
    }
    const int lastidx = static_cast<int>(borders.size()) - 1;
    if (colOccupied)
    {
        // we have reached the right border, and the pano is still valid
        // shift right fragments by 360 deg
        // |11 2222| -> | 222211 |
        std::vector<int> newBorders;
        newBorders.push_back(borders[lastidx]);
        for (int i = 0; i < lastidx; i++)
        {
            newBorders.push_back(borders[i] + 360);
        }
        borders = newBorders;
    }
    // midpoint of the occupied yaw range; divide by 2.0 so a half-degree
    // offset is preserved (fixed: was integer division truncating to int)
    const double dYaw = (borders[0] + borders[lastidx]) / 2.0;

    // apply yaw shift, takes also translation parameters into account
    RotatePanorama(panorama, -dYaw, 0, 0).run();
}