template<class S>
void SLICSegment2Vector3D_(cv::InputArray segment, cv::InputArray signal, double invalidValue, std::vector<std::vector<cv::Point3_<S>>>& segmentPoint)
	{
		double minv, maxv;
		minMaxLoc(segment, &minv, &maxv);
		segmentPoint.clear();
		segmentPoint.resize((int)maxv + 1);
		if (signal.depth() == CV_8U) _SLICSegment2Vector3D_<uchar, S>(segment, signal, (uchar)invalidValue, segmentPoint);
		else if (signal.depth() == CV_16S) _SLICSegment2Vector3D_<short, S>(segment, signal, (short)invalidValue, segmentPoint);
		else if (signal.depth() == CV_16U) _SLICSegment2Vector3D_<ushort, S>(segment, signal, (ushort)invalidValue, segmentPoint);
		else if (signal.depth() == CV_32S) _SLICSegment2Vector3D_<int, S>(segment, signal, (int)invalidValue, segmentPoint);
		else if (signal.depth() == CV_32F) _SLICSegment2Vector3D_<float, S>(segment, signal, (float)invalidValue, segmentPoint);
		else if (signal.depth() == CV_64F) _SLICSegment2Vector3D_<double, S>(segment, signal, (double)invalidValue, segmentPoint);
	}
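The per-type worker _SLICSegment2Vector3D_ is not part of this listing. A minimal sketch of what it presumably does, under the assumption that segment is a CV_32S superpixel label map and signal is a single-channel image of type T, collecting (x, y, value) triples per label and skipping pixels equal to invalidValue:

template<typename T, typename S>
static void _SLICSegment2Vector3D_(cv::InputArray segment, cv::InputArray signal, T invalidValue, std::vector<std::vector<cv::Point3_<S>>>& segmentPoint)
{
	const cv::Mat seg = segment.getMat();	// assumed: CV_32S superpixel label map
	const cv::Mat sig = signal.getMat();	// assumed: single-channel signal of type T, same size as seg
	for (int y = 0; y < seg.rows; ++y)
	{
		const int* l = seg.ptr<int>(y);
		const T* s = sig.ptr<T>(y);
		for (int x = 0; x < seg.cols; ++x)
		{
			if (s[x] != invalidValue)
				segmentPoint[l[x]].push_back(cv::Point3_<S>((S)x, (S)y, (S)s[x]));
		}
	}
}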
Example #2
void unprojectPointsFisheye( cv::InputArray distorted, cv::OutputArray undistorted, cv::InputArray K, cv::InputArray D, cv::InputArray R, cv::InputArray P)
{
    // will support only 2-channel data now for points
    CV_Assert(distorted.type() == CV_32FC2 || distorted.type() == CV_64FC2);
    undistorted.create(distorted.size(), CV_MAKETYPE(distorted.depth(), 3));

    CV_Assert(P.empty() || P.size() == cv::Size(3, 3) || P.size() == cv::Size(4, 3));
    CV_Assert(R.empty() || R.size() == cv::Size(3, 3) || R.total() * R.channels() == 3);
    CV_Assert(D.total() == 4 && K.size() == cv::Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));

    cv::Vec2d f, c;
    if (K.depth() == CV_32F)
    {
        cv::Matx33f camMat = K.getMat();
        f = cv::Vec2f(camMat(0, 0), camMat(1, 1));
        c = cv::Vec2f(camMat(0, 2), camMat(1, 2));
    }
    else
    {
        cv::Matx33d camMat = K.getMat();
        f = cv::Vec2d(camMat(0, 0), camMat(1, 1));
        c = cv::Vec2d(camMat(0, 2), camMat(1, 2));
    }

    cv::Vec4d k = D.depth() == CV_32F ? (cv::Vec4d)*D.getMat().ptr<cv::Vec4f>(): *D.getMat().ptr<cv::Vec4d>();

    cv::Matx33d RR = cv::Matx33d::eye();
    if (!R.empty() && R.total() * R.channels() == 3)
    {
        cv::Vec3d rvec;
        R.getMat().convertTo(rvec, CV_64F);
        RR = cv::Affine3d(rvec).rotation();
    }
    else if (!R.empty() && R.size() == cv::Size(3, 3))
        R.getMat().convertTo(RR, CV_64F);

    if(!P.empty())
    {
        cv::Matx33d PP;
        P.getMat().colRange(0, 3).convertTo(PP, CV_64F);
        RR = PP * RR;
    }

    // start undistorting
    const cv::Vec2f* srcf = distorted.getMat().ptr<cv::Vec2f>();
    const cv::Vec2d* srcd = distorted.getMat().ptr<cv::Vec2d>();
    cv::Vec3f* dstf = undistorted.getMat().ptr<cv::Vec3f>();
    cv::Vec3d* dstd = undistorted.getMat().ptr<cv::Vec3d>();

    size_t n = distorted.total();
    int sdepth = distorted.depth();

    for(size_t i = 0; i < n; i++ )
    {
        cv::Vec2d pi = sdepth == CV_32F ? (cv::Vec2d)srcf[i] : srcd[i];  // image point
        cv::Vec2d pw((pi[0] - c[0])/f[0], (pi[1] - c[1])/f[1]);      // normalized (still distorted) image coordinates

        double theta_d = sqrt(pw[0]*pw[0] + pw[1]*pw[1]);
        double theta = theta_d;
        if (theta_d > 1e-8)
        {
            // Compensate distortion iteratively: invert theta_d = theta * (1 + k[0]*theta^2 + k[1]*theta^4 + k[2]*theta^6 + k[3]*theta^8) by fixed-point iteration.
            for(int j = 0; j < 10; j++ )
            {
                double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta6*theta2;
                theta = theta_d / (1 + k[0] * theta2 + k[1] * theta4 + k[2] * theta6 + k[3] * theta8);
            }
        }
        double z = std::cos(theta);
        double r = std::sin(theta);

        cv::Vec3d pu = cv::Vec3d(r*pw[0], r*pw[1], z); //undistorted point

        // reproject
        cv::Vec3d pr = RR * pu; // rotated point optionally multiplied by new camera matrix
        cv::Vec3d fi;       // final
        normalize(pr, fi);

        if( sdepth == CV_32F )
            dstf[i] = fi;
        else
            dstd[i] = fi;
    }
}
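A usage sketch for the function above; the intrinsics, distortion coefficients, and image points are made-up sample values:

std::vector<cv::Point2d> pts = { {320.0, 240.0}, {100.5, 200.25} };   // sample fisheye image points
cv::Matx33d K(300.0,   0.0, 320.0,
                0.0, 300.0, 240.0,
                0.0,   0.0,   1.0);                                   // sample camera matrix
cv::Vec4d D(0.01, -0.002, 0.0003, 0.0);                               // sample k1..k4 of the equidistant model
cv::Mat rays;                                                         // output: CV_64FC3, one unit-length ray per point
unprojectPointsFisheye(pts, rays, K, D, cv::noArray(), cv::noArray());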
Example #3
bool VideoWriter_IntelMFX::write_one(cv::InputArray bgr)
{
    mfxStatus res;
    mfxFrameSurface1 *workSurface = 0;
    mfxSyncPoint sync;

    if (!bgr.empty() && (bgr.dims() != 2 || bgr.type() != CV_8UC3 || bgr.size() != frameSize))
    {
        MSG(cerr << "MFX: invalid frame passed to encoder: "
            << "dims/depth/cn=" << bgr.dims() << "/" << bgr.depth() << "/" << bgr.channels()
            << ", size=" << bgr.size() << endl);
        return false;

    }
    if (!bgr.empty())
    {
        workSurface = pool->getFreeSurface();
        if (!workSurface)
        {
            // not enough surfaces
            MSG(cerr << "MFX: Failed to get free surface" << endl);
            return false;
        }
        const int rows = workSurface->Info.Height;
        const int cols = workSurface->Info.Width;
        Mat Y(rows, cols, CV_8UC1, workSurface->Data.Y, workSurface->Data.Pitch);
        Mat UV(rows / 2, cols, CV_8UC1, workSurface->Data.UV, workSurface->Data.Pitch);
        to_nv12(bgr, Y, UV);
        CV_Assert(Y.ptr() == workSurface->Data.Y);
        CV_Assert(UV.ptr() == workSurface->Data.UV);
    }

    while (true)
    {
        outSurface = 0;
        DBG(cout << "Calling with surface: " << workSurface << endl);
        res = encoder->EncodeFrameAsync(NULL, workSurface, &bs->stream, &sync);
        if (res == MFX_ERR_NONE)
        {
            res = session->SyncOperation(sync, 1000); // 1 sec, TODO: provide interface to modify timeout
            if (res == MFX_ERR_NONE)
            {
                // ready to write
                if (!bs->write())
                {
                    MSG(cerr << "MFX: Failed to write bitstream" << endl);
                    return false;
                }
                else
                {
                    DBG(cout << "Write bitstream" << endl);
                    return true;
                }
            }
            else
            {
                MSG(cerr << "MFX: Sync error: " << res << endl);
                return false;
            }
        }
        else if (res == MFX_ERR_MORE_DATA)
        {
            DBG(cout << "ERR_MORE_DATA" << endl);
            return false;
        }
        else if (res == MFX_WRN_DEVICE_BUSY)
        {
            DBG(cout << "Waiting for device" << endl);
            sleep(1);
            continue;
        }
        else
        {
            MSG(cerr << "MFX: Bad status: " << res << endl);
            return false;
        }
    }
}
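to_nv12 is defined elsewhere in this writer. A sketch of one way such a BGR-to-NV12 conversion could be implemented on top of the pre-wrapped Y and UV surface planes (an assumption, not the actual helper):

static void bgrToNV12Sketch(const cv::Mat& bgr, cv::Mat& Y, cv::Mat& UV)
{
    CV_Assert(bgr.type() == CV_8UC3 && bgr.cols % 2 == 0 && bgr.rows % 2 == 0);
    const int w = bgr.cols, h = bgr.rows;

    // Convert to planar I420 first: h rows of Y, then h/4 rows of U, then h/4 rows of V.
    cv::Mat i420;
    cv::cvtColor(bgr, i420, cv::COLOR_BGR2YUV_I420);

    // Luma plane: copy into the top-left region (the surface may be padded to a larger size).
    i420(cv::Rect(0, 0, w, h)).copyTo(Y(cv::Rect(0, 0, w, h)));

    // Chroma: view U and V as h/2 x w/2 planes, then interleave them as UVUV... for NV12.
    cv::Mat U = i420(cv::Rect(0, h, w, h / 4)).reshape(1, h / 2);
    cv::Mat V = i420(cv::Rect(0, h + h / 4, w, h / 4)).reshape(1, h / 2);
    cv::Mat uv;
    cv::merge(std::vector<cv::Mat>{U, V}, uv);          // h/2 x w/2, CV_8UC2
    uv.reshape(1, h / 2).copyTo(UV(cv::Rect(0, 0, w, h / 2)));
}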
	void disparityFitPlane(cv::InputArray disparity, cv::InputArray image, cv::OutputArray dest, int slicRegionSize, float slicRegularization, float slicMinRegionRatio, int slicMaxIteration, int ransacNumofSample, float ransacThreshold)
	{
		//disparityFitTest(ransacNumofSample, ransacThreshold);
		//cv::FileStorage pointxml("planePoint.xml", cv::FileStorage::WRITE); int err = 0;

		Mat segment;
		SLIC(image, segment, slicRegionSize, slicRegularization, slicMinRegionRatio, slicMaxIteration);
		
		vector<vector<Point3f>> points;
		SLICSegment2Vector3D_<float>(segment, disparity, 0, points);

		Mat disp32f = Mat::zeros(image.size(), CV_32F);

		for (int i = 0; i < points.size(); ++i)
		{	
			if (points[i].size() < 3)
			{
				if (!points[i].empty())
				{
					for (int j = 0; j < points[i].size(); ++j)
					{
						points[i][j].z = 0.f;
					}
				}
			}
			else
			{
				Point3f abc;

				fitPlaneRANSAC(points[i], abc, ransacNumofSample, ransacThreshold, 1);

				// for refinement (if necessary)
				int v = countArrowablePointDistanceZ(points[i], abc, ransacThreshold);
				/*double rate = (double)v / points[i].size() * 100;
				int itermax = 1;
				for (int n = 0; n < itermax;n++)
				{
					if (rate < 30)
					{
						//pointxml <<format("point%03d",err++)<< points[i];
						fitPlaneRANSAC(points[i], abc, ransacNumofSample, ransacThreshold, 1);
						v = countArrowablePointDistanceZ(points[i], abc, ransacThreshold);
						rate = (double)v / points[i].size() * 100;
					}
				}*/
	
				for (int j = 0; j < points[i].size(); ++j)
				{
					points[i][j].z = points[i][j].x*abc.x + points[i][j].y*abc.y + abc.z; // evaluate the fitted plane z = a*x + b*y + c
				}			
			}
		}

		SLICVector3D2Signal(points, image.size(), disp32f);
		if (disparity.depth() == CV_32F)
		{
			disp32f.copyTo(dest);
		}
		else if (disparity.depth() == CV_8U || disparity.depth() == CV_16U || disparity.depth() == CV_16S || disparity.depth() == CV_32S)
		{
			disp32f.convertTo(dest, disparity.type(), 1.0, 0.5);
		}
		else
		{
			disp32f.convertTo(dest, disparity.type());
		}
	}
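An illustrative call to disparityFitPlane; the file names and parameter values are placeholders, not tuned recommendations:

cv::Mat image = cv::imread("left.png");                                // guide color image (placeholder path)
cv::Mat disparity = cv::imread("disp.png", cv::IMREAD_GRAYSCALE);      // CV_8U disparity map (placeholder path)
cv::Mat refined;
disparityFitPlane(disparity, image, refined,
                  16,    /* slicRegionSize */
                  30.f,  /* slicRegularization */
                  0.25f, /* slicMinRegionRatio */
                  10,    /* slicMaxIteration */
                  3,     /* ransacNumofSample: a plane needs at least 3 points */
                  1.f    /* ransacThreshold */);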