Example #1
// buf must be fftwf_malloc'd with size `sizeof(float) * fftLen`
//
// im should be offset from fix by approximately hint.
Point phaseCorr(const float *fix, const float *im, const Size &imSz, const fftwf_plan &plan, float *buf, const Point2f &hint,
				double &bestSqDist, const Mat &mask, Point &maxLoc, float &conf, const ConfGetter getConf, const char *saveIm) {
	unsigned fftLen = getFFTLen(imSz);
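	// Form the normalized cross-power spectrum buf = fix * conj(im) / |fix * conj(im)|;
	// the spectra are stored as interleaved (re, im) float pairs.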
	for (unsigned i = 0; i < fftLen; i += 2) {
		float a = fix[i] * im[i] + fix[i + 1] * im[i + 1];
		float b = fix[i + 1] * im[i] - fix[i] * im[i + 1];
		float norm = sqrt(a * a + b * b);
		buf[i] = (norm != 0) ? (a / norm) : 0;
		buf[i + 1] = (norm != 0) ? (b / norm) : 0;
	}

	fftwf_execute_dft_c2r(plan, (fftwf_complex *)buf, buf);

	Mat bufMat(imSz.height, imSz.width + 2, CV_32FC1, buf);
	//	bufMat = abs(bufMat);
	blur(bufMat, bufMat, Size(21, 21));	// smooth the correlation surface before peak picking

	if (saveIm) {
		saveFloatIm(saveIm, bufMat);
	}

	minMaxLoc(bufMat, NULL, NULL, NULL, &maxLoc, mask);

	// there are four potential shifts corresponding to one peak
	// we choose the shift that is closest to the microscope's guess for the offset between the two
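	// e.g. for a 640x480 image with the peak at (600, 20), the candidates
	// are (600, 20), (-40, 20), (600, -460), and (-40, -460)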
	Point bestPt;
	bestSqDist = 1e99;
	for (int dx = -imSz.width; dx <= 0; dx += imSz.width) {
		for (int dy = -imSz.height; dy <= 0; dy += imSz.height) {
			Point curPt(maxLoc.x + dx, maxLoc.y + dy);
			double curSqDist = getSqDist(curPt, hint);
			if (curSqDist < bestSqDist) {
				bestSqDist = curSqDist;
				bestPt = curPt;
			}
		}
	}

	conf = getConf(bufMat, maxLoc, bestSqDist);

	return bestPt;
}
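
// A minimal driver sketch for phaseCorr, assuming getFFTLen returns
// height * (width + 2) floats (the in-place c2r padding implied by bufMat
// above) and that a ConfGetter callback is available; runPhaseCorr itself
// is illustrative, not part of the original API.
static Point runPhaseCorr(const float *fixFFT, const float *imFFT, const Size &imSz,
						  const Mat &mask, const Point2f &hint, ConfGetter getConf) {
	unsigned fftLen = getFFTLen(imSz);
	float *buf = (float *)fftwf_malloc(sizeof(float) * fftLen);

	// in-place complex-to-real inverse transform over the padded buffer
	fftwf_plan plan = fftwf_plan_dft_c2r_2d(imSz.height, imSz.width,
											(fftwf_complex *)buf, buf, FFTW_ESTIMATE);

	double bestSqDist;
	Point maxLoc;
	float conf;
	Point shift = phaseCorr(fixFFT, imFFT, imSz, plan, buf, hint,
							bestSqDist, mask, maxLoc, conf, getConf, NULL);

	fftwf_destroy_plan(plan);
	fftwf_free(buf);
	return shift;
}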
Example #2
/*

	Emits particles with color sampled from the specified
	shading node / shading engine.

*/
MStatus sampleParticles::doIt( const MArgList& args )
{
	unsigned int i;
	bool shadow = false;
	bool reuse = false;

	for ( i = 0; i < args.length(); i++ )
		if ( args.asString(i) == MString("-shadow") || 
			args.asString(i) == MString("-s") )
			shadow = true;
		else if ( args.asString(i) == MString("-reuse") || 
			args.asString(i) == MString("-r") )
			reuse = true;
		else
			break;
	if ( args.length() - i < 5 )
	{
		displayError( "Usage: sampleParticles [-shadow|-reuse] particleName <shadingEngine|shadingNode.plug> resX resY scale\n"
			"  Example: sampleParticles -shadow particle1 phong1SG 64 64 10;\n"
			"  Example: sampleParticles particle1 file1.outColor 128 128 5;\n" );
		return MS::kFailure;
	}
	if ( reuse && !shadow )	// can only reuse if shadow is turned on
		reuse = false;

	MString particleName = args.asString( i );
	MString node = args.asString( i+1 );
	int resX = args.asInt( i+2 );
	int resY = args.asInt( i+3 );
	double scale = args.asDouble( i+4 );

	if ( scale <= 0.0 )
		scale = 1.0;

	MFloatArray uCoord, vCoord;
	MFloatPointArray points;
	MFloatVectorArray normals, tanUs, tanVs;

	if ( resX <= 0 )
		resX = 1;
	if ( resY <= 0 )
		resY = 1;

	MString command( "emit -o " );
	command += particleName;
	char tmp[2048];

	float stepU = (float) (1.0 / resX);
	float stepV = (float) (1.0 / resY);

	// fill the sample arrays by iterating over the grid;
	// Y is set to arch along the X axis

	int x, y;
	for ( y = 0; y < resY; y++ )
		for ( x = 0; x < resX; x++ )
		{
			uCoord.append( stepU * x );
			vCoord.append( stepV * y );

			float curY = (float) (sin( stepU * (x) * M_PI )*2.0);

			MFloatPoint curPt(
				(float) (stepU * x * scale),
				curY,
				(float) (stepV * y * scale ));

			MFloatPoint uPt(
				(float) (stepU * (x+1) * scale),
				(float) (sin( stepU * (x+1) * M_PI )*2.0),
				(float) (stepV * y * scale ));

			MFloatPoint vPt(
				(float) (stepU * (x) * scale),
				curY,
				(float) (stepV * (y+1) * scale ));

			MFloatVector du, dv, n;
			du = uPt-curPt;
			dv = vPt-curPt;

			n = dv^du;	// normal is dV x dU (^ is the cross product)
			n = n.normal();
			normals.append( n );

			du.normalize();
			dv.normalize();
			tanUs.append( du );
			tanVs.append( dv );

			points.append( curPt );
		}

	// get current camera's world matrix

	MDagPath cameraPath;
	M3dView::active3dView().getCamera( cameraPath );
	MMatrix mat = cameraPath.inclusiveMatrix();
	MFloatMatrix cameraMat( mat.matrix );

	MFloatVectorArray colors, transps;
	if ( MS::kSuccess == MRenderUtil::sampleShadingNetwork( 
			node, 
			points.length(),
			shadow,
			reuse,

			cameraMat,

			&points,
			&uCoord,
			&vCoord,
			&normals,
			&points,	// reuse the sample positions as refPoints
			&tanUs,
			&tanVs,
			NULL,	// don't need filterSize

			colors,
			transps ) )
	{
		fprintf( stderr, "%u points sampled...\n", points.length() );
		for ( i = 0; i < uCoord.length(); i++ )
		{
			sprintf( tmp, " -pos %g %g %g -at velocity -vv %g %g %g -at rgbPP -vv %g %g %g",
				points[i].x,
				points[i].y,
				points[i].z,

				normals[i].x,
				normals[i].y,
				normals[i].z,

				colors[i].x,
				colors[i].y,
				colors[i].z );

			command += MString( tmp );

			// execute the emit command once every 512 samples
			if ( (i + 1) % 512 == 0 )
			{
				fprintf( stderr, "%u...\n", i + 1 );
				MGlobal::executeCommand( command, false, false );
				command = MString( "emit -o " );
				command += particleName;
			}
		}

		if ( i % 512 )
			MGlobal::executeCommand( command, true, true );
	}
	else
	{
		displayError( node + MString(" is not a shading engine!  Specify node.attr or shading group node." ) );
		return MS::kFailure;
	}

	return MS::kSuccess;
}
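
// A hypothetical registration sketch for completeness (requires
// <maya/MFnPlugin.h>); sampleParticles is assumed to follow the usual
// MPxCommand pattern with a static creator() method.
MStatus initializePlugin( MObject obj )
{
	MFnPlugin plugin( obj, "Example", "1.0", "Any" );
	return plugin.registerCommand( "sampleParticles", sampleParticles::creator );
}

MStatus uninitializePlugin( MObject obj )
{
	MFnPlugin plugin( obj );
	return plugin.deregisterCommand( "sampleParticles" );
}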
bool StairDetection::DetermineStairs(cv::InputArray depthImg, std::vector<cv::Point> &stairMidLine, std::vector<cv::Point> &stairMidPoints)
{
	// stairs should have at least 3 edges.
	if (stairMidPoints.size() < 3)
		return false;

	// current holds the current found depth.
	int current = -1, above = -1, below = -1;
	int previous = -1, zeroCount = 0;

	const int ZeroConsecutiveLimit = 10;
	const int PreviousDeltaAllowance = 5;

	cv::LineIterator it(depthImg.getMat(), stairMidLine[0], stairMidLine[1], 8, false);
	std::vector<cv::Point>::iterator midpts = stairMidPoints.begin();

	// walk the first half of the mid-line
	for (int i = 3; i < it.count / 2.0; i++, ++it) 
	{
		// stop once every stair edge has been checked.
		if (midpts == stairMidPoints.end())
			break;

		// skip if this row is not a stair edge;
		// the stair edges are stored in midpts.
		if (i != midpts->y)
			continue;

		/// Let current = depth at stairEdge at y;
		///     above   = depth at stairEdge at y + 3;
		///     below   = depth at stairEdge at y - 3;
		cv::Point curPt(it.pos());
		cv::Point abvPt(it.pos().x, it.pos().y + 3);
		cv::Point blwPt(it.pos().x, it.pos().y - 3);
		current = (int)depthImg.getMat().at<uchar>(curPt);
		above = (int)depthImg.getMat().at<uchar>(abvPt);
		below = (int)depthImg.getMat().at<uchar>(blwPt);

		// if the difference between above and current is < 3 depth units &&
		//    the difference between current and below is > 9 depth units,
		if (((above - current) < 3) && (current - below) > 9)
			// then it could be stairs, update to next stair edge.
			++midpts;
		else
			// else it's not stairs at all.
			return false;
	}


	cv::LineIterator ascendingIt(depthImg.getMat(), stairMidLine[0], stairMidLine[1], 8, false);
	// walk the first half of the mid-line.
	for (int i = 0; i < ascendingIt.count / 2.0; i++, ++ascendingIt)
	{
		current = (int)depthImg.getMat().at<uchar>(ascendingIt.pos());

		if (current == 0) {
			/// if too many consecutive empty depth, 
			/// then this image is too corrupted / does not have stairs
			if (++zeroCount > ZeroConsecutiveLimit)
				return false;

			continue;
		} 
		zeroCount = 0;	
			
		/// Stairs should have ascending depth values;
		/// however, on stairs viewed at an angle the depth can occasionally drop a bit.
		/// Otherwise it's not stairs at all.
		if (current > previous)
			previous = current;
		else if (current > (previous - PreviousDeltaAllowance))
			continue;
		else
			return false;
	}

	return true;
}
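
// A hypothetical caller sketch (assuming DetermineStairs is declared static
// in StairDetection); the mid-line endpoints and candidate edge points would
// come from earlier detection stages.
bool checkStairs(const cv::Mat &depth8u,
				 std::vector<cv::Point> &midLine,
				 std::vector<cv::Point> &edgePoints)
{
	// DetermineStairs reads the map with at<uchar>(), so it must be CV_8UC1,
	// and the mid-line needs both endpoints.
	CV_Assert(depth8u.type() == CV_8UC1 && midLine.size() >= 2);
	return StairDetection::DetermineStairs(depth8u, midLine, edgePoints);
}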
/*********************w is variable********************************/
Mat corrector::latitudeCorrection4(Mat imgOrg, Point2i center, int radius, double w_longtitude, double w_latitude, distMapMode distMap, double theta_left, double phi_up, double camerFieldAngle, camMode camProjMode)
{
	if (!(camerFieldAngle > 0 && camerFieldAngle <= PI))
	{
		cout << "The parameter \"camerFieldAngle\" must be in the interval (0,PI]." << endl;
		return Mat();
	}
	double rateOfWindow = 0.9;

	//int width = imgOrg.size().width*rateOfWindow;
	//int height = width;

	//int width = max(imgOrg.cols, imgOrg.rows);
	int width = 512;
	int height = width;
	//int height = imgOrg.rows;


	Size imgSize(width, height);
	int center_x = imgSize.width / 2;
	int center_y = imgSize.height / 2;

	Mat retImg(imgSize, CV_8UC3, Scalar(0, 0, 0));

	double dx = camerFieldAngle / imgSize.width;
	double dy = camerFieldAngle / imgSize.height;

	// coordinates on the latitude map
	double latitude;
	double longitude;

	// unit sphere coordinates
	double x, y, z, r;

	// parametric coordinates of the sphere
	double Theta_sphere;
	double Phi_sphere;

	// polar coordinates on the fish-eye image
	double p;
	double theta;

	// Cartesian coordinates
	double x_cart, y_cart;

	// image coordinates in imgOrg
	double u, v;
	Point pt, pt1, pt2, pt3, pt4;

	// image coordinates in imgRet
	int u_latitude, v_latitude;
	Rect imgArea(0, 0, imgOrg.cols, imgOrg.rows);

	// offset of the imgRet origin
	double longitude_offset, latitude_offset;
	longitude_offset = (PI - camerFieldAngle) / 2;
	latitude_offset = (PI - camerFieldAngle) / 2;

	double foval = 0.0;	// focal length


	cv::Mat_<Vec3b> _retImg = retImg;
	cv::Mat_<Vec3b> _imgOrg = imgOrg;

	// calibrate according to the camera type
	double  limi_latitude = 2 * auxFunc(w_latitude, 0);
	double  limi_longtitude = 2 * auxFunc(w_longtitude, 0);
	for (int j = 0; j < imgSize.height; j++)
	{

		for (int i = 0; i < imgSize.width; i++)
		{
			Point3f tmpPt(i - center_x, center_y - j, 600);	// the last argument sets the focal length of the imaging plane
			double normPt = norm(tmpPt);

			switch (distMap)
			{
			case PERSPECTIVE:

				tmpPt.x /= normPt;
				tmpPt.y /= normPt;
				tmpPt.z /= normPt;

				x = tmpPt.x;
				y = tmpPt.y;
				z = tmpPt.z;

				break;
			case LATITUDE_LONGTITUDE:

				//latitude = latitude_offset + j*dy;

				latitude = getPhi1((double)j*limi_latitude / imgSize.height, w_latitude);
				//longitude = getPhi1((double)i * limi_longtitude / imgSize.width,w_longtitude);

				//latitude = latitude_offset + j*dy;
				longitude = longitude_offset + i*dx;
				// convert from latitude/longitude coordinates to sphere coordinates
				x = -sin(latitude)*cos(longitude);
				y = cos(latitude);
				z = sin(latitude)*sin(longitude);

				break;
			default:
				break;
			}

			if (distMap == PERSPECTIVE)
			{
				//double theta = PI/4;
				//double phi = -PI/2;
				cv::Mat curPt(cv::Point3f(x, y, z));
				std::vector<cv::Point3f> pts;

				// rotate the globe eastward
				//pts.push_back(cv::Point3f(cos(theta), 0, -sin(theta)));
				//pts.push_back(cv::Point3f(0, 1, 0));
				//pts.push_back(cv::Point3f(sin(theta), 0, cos(theta)));

				// rotate the globe southward
				//pts.push_back(cv::Point3f(1, 0, 0));
				//pts.push_back(cv::Point3f(0, cos(phi), sin(phi)));
				//pts.push_back(cv::Point3f(0, -sin(phi), cos(phi)));

				// rotate in both directions
				pts.push_back(cv::Point3f(cos(theta_left), 0, sin(theta_left)));
				pts.push_back(cv::Point3f(sin(phi_up)*sin(theta_left), cos(phi_up), -sin(phi_up)*cos(theta_left)));
				pts.push_back(cv::Point3f(-cos(phi_up)*sin(theta_left), sin(phi_up), cos(phi_up)*cos(theta_left)));


				cv::Mat revert = cv::Mat(pts).reshape(1).t();

				cv::Mat changed(revert*curPt);

				cv::Mat_<double> changed_double;
				changed.convertTo(changed_double, CV_64F);

				x = changed_double.at<double>(0, 0);
				y = changed_double.at<double>(1, 0);
				z = changed_double.at<double>(2, 0);

				//std::cout << curPt << std::endl
				//	<<revert<<std::endl;
			}

			// convert from unit sphere coordinates to parametric sphere coordinates
			Theta_sphere = acos(z);
			Phi_sphere = cvFastArctan(y, x);	// returns degrees
			Phi_sphere = Phi_sphere*PI / 180;	// convert degrees to radians


			switch (camProjMode)
			{
			case STEREOGRAPHIC:
				foval = radius / (2 * tan(camerFieldAngle / 4));
				p = 2 * foval*tan(Theta_sphere / 2);
				break;
			case EQUIDISTANCE:
				foval = radius / (camerFieldAngle / 2);
				p = foval*Theta_sphere;
				break;
			case EQUISOLID:
				foval = radius / (2 * sin(camerFieldAngle / 4));
				p = 2 * foval*sin(Theta_sphere / 2);
				break;
			case ORTHOGONAL:
				foval = radius / sin(camerFieldAngle / 2);
				p = foval*sin(Theta_sphere);
				break;
			default:
				cout << "The camera projection mode hasn't been chosen!" << endl;
				p = 0;	// fall back so p is never used uninitialized
			}
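			// The cases above are the standard fish-eye projection models,
			// mapping incidence angle Theta_sphere to image radius p (f = foval),
			// with f chosen so that p = radius at Theta = camerFieldAngle / 2:
			//   stereographic: p = 2f*tan(Theta/2)    equidistance: p = f*Theta
			//   equisolid:     p = 2f*sin(Theta/2)    orthogonal:   p = f*sin(Theta)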
			// convert from parametric sphere coordinates to fish-eye polar coordinates
			//p = sin(Theta_sphere);
			theta = Phi_sphere;

			// convert from fish-eye polar coordinates to Cartesian coordinates
			x_cart = p*cos(theta);
			y_cart = p*sin(theta);

			//double R = radius / sin(camerFieldAngle / 2);

			// convert from Cartesian coordinates to image coordinates
			u = x_cart + center.x;
			v = -y_cart + center.y;

			pt = Point(u, v);

			if (pt.inside(imgArea))
			{
				_retImg.at<Vec3b>(j, i) = _imgOrg.at<Vec3b>(pt);
			}


		}
	}

	//imshow("org", _imgOrg);
	//imshow("ret", _retImg);
	//cv::waitKey();
#ifdef _DEBUG_
	cv::namedWindow("Corrected Image", CV_WINDOW_AUTOSIZE);
	imshow("Corrected Image", retImg);
	cv::waitKey();
#endif
	imwrite("ret.jpg", retImg);
	return retImg;
}
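
// A minimal driver sketch for latitudeCorrection4; the image-circle center,
// radius, and parameter values used here are illustrative guesses and would
// normally come from a fish-eye calibration step.
Mat correctFisheye(corrector &c, const Mat &fisheye)
{
	Point2i center(fisheye.cols / 2, fisheye.rows / 2);		// center of the image circle
	int radius = std::min(fisheye.cols, fisheye.rows) / 2;	// radius of the image circle

	return c.latitudeCorrection4(fisheye, center, radius,
		0.5, 0.5,				// w_longtitude, w_latitude
		LATITUDE_LONGTITUDE,	// distMap: latitude/longitude mapping
		0.0, 0.0,				// theta_left, phi_up: no extra rotation
		PI,						// camerFieldAngle: a 180-degree lens
		EQUIDISTANCE);			// camProjMode: equidistant lens model
}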