Code example #1
File: mcore.hpp  Project: nan7674/np5
	// Probe f one step in each direction along every coordinate of p and keep
	// the best single-coordinate move found; returns {improvement found, best value}.
	template <typename F, typename P>
	inline std::pair<bool, double> neigbour_search_total(
			F f, P& p, double const step) {
		bool min_found = false;
		P      min_point(p);
		double min_value = f(p);
		for (size_t i = 0; i < p.size(); ++i) {
			double const v = p[i];

			p[i] -= step;
			double const fm = f(p);
			if (fm < min_value) {
				min_point = p;
				min_value = fm;
				min_found = true;
			}

			p[i] = v + step;
			double const fp = f(p);
			if (fp < min_value) {
				min_point = p;
				min_value = fp;
				min_found = true;
			}

			p[i] = v;
		}

		p = min_point;
		return std::make_pair(min_found, min_value);
	}
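
A minimal driver for this routine is sketched below, assuming the template header added above; the quadratic objective and the std::vector parameter type are illustrative choices, not part of the original project.

	#include <cstdio>
	#include <utility>
	#include <vector>

	int main() {
		// Minimize (x-1)^2 + y^2 starting from the origin.
		auto f = [](std::vector<double> const& q) {
			return (q[0] - 1.0) * (q[0] - 1.0) + q[1] * q[1];
		};
		std::vector<double> p{0.0, 0.0};
		std::pair<bool, double> r = neigbour_search_total(f, p, 0.25);
		std::printf("improved=%d value=%f at (%f, %f)\n",
			int(r.first), r.second, p[0], p[1]);
		return 0;
	}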
Code example #2
File: expand_by_epsilon.hpp  Project: 3Jade/Sprawl
template <typename Box>
inline void expand_by_epsilon(Box & box)
{
    typedef detail::indexed_point_view<Box, min_corner> min_type;
    min_type min_point(box);
    expand::corner_by_epsilon<min_type, std::minus>::apply(min_point);

    typedef detail::indexed_point_view<Box, max_corner> max_type;
    max_type max_point(box);
    expand::corner_by_epsilon<max_type, std::plus>::apply(max_point);
}
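
The snippet above relies on Boost.Geometry internals (detail::indexed_point_view and expand::corner_by_epsilon), so it is not usable on its own. The standalone sketch below illustrates the same idea under simpler assumptions: widen an axis-aligned box by one representable step per coordinate with std::nextafter. This is an illustration of the technique, not the library's actual implementation.

	#include <cmath>

	struct SimpleBox { double min_x, min_y, max_x, max_y; };

	inline void expand_by_one_ulp(SimpleBox& b)
	{
		// Move each corner one representable double away from the interior.
		b.min_x = std::nextafter(b.min_x, -INFINITY);
		b.min_y = std::nextafter(b.min_y, -INFINITY);
		b.max_x = std::nextafter(b.max_x,  INFINITY);
		b.max_y = std::nextafter(b.max_y,  INFINITY);
	}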
Code example #3
File: Cube.cpp  Project: BillyKim/directxcode
void Cube::InitCornerPoints(D3DXVECTOR3& front_bottom_left)
{
	// Calculate the min/max points of the cube.
	// The min point is the front-bottom-left corner of the cube.
	D3DXVECTOR3 min_point(front_bottom_left.x, front_bottom_left.y, front_bottom_left.z);

	// The max point is the back-top-right corner of the cube.
	D3DXVECTOR3 max_point(front_bottom_left.x + length_, front_bottom_left.y + length_, front_bottom_left.z + length_);

	/* The 8 points are counted first on the front side, starting from the
	 bottom-left corner in clockwise order, then on the back side in the same order.

	 // Front face
		1-----------2
		|  front    |
		|  side     |
		|           |
		|           |
		0-----------3
	*/
	corner_points_[0] = D3DXVECTOR3(min_point.x, min_point.y, min_point.z);
	corner_points_[1] = D3DXVECTOR3(min_point.x, max_point.y, min_point.z);
	corner_points_[2] = D3DXVECTOR3(max_point.x, max_point.y, min_point.z);
	corner_points_[3] = D3DXVECTOR3(max_point.x, min_point.y, min_point.z);

	/* Back face
	    5-----------6
		|  back     |
		|  side     |
		|           |
		|           |
		4-----------7
	*/
	corner_points_[4] = D3DXVECTOR3(max_point.x, min_point.y, max_point.z);
	corner_points_[5] = D3DXVECTOR3(max_point.x, max_point.y, max_point.z);
	corner_points_[6] = D3DXVECTOR3(min_point.x, max_point.y, max_point.z);
	corner_points_[7] = D3DXVECTOR3(min_point.x, min_point.y, max_point.z);

	// Initialize min_point_ and max_point_
	min_point_ = min_point;
	max_point_ = max_point;
}
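
The eight corners are simply every min/max combination per axis. A compact alternative enumerates them with a 3-bit index; the plain struct below is a hypothetical illustration, independent of D3DX, and yields a different ordering than the clockwise scheme above.

	struct Vec3 { float x, y, z; };

	// Corner i (0..7) takes the max coordinate on axis k iff bit k of i is set.
	inline Vec3 cube_corner(const Vec3& mn, const Vec3& mx, int i)
	{
		return Vec3{ (i & 1) ? mx.x : mn.x,
		             (i & 2) ? mx.y : mn.y,
		             (i & 4) ? mx.z : mn.z };
	}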
Code example #4
File: geometry.hpp  Project: golvok/IEEEXtreme-9.0
	PRECISION& minx() { return min_point().x; }
Code example #5
File: geometry.hpp  Project: golvok/IEEEXtreme-9.0
	PRECISION& miny() { return min_point().y; }
Code example #6
File: geometry.hpp  Project: golvok/IEEEXtreme-9.0
	/**
	 * These return their respective edge/point's location
	 */
	const PRECISION& minx() const { return min_point().x; }
Code example #7
File: geometry.hpp  Project: golvok/IEEEXtreme-9.0
	const PRECISION& miny() const { return min_point().y; }
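
These one-line accessors only make sense inside their class; the sketch below shows the kind of rectangle type they appear to belong to. The Point and Rect definitions here are assumptions for illustration, not the project's actual code.

	template <typename PRECISION>
	struct Point { PRECISION x, y; };

	template <typename PRECISION>
	class Rect {
	public:
		Point<PRECISION>& min_point() { return minp; }
		const Point<PRECISION>& min_point() const { return minp; }

		// Mutable and const accessors for the minimum corner's coordinates,
		// mirroring code examples #4 through #7.
		PRECISION& minx() { return min_point().x; }
		PRECISION& miny() { return min_point().y; }
		const PRECISION& minx() const { return min_point().x; }
		const PRECISION& miny() const { return min_point().y; }

	private:
		Point<PRECISION> minp;
	};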
Code example #8
File: main.cpp  Project: dejanpan/semantic_mapping
int main (int argc, char** argv)
{
  ros::init (argc, argv, "semantic_mapping_controller");
  ros::NodeHandle nh;
  Visualization visualizer;
  ParameterServer* ps = ParameterServer::instance ();
  std::vector<PointCloudConstPtr> vis_clouds;

  // import mesh
  MeshIO io_obj;

  ROS_INFO("Importing mesh to pointcloud model...");
  PointCloudPtr raw_import_ptr;
  raw_import_ptr = io_obj.loadMeshFromFile (ps->get<std::string> ("mesh_input_filename"));
  io_obj.savePointcloudToFile(raw_import_ptr, "raw_import.pcd");

  ROS_INFO("Performing principle axis alignment...");
  //align_principle_axis::FloorAxisAlignment axis_align;

  pluginlib::ClassLoader<align_principle_axis::AxisAlignment> loader("align_principle_axis", "align_principle_axis::AxisAlignment");

  align_principle_axis::AxisAlignment* axis_align = NULL;

  try
  {
    axis_align = loader.createClassInstance("align_principle_axis/FloorAxisAlignment");
  }
  catch(pluginlib::PluginlibException& ex)
  {
    ROS_ERROR("The plugin failed to load for some reason. Error: %s", ex.what());
    return 1;  // axis_align is still NULL here; bail out instead of dereferencing it below
  }

  Eigen::Matrix4f guess, align_trafo, move_to_origin;
  guess = Eigen::Matrix4f::Zero(4,4);
  guess(0,0) = 1.0;
  guess(1,2) = 1.0;
  guess(2,1) = -1.0;
  guess(3,3) = 1.0;
  PointCloudPtr model_aligned_ptr (new PointCloud);
  axis_align->alignCloudPrincipleAxis(raw_import_ptr, guess, model_aligned_ptr, align_trafo);

  visualizer.visualizeCloud(model_aligned_ptr);

  ROS_INFO("Applying boxfilter to cloud...");
  PointCloudPtr cabinet_cloud_ptr (new PointCloud);
  Eigen::Vector4f min_point (0.9, 0.8, -3.0, 1);
  Eigen::Vector4f max_point (1.8, 1.4, -1.3, 1);
  box_filter::filterCloud (model_aligned_ptr, min_point, max_point, cabinet_cloud_ptr);

  ROS_INFO("move model to origin...");
  PointCloudPtr cabinet_centered_cloud_ptr (new PointCloud);
  axis_align->moveModelToOrigin(cabinet_cloud_ptr, cabinet_centered_cloud_ptr, move_to_origin);
  visualizer.visualizeCloud(cabinet_centered_cloud_ptr);

//  ros::ServiceClient client;
//  client = nh.serviceClient<kinect_capture_frame::kinectSnapshot> ("kinect_snapshot_service");
//  kinect_capture_frame::kinectSnapshot get_kinect_frame_srv;
//  ROS_INFO("getting snapshot from kinect");
//  if (!client.call (get_kinect_frame_srv))
//  {
//    ROS_INFO("kinect snapshot service failed");
//    return 1;
//  }
//  PointCloudPtr kinect_cloud_ptr = convertSensorMsgPointCloudToPCL(get_kinect_frame_srv.response.pointcloud);
//  cv::Mat kinect_image = convertSensorMsgToCV(get_kinect_frame_srv.response.image);
//
//  ROS_INFO("using kinect snapshot from file");
//  kinect_image = io_obj.loadImageFromFile("/work/kidson/meshes/cabinet_scan_3/frames_to_register/image_2.png");
//  kinect_cloud_ptr = io_obj.loadPointcloudFromFile("/work/kidson/meshes/cabinet_scan_3/frames_to_register/pointcloud_2.pcd");
//  io_obj.savePointcloudToFile(kinect_cloud_ptr, "kinect_cloud.pcd");
//  io_obj.savePointcloudToFile(model_cloud_ptr, "model_cloud.pcd");
//
//  ROS_INFO("Registering Kinect to Model...");
//  std::vector<cv::Mat> images;
//  std::vector<Eigen::Matrix4f> transformations;
//  io_obj.loadImagesFromDir(ps->get<std::string> ("mesh_registration_images_directory"),images);
//  io_obj.loadTransformationsFromDir(ps->get<std::string> ("mesh_registration_transformations_directory"),transformations);
//  KinectRegistration kinect_reg;
//  Eigen::Matrix4f dildos;
//  dildos = kinect_reg.registerKinectToModel(model_cloud_ptr,kinect_cloud_ptr,kinect_image,images,transformations);
//  ROS_INFO_STREAM("final trafo \n " << dildos);

  return 0;
}
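
The box_filter::filterCloud call above is project-specific. With stock PCL, the same axis-aligned crop can be expressed with pcl::CropBox, as in this sketch (the point type and function name are placeholders):

	#include <pcl/filters/crop_box.h>
	#include <pcl/point_types.h>

	void cropCabinet(pcl::PointCloud<pcl::PointXYZ>::ConstPtr in,
	                 pcl::PointCloud<pcl::PointXYZ>::Ptr out)
	{
		pcl::CropBox<pcl::PointXYZ> box;
		box.setMin(Eigen::Vector4f(0.9f, 0.8f, -3.0f, 1.0f));  // same bounds as above
		box.setMax(Eigen::Vector4f(1.8f, 1.4f, -1.3f, 1.0f));
		box.setInputCloud(in);
		box.filter(*out);  // keep only the points inside the box
	}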
Code example #9
File: warp.cpp  Project: aaronaskew/synfig
bool
Warp::accelerated_cairorender(Context context, cairo_t *cr, int quality, const RendDesc &renddesc_, ProgressCallback *cb)const
{
	Point src_tl=param_src_tl.get(Point());
	Point src_br=param_src_br.get(Point());
	Point dest_tl=param_dest_tl.get(Point());
	Point dest_tr=param_dest_tr.get(Point());
	Point dest_bl=param_dest_bl.get(Point());
	Point dest_br=param_dest_br.get(Point());
	Real horizon=param_horizon.get(Real());
	bool clip=param_clip.get(bool());

	SuperCallback stageone(cb,0,9000,10000);
	SuperCallback stagetwo(cb,9000,10000,10000);
	
	
	RendDesc renddesc(renddesc_);
	// Untransform the render desc
	if(!cairo_renddesc_untransform(cr, renddesc))
		return false;
	
	Real pw=(renddesc.get_w())/(renddesc.get_br()[0]-renddesc.get_tl()[0]);
	Real ph=(renddesc.get_h())/(renddesc.get_br()[1]-renddesc.get_tl()[1]);
	
	if(cb && !cb->amount_complete(0,10000))
		return false;
	
	Point tl(renddesc.get_tl());
	Point br(renddesc.get_br());
	
	Rect bounding_rect;
	
	Rect render_rect(tl,br);
	Rect clip_rect(Rect::full_plane());
	Rect dest_rect(dest_tl,dest_br); dest_rect.expand(dest_tr).expand(dest_bl);
	
	Real zoom_factor(1.0);
	
	// Quick exclusion clip, if necessary
	if(clip && !intersect(render_rect,dest_rect))
	{
		cairo_save(cr);
		cairo_set_operator(cr, CAIRO_OPERATOR_CLEAR);
		cairo_paint(cr);
		cairo_restore(cr);
		return true;
	}
	
	{
		Rect other(render_rect);
		if(clip)
			other&=dest_rect;
		
		Point min(other.get_min());
		Point max(other.get_max());
		
		bool init_point_set=false;
		
		Point p;
		Real z,minz(10000000000000.0f),maxz(0);
		
		//! \todo checking the 4 corners for 0<=z<horizon*2 and using
		//! only 4 corners which satisfy this condition isn't the
		//! right thing to do.  It's possible that none of the 4
		//! corners fall within that range, and yet content of the
		//! tile does.
		
		// Project each corner of the (possibly clipped) render rectangle and
		// accumulate the bounding rectangle and z range of those corners that
		// land in front of the camera and within twice the horizon.
		const Point corners[4] = {
			min, max, Point(min[0],max[1]), Point(max[0],min[1]) };
		for(int i=0;i<4;i++)
		{
			p=transform_forward(corners[i]);
			z=transform_backward_z(p);
			if(z>0 && z<horizon*2)
			{
				if(init_point_set)
					bounding_rect.expand(p);
				else
					bounding_rect=Rect(p);
				init_point_set=true;
				maxz=std::max(maxz,z);
				minz=std::min(minz,z);
			}
		}
		
		// Nothing projected into view: the output is blank.
		if(!init_point_set)
		{
			cairo_save(cr);
			cairo_set_operator(cr, CAIRO_OPERATOR_CLEAR);
			cairo_paint(cr);
			cairo_restore(cr);
			return true;
		}
		zoom_factor=(1+(maxz-minz));
	}
	
#ifdef ACCEL_WARP_IS_BROKEN
	return Layer::accelerated_cairorender(context,cr,quality,renddesc, cb);
#else
	
	/*swap(tl[1],br[1]);
	 bounding_rect
	 .expand(transform_forward(tl))
	 .expand(transform_forward(br))
	 ;
	 swap(tl[1],br[1]);*/
	
	//synfig::warning("given window: [%f,%f]-[%f,%f] %dx%d",tl[0],tl[1],br[0],br[1],renddesc.get_w(),renddesc.get_h());
	//synfig::warning("Projected: [%f,%f]-[%f,%f]",bounding_rect.get_min()[0],bounding_rect.get_min()[1],bounding_rect.get_max()[0],bounding_rect.get_max()[1]);
	
	// If we are clipping, then go ahead and clip to the
	// source rectangle
	if(clip)
		clip_rect&=Rect(src_tl,src_br);
	
	// Bound ourselves to the bounding rectangle of
	// what is under us
	clip_rect&=context.get_full_bounding_rect();//.expand_x(abs(zoom_factor/pw)).expand_y(abs(zoom_factor/ph));
	
	bounding_rect&=clip_rect;
	
	Point min_point(bounding_rect.get_min());
	Point max_point(bounding_rect.get_max());
	
	// we're going to divide by the difference of these pairs soon;
	// if they're the same, we'll be dividing by zero, and we don't
	// want to do that!
	// \todo what should we do in this case?
	if (min_point[0] == max_point[0]) max_point[0] += 0.001;
	if (min_point[1] == max_point[1]) max_point[1] += 0.001;
	
	if(tl[0]>br[0])
	{
		tl[0]=max_point[0];
		br[0]=min_point[0];
	}
	else
	{
		br[0]=max_point[0];
		tl[0]=min_point[0];
	}
	if(tl[1]>br[1])
	{
		tl[1]=max_point[1];
		br[1]=min_point[1];
	}
	else
	{
		br[1]=max_point[1];
		tl[1]=min_point[1];
	}
	
	const int tmp_d(max(renddesc.get_w(),renddesc.get_h()));
	Real src_pw=(tmp_d*zoom_factor)/(br[0]-tl[0]);
	Real src_ph=(tmp_d*zoom_factor)/(br[1]-tl[1]);
	
	
	RendDesc desc(renddesc);
	desc.clear_flags();
	//desc.set_flags(RendDesc::PX_ASPECT);
	desc.set_tl(tl);
	desc.set_br(br);
	desc.set_wh(ceil_to_int(src_pw*(br[0]-tl[0])),ceil_to_int(src_ph*(br[1]-tl[1])));
	
	//synfig::warning("surface to render: [%f,%f]-[%f,%f] %dx%d",desc.get_tl()[0],desc.get_tl()[1],desc.get_br()[0],desc.get_br()[1],desc.get_w(),desc.get_h());
	if(desc.get_w()==0 && desc.get_h()==0)
	{
		cairo_save(cr);
		cairo_set_operator(cr, CAIRO_OPERATOR_CLEAR);
		cairo_paint(cr);
		cairo_restore(cr);
		return true;
	}
	
	// Recalculate the pixel widths for the src renddesc
	src_pw=(desc.get_w())/(desc.get_br()[0]-desc.get_tl()[0]);
	src_ph=(desc.get_h())/(desc.get_br()[1]-desc.get_tl()[1]);
	
	cairo_surface_t* source=cairo_surface_create_similar(cairo_get_target(cr), CAIRO_CONTENT_COLOR_ALPHA, desc.get_w(),desc.get_h());
	cairo_surface_t* surface=cairo_surface_create_similar(cairo_get_target(cr), CAIRO_CONTENT_COLOR_ALPHA,renddesc.get_w(), renddesc.get_h());
	cairo_t* subcr=cairo_create(source);
	cairo_scale(subcr, 1/desc.get_pw(), 1/desc.get_ph());
	cairo_translate(subcr, -desc.get_tl()[0], -desc.get_tl()[1]);

	if(!context.accelerated_cairorender(subcr,quality,desc,&stageone))
	{
		// Clean up the cairo objects before bailing out to avoid leaking them.
		cairo_destroy(subcr);
		cairo_surface_destroy(source);
		cairo_surface_destroy(surface);
		return false;
	}
	
	cairo_destroy(subcr);
		
	int surfacew, surfaceh, sourcew, sourceh;
	
	CairoSurface csurface(surface);
	CairoSurface csource(source);
	
	csurface.map_cairo_image();
	csource.map_cairo_image();
	
	surfacew=csurface.get_w();
	surfaceh=csurface.get_h();
	sourcew=csource.get_w();
	sourceh=csource.get_h();
	
	CairoSurface::pen pen(csurface.begin());
	
	// Do the warp
	{
		int x,y;
		float u,v;
		Point point,tmp;
		for(y=0,point[1]=renddesc.get_tl()[1];y<surfaceh;y++,pen.inc_y(),pen.dec_x(x),point[1]+=1.0/ph)
		{
			for(x=0,point[0]=renddesc.get_tl()[0];x<surfacew;x++,pen.inc_x(),point[0]+=1.0/pw)
			{
				tmp=transform_forward(point);
				const float z(transform_backward_z(tmp));
				if(!clip_rect.is_inside(tmp) || !(z>0 && z<horizon))
				{
					csurface[y][x]=Color::alpha();
					continue;
				}
				
				u=(tmp[0]-tl[0])*src_pw;
				v=(tmp[1]-tl[1])*src_ph;
				
				if(u<0 || v<0 || u>=sourcew || v>=sourceh || isnan(u) || isnan(v))
					csurface[y][x]=context.get_cairocolor(tmp);
				else
				{
					// CUBIC
					if(quality<=4)
						csurface[y][x]=csource.cubic_sample_cooked(u,v);
					// INTERPOLATION_LINEAR
					else if(quality<=6)
						csurface[y][x]=csource.linear_sample_cooked(u,v);
					else
						// NEAREST_NEIGHBOR
						csurface[y][x]=csource[floor_to_int(v)][floor_to_int(u)];
				}
			}
			if((y&31)==0 && cb)
			{
				if(!stagetwo.amount_complete(y,surfaceh))
					return false;
			}
		}
	}
	
#endif
	
	if(cb && !cb->amount_complete(10000,10000)) return false;
	
	csurface.unmap_cairo_image();
	csource.unmap_cairo_image();
	cairo_surface_destroy(source);
	
	cairo_save(cr);
	
	cairo_translate(cr, renddesc.get_tl()[0], renddesc.get_tl()[1]);
	cairo_scale(cr, renddesc.get_pw(), renddesc.get_ph());
	cairo_set_source_surface(cr, surface, 0, 0);
	cairo_set_operator(cr, CAIRO_OPERATOR_SOURCE);
	cairo_paint(cr);
	
	cairo_restore(cr);
	
	cairo_surface_destroy(surface);
	return true;
}
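
The heart of the routine is the inverse-mapping loop: for every destination pixel it computes the corresponding source position and samples it, writing transparency where the mapping falls outside the valid range. A stripped-down sketch of that pattern, using plain float buffers and nearest-neighbour sampling only (no synfig or cairo types), looks like this:

	#include <cmath>
	#include <vector>

	// src and dst are row-major grayscale images; map() is any
	// destination->source coordinate transform, playing the role of
	// transform_forward above.
	void inverse_warp(const std::vector<float>& src, int sw, int sh,
	                  std::vector<float>& dst, int dw, int dh,
	                  void (*map)(float dx, float dy, float& sx, float& sy))
	{
		for (int y = 0; y < dh; ++y)
			for (int x = 0; x < dw; ++x) {
				float sx, sy;
				map(float(x), float(y), sx, sy);
				int u = int(std::floor(sx));
				int v = int(std::floor(sy));
				dst[y * dw + x] = (u < 0 || v < 0 || u >= sw || v >= sh)
					? 0.0f               // outside the source: write "empty"
					: src[v * sw + u];   // nearest-neighbour sample
			}
	}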
Code example #10

int FaceTracker::track(cv::Mat& _curFrame, std::vector<cv::Point2f>& _curFeatures, cv::Rect& faceRect, double& executionTime){ 
	
	
	// do tracking in the subregion; we always have at least one history entry //
	cv::Rect prevRect = faceTrajectroy.at(faceTrajectroy.size()-1);
	int roix = (prevRect.x - 1.5 * prevRect.width) > 0 ? (prevRect.x - 1.5 * prevRect.width):0;
	int roiy = (prevRect.y - 1.5 * prevRect.height) > 0 ? (prevRect.y - 1.5 * prevRect.height):0;
	int roiW = (roix + (4 * prevRect.width)) <= _curFrame.cols? (4 * prevRect.width) :  (_curFrame.cols - roix);
	int roiH = (roiy + (4 * prevRect.height)) <= _curFrame.rows? (4 * prevRect.height) :  (_curFrame.rows - roiy);
	cv::Rect ROI( roix, roiy, roiW, roiH);
	//std::cout << "ROI" <<  ROI.x << "," << ROI.y << "," << ROI.width << "," << ROI.height <<std::endl;
	/**
	if (faceTrajectroy.size() > 1){
		cv::Rect faceVelocity;
		float alpha = 0.7;
		for (int i = 1 ; i < faceTrajectroy.size(); ++i){
			// estimate velocity
			cv::Rect pRect = faceTrajectroy.at(i-1);
			cv::Rect cRect = faceTrajectroy.at(i);

			if (i == 1){
				faceVelocity.x = cRect.x - pRect.x;
				faceVelocity.y = cRect.y - pRect.y;
				faceVelocity.width = cRect.width - pRect.width;
				faceVelocity.height = cRect.height - pRect.height;
			}else{
				faceVelocity.x = alpha * (cRect.x - pRect.x) + (1-alpha) * faceVelocity.x;
				faceVelocity.y = alpha * (cRect.y - pRect.y) + (1-alpha) * faceVelocity.y;
				faceVelocity.width = alpha * (cRect.width - pRect.width) + (1-alpha) * faceVelocity.width;
				faceVelocity.height = alpha * (cRect.height - pRect.height) + (1-alpha) * faceVelocity.height;
			}
			
		}
		//update ROI
		ROI.x = prevRect.x + faceVelocity.x;
		ROI.y = prevRect.y + faceVelocity.y;
		ROI.width = prevRect.width + faceVelocity.width;
		ROI.height = prevRect.height + faceVelocity.height;
	}
	**/
	// crop original image, update feature points // 
	cv::Mat croppedPrevFrame(prevFrame, ROI);
	cv::Mat croppedCurFrame(_curFrame, ROI);
	std::vector<cv::Point2f> croppedPrevFeatures;
	for (int i = 0 ; i < _prevFeatures.size(); ++i){
		cv::Point2f p(_prevFeatures.at(i).x - ROI.x, _prevFeatures.at(i).y - ROI.y);
		croppedPrevFeatures.push_back(p);
	}

	/**
	for (int i = 0; i < croppedPrevFeatures.size(); ++i){
		circle( croppedPrevFrame, croppedPrevFeatures[i], 3, cv::Scalar(0,255,0), -1 , 8);
	}
	cv::imshow("croppedFrame", croppedPrevFrame);
	cv::waitKey( 50 );
	**/
	// Track on subregion 
	int returnValue;
	std::vector<float> err;
	std::vector<uchar> status;
	std::vector<cv::Point2f> croppedCurFeatures;
	cv::calcOpticalFlowPyrLK(croppedPrevFrame, croppedCurFrame, croppedPrevFeatures, croppedCurFeatures,
		   status, err, cv::Size(31,31), 3, termcrit, 0, 0.001);
	
	//cv::calcOpticalFlowPyrLK(prevFrame, _curFrame, _prevFeatures, _curFeatures,
	//	   status, err, cv::Size(31,31), 3, termcrit, 0, 0.001);
	
	cv::Point2f min_point(FLT_MAX, FLT_MAX);
	cv::Point2f max_point(-FLT_MAX, -FLT_MAX);  // FLT_MIN is the smallest positive float, not the most negative
	// compact the points array to drop points lost during tracking,
	// and map the survivors back to the original image coordinates
	size_t i, k;
	std::vector<cv::Point2f> velocity;
	for (i = 0; i < croppedCurFeatures.size(); ++i){
		// adjust it to the correct position
		croppedCurFeatures.at(i).x +=  ROI.x;
		croppedCurFeatures.at(i).y +=  ROI.y;
		
	
		// status[i] == 0 means the feature has been lost
		//if (status[i] == 0 || distance(croppedCurFeatures[i].x, croppedCurFeatures[i].y , _prevFeatures[i].x , _prevFeatures[i].y) > 30){
		if (status[i] == 0){
			continue;
		}
		
		// compute the moving speed for each of the good feature point //
		cv::Point2f vp(croppedCurFeatures.at(i).x - _prevFeatures.at(i).x, croppedCurFeatures.at(i).y - _prevFeatures.at(i).y);
		velocity.push_back(vp); 
	}
	// tracker failed!
	/**
	if (velocity.size() < 3)
		return -1;
	
	std::sort(velocity.begin(), velocity.end(), myFunction);
	int mid = velocity.size()/2;
	int offset = velocity.size() % 2 == 0? mid: (mid+1); 
	float q1, q3 = 0;
	if (mid % 2 == 0){
		int q1_index = (mid/2 + 1) >= velocity.size()? (velocity.size()-1) :(mid/2 + 1);
		q1 = (sqrt(velocity.at((mid/2)).x * velocity.at((mid/2)).x + velocity.at((mid/2)).y * velocity.at((mid/2)).y) + 
		sqrt(velocity.at(q1_index).x * velocity.at(q1_index).x + velocity.at(q1_index).y * velocity.at(q1_index).y))/2;

		int q3_index_1 = (mid/2) + offset >= velocity.size()? velocity.size()-1:(mid/2) + offset;
		int q3_index_2 = (mid/2)+ 1 + offset >= velocity.size()? velocity.size()-1:(mid/2) + offset +1 ;

		q3 = (sqrt(velocity.at(q3_index_1).x * velocity.at(q3_index_1).x + velocity.at(q3_index_1).y * velocity.at(q3_index_1).y) + 
		sqrt(velocity.at(q3_index_2).x * velocity.at(q3_index_2).x + velocity.at(q3_index_2).y * velocity.at(q3_index_2).y))/2;
	
	}else{
		int q3_index = (mid/2) + offset >= velocity.size()? velocity.size()-1:(mid/2) + offset;
		q1 = sqrt(velocity.at((mid/2)).x * velocity.at((mid/2)).x + velocity.at((mid/2)).y * velocity.at((mid/2)).y);
		q3 = sqrt(velocity.at(q3_index).x * velocity.at(q3_index).x + velocity.at(q3_index).y * velocity.at(q3_index).y);
	}
	
	float interval = (q3 - q1) * 3;
	**/

	for(i = k = 0; i < croppedCurFeatures.size(); ++i){		

		// status[i] == 0 means the feature has been lost
		if (status[i] == 0){
			continue;
		}
		// for each working feature point //
		/**
		cv::Point2f vp(croppedCurFeatures.at(i).x - _prevFeatures.at(i).x, croppedCurFeatures.at(i).y - _prevFeatures.at(i).y);
		float v = sqrt(vp.x * vp.x) + (vp.y * vp.y);
		if (v  > q3 + interval){
			croppedCurFeatures.at(i).x = _prevFeatures.at(i).x + velocity.at(mid).x;
			croppedCurFeatures.at(i).y = _prevFeatures.at(i).y + velocity.at(mid).y;
		}
		**/
		croppedCurFeatures[k].x = croppedCurFeatures[i].x;
		croppedCurFeatures[k].y = croppedCurFeatures[i].y;

		min_point.x = Min(min_point.x, croppedCurFeatures[k].x);
		min_point.y = Min(min_point.y, croppedCurFeatures[k].y);
		max_point.x = Max(max_point.x, croppedCurFeatures[k].x);
		max_point.y = Max(max_point.y, croppedCurFeatures[k].y);
		
		++k;
	}
	croppedCurFeatures.resize(k);
	_curFeatures = croppedCurFeatures;



	// stabilize and decide the bounding box
	faceRect.x = cvRound(min_point.x);
	faceRect.y = cvRound(min_point.y);
	//faceRect.width = (cvRound((max_point.x - min_point.x)) +  cvRound((max_point.y - min_point.y)))/2;
	//faceRect.height = (cvRound((max_point.x - min_point.x)) +  cvRound((max_point.y - min_point.y)))/2;
	faceRect.width = cvRound(max_point.x - min_point.x);
	faceRect.height = cvRound(max_point.y - min_point.y);

	fc.faceRect = faceRect;
	fc.featurePoints = _curFeatures;

//	if (faceRect.width < 25 || faceRect.height < 25 || k < 15){
	if (faceRect.width < 25 || faceRect.height < 25 || k < 3){
	
		std::cout << "tracker fails because of width/height/k:" << faceRect.width << "," << faceRect.height << "," << k <<  std::endl;
		return -1;
	}

	// estimate execution time //
	executionTime = estimateExecutionTime(ROI, prevFrame);
	
	// save into history //
	faceTrajectroy.push_back(faceRect);
	if (faceTrajectroy.size() == HISTORY_NUM + 1){
		faceTrajectroy.pop_front();
	}

	// update //
	_curFrame.copyTo(prevFrame);
	_prevFeatures = _curFeatures;

	return 0;
}
Code example #11

int FaceTracker::subRegionTrack(cv::Mat& _curFrame, std::vector<cv::Point2f>& _curFeatures, cv::Rect& faceRect, double& executionTime){ 
	
	
	// do tracking in the subregion; we always have at least one history entry //
	cv::Rect prevRect = faceTrajectroy.at(faceTrajectroy.size()-1);
	int roix = (prevRect.x - prevRect.width) > 0 ? (prevRect.x - prevRect.width):0;
	int roiy = (prevRect.y - prevRect.height) > 0 ? (prevRect.y - prevRect.height):0;
	int roiW = (roix + (3 * prevRect.width)) <= _curFrame.cols? (3 * prevRect.width) :  (_curFrame.cols - roix);
	int roiH = (roiy + (3 * prevRect.height)) <= _curFrame.rows? (3 * prevRect.height) :  (_curFrame.rows - roiy);
	cv::Rect ROI( roix, roiy, roiW, roiH);
	
	// crop original image, update feature points // 
	cv::Mat croppedPrevFrame(prevFrame, ROI);
	cv::Mat croppedCurFrame(_curFrame, ROI);
	std::vector<cv::Point2f> croppedPrevFeatures;
	for (int i = 0 ; i < _prevFeatures.size(); ++i){
		cv::Point2f p(_prevFeatures.at(i).x - ROI.x, _prevFeatures.at(i).y - ROI.y);
		croppedPrevFeatures.push_back(p);
	}

	
	
	// Track on subregion 
	int returnValue;
	std::vector<float> err;
	std::vector<uchar> status;
	std::vector<cv::Point2f> croppedCurFeatures;
	cv::calcOpticalFlowPyrLK(croppedPrevFrame, croppedCurFrame, croppedPrevFeatures, croppedCurFeatures,
		   status, err, cv::Size(31,31), 3, termcrit, 0, 0.001);
	
	
	
	cv::Point2f min_point(FLT_MAX, FLT_MAX);
	cv::Point2f max_point(-FLT_MAX, -FLT_MAX);  // FLT_MIN is the smallest positive float, not the most negative
	// compact the points array to drop points lost during tracking,
	// and map the survivors back to the original image coordinates
	size_t i, k;
	for (i = k = 0; i < croppedCurFeatures.size(); ++i){
		
		// status[i] == 0 means the feature has been lost
		//if (status[i] == 0){
		//	continue;
		//}
		if (status[i] == 0 || distance(croppedCurFeatures[i].x, croppedCurFeatures[i].y , croppedPrevFeatures[i].x , croppedPrevFeatures[i].y) > 30){
			continue;
		}

		croppedCurFeatures[k].x = croppedCurFeatures[i].x + ROI.x;
		croppedCurFeatures[k].y = croppedCurFeatures[i].y + ROI.y;

		min_point.x = Min(min_point.x, croppedCurFeatures[k].x);
		min_point.y = Min(min_point.y, croppedCurFeatures[k].y);
		max_point.x = Max(max_point.x, croppedCurFeatures[k].x);
		max_point.y = Max(max_point.y, croppedCurFeatures[k].y);
		
		++k;
	
	}
	croppedCurFeatures.resize(k);
	_curFeatures = croppedCurFeatures;



	// stabilize and decide the bounding box
	faceRect.x = cvRound(min_point.x);
	faceRect.y = cvRound(min_point.y);
	faceRect.width = cvRound((max_point.x - min_point.x));
	faceRect.height = cvRound((max_point.y - min_point.y));

	fc.faceRect = faceRect;
	fc.featurePoints = _curFeatures;

	if (faceRect.width < 25 || faceRect.height < 25 || k < 15 || faceRect.height/(faceRect.width * 1.0) > 2.5){
		return -1;
	}
	// estimate execution time //
	executionTime = estimateExecutionTime(ROI, prevFrame);
	
	// save into history //
	faceTrajectroy.push_back(faceRect);
	if (faceTrajectroy.size() == HISTORY_NUM + 1){
		faceTrajectroy.pop_front();
	}

	// update //
	_curFrame.copyTo(prevFrame);
	_prevFeatures = _curFeatures;

	return 0;
}
Code example #12

int FaceTracker::trackWholeFrame(cv::Mat& _curFrame, std::vector<cv::Point2f>& _curFeatures, cv::Rect& faceRect, double& executionTime){


	int returnValue;
	std::vector<float> err;
	std::vector<uchar> status;
	cv::calcOpticalFlowPyrLK(prevFrame, _curFrame, _prevFeatures, _curFeatures,
		   status, err, cv::Size(31,31), 3, termcrit, 0, 0.001);

	cv::Point2f min_point(FLT_MAX, FLT_MAX);
	cv::Point2f max_point(-FLT_MAX, -FLT_MAX);  // FLT_MIN is the smallest positive float, not the most negative
	// compact the points array to drop points lost during tracking,
	// and map the survivors back to the original image coordinates
	size_t i, k;
	for (i = k = 0; i < _curFeatures.size(); ++i){
		
		// status[i] == 0 means the feature has been lost
		//if (status[i] == 0 || distance(_curFeatures[i].x, _curFeatures[i].y, _prevFeatures[i].x, _prevFeatures[i].y) > 30){
		if (status[i] == 0){
			continue;
		}

		// adjust it to the correct position
		_curFeatures[k].x = _curFeatures[i].x;
		_curFeatures[k].y = _curFeatures[i].y;

		min_point.x = Min(min_point.x, _curFeatures[k].x);
		min_point.y = Min(min_point.y, _curFeatures[k].y);
		max_point.x = Max(max_point.x, _curFeatures[k].x);
		max_point.y = Max(max_point.y, _curFeatures[k].y);
		
		++k;
	}
	_curFeatures.resize(k);
	
	// stabilize and decide the bounding box
	faceRect.x = cvRound(min_point.x);
	faceRect.y = cvRound(min_point.y);
	faceRect.width = cvRound((max_point.x - min_point.x));
	faceRect.height = cvRound((max_point.y - min_point.y));
	fc.faceRect = faceRect;
	fc.featurePoints = _curFeatures;

	//if (faceRect.width < 25 || faceRect.height < 25 || k < 15 || faceRect.height/(faceRect.width * 1.0) > 2.5){
	if (faceRect.width < 25 || faceRect.height < 25 || k < 15){	
		return -1;
	}


	// estimate time
	std::random_device rd;
	std::default_random_engine generator(rd());
	std::normal_distribution<double> distribution(42.29251578,7.351960796);
	executionTime = distribution(generator);
	
	// update //
	_curFrame.copyTo(prevFrame);
	_prevFeatures = _curFeatures;
	

	return 0;
}
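
All three trackers share the same core pattern: run pyramidal Lucas-Kanade between consecutive frames, compact away the points whose status flag is 0, and take the bounding box of the survivors. A self-contained sketch of that core against the modern OpenCV API is shown below; the function and variable names are placeholders, not part of the FaceTracker class.

	#include <opencv2/video/tracking.hpp>
	#include <algorithm>
	#include <cfloat>
	#include <vector>

	cv::Rect trackBox(const cv::Mat& prev, const cv::Mat& cur,
	                  std::vector<cv::Point2f>& pts)
	{
		std::vector<cv::Point2f> next;
		std::vector<uchar> status;
		std::vector<float> err;
		cv::calcOpticalFlowPyrLK(prev, cur, pts, next, status, err,
		                         cv::Size(31, 31), 3);

		cv::Point2f mn(FLT_MAX, FLT_MAX), mx(-FLT_MAX, -FLT_MAX);
		size_t k = 0;
		for (size_t i = 0; i < next.size(); ++i) {
			if (!status[i]) continue;  // the feature was lost; drop it
			next[k] = next[i];
			mn.x = std::min(mn.x, next[k].x);
			mn.y = std::min(mn.y, next[k].y);
			mx.x = std::max(mx.x, next[k].x);
			mx.y = std::max(mx.y, next[k].y);
			++k;
		}
		next.resize(k);
		pts = next;
		if (k == 0) return cv::Rect();  // every point was lost
		return cv::Rect(cvRound(mn.x), cvRound(mn.y),
		                cvRound(mx.x - mn.x), cvRound(mx.y - mn.y));
	}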