Example no. 1
0
/// Returns the rotation (as a quaternion) that brings this viewer's
/// coordinate frame into the camera coordinate frame: it maps the
/// member axes (unit, lat) onto the world axes (Z, X) respectively.
Quaternion Viewer3dParam::GetRotation()
{
    // Target axes in the camera frame.
    const Vec3f zAxis(0, 0, 1);
    const Vec3f xAxis(1, 0, 0);

    // ComputeRotation yields the quaternion taking (unit, lat) onto
    // (zAxis, xAxis); it is returned as-is (not conjugated).
    return ComputeRotation(unit, lat, zAxis, xAxis);
}
Example no. 2
0
//
// Main tracking function. Processes a given sequence of type YARPImageSequence:
//   1) scans every frame for a poking/contact event, segmenting the poked
//      object and recording its center of mass and orientation;
//   2) runs optic flow + a point tracker over a window around the contact
//      frame, accumulating per-frame rotation estimates into trsf[];
//   3) if too many frames lacked flow vectors, retries the flow pass at
//      half the frame rate (every second frame, parity-aligned with the
//      contact frame).
// Annotated output frames are written into oseq; intermediate results are
// streamed out through 'port' via GenerateAndSend().
//
// Returns:  0 on success,
//          -1 if no poking/contact was detected in the sequence,
//          -2 if the optic flow stayed poor even after the half-rate retry.
//
// LATER: remove printfs...
//
int 
YARPFlowTracker::Apply (YARPOutputPortOf<YARPGenericImage>& port)
{
	printf ("ox %d oy %d\n", ox, oy);

	// PROCESSING SEQUENCE.
	// Frame window of interest within the stored sequence.
	int start = seq->GetStartPushing();
	int stop = seq->GetStopPushing();
	printf ("Processing from frame %d to %d\n", start, stop); 

	printf ("Sequence has %d frames\n", seq->GetNumElements());
	assert (stop-start+1 <= m_maxsize);	

	// PREPARE SEQUENCE FOR PROCESSING.
	processor.Reset ();

	oseq.Reset ();

	// Reset per-run contact-detection state (member variables).
	contact = 0;
	contact_frame = 0;
	howmanycontacts = 0;

	lack_of_vectors = 0;

	// PASS 1: scan the whole window for contact (poking) events.
	int i, frame;
	for (frame = start; frame <= stop; frame++)
	{
		oseq.IncrementCounter ();
		contact = processor.Apply (seq->GetImageRef (frame), 
								   oseq.GetImageRef (frame-start));

		// NOTE(review): this immediately overwrites whatever processor.Apply
		// wrote into oseq with the raw input frame - confirm intended.
		oseq.GetImageRef (frame-start) = seq->GetImageRef (frame);

		if (contact)
		{
			// A poke was detected in this frame: record the poke direction
			// and the segmentation of the poked object (plus copies, since
			// the masks are modified below and reused in the retry pass).
			processor.GetPokeDirection (dirx, diry);
			processor.GetSegmentedImage (segmentation_mask);
			segmentation_mask_copy = segmentation_mask;
		
			processor.GetFlipper(flipper_segmentation_mask);
			flipper_segmentation_mask_copy = flipper_segmentation_mask;
						
			// Estimate object orientation and its reliability from the mask.
			ba.Apply(segmentation_mask_copy);
			orientation = ba.GetAngle();
			orientation_quality = ba.GetPointiness();

			// enlarge it twice.
			GrowMask (segmentation_mask, extended_segmentation);
			segmentation_mask = extended_segmentation;
			GrowMask (segmentation_mask, extended_segmentation);

			CenterOfMass (extended_segmentation, com_x, com_y);

			// Contact bookkeeping: keeps the LAST contact seen in the window.
			contact_frame = frame;
			howmanycontacts++;

			GenerateAndSend (port);
		}
	}

	if (howmanycontacts == 0)
	{
		printf ("No poking detected... \n");

		// nothing much to do.
		return -1;
	}

	// OPTICFLOW.	
	// Flow window: a few frames before contact up to FRAMES_TRACKED frames
	// after it, clipped to the available sequence range.
	int startopticflow = contact_frame - 5; //10;
	if (startopticflow < start) startopticflow = start;

	int endopticflow = contact_frame + FRAMES_TRACKED;
	if (endopticflow > stop) endopticflow = stop;

	// TRACK.
	// Rotation estimates are accumulated only up to endtracking.
	int endtracking = contact_frame+FRAMES_TRACKED;
	if (endtracking > stop) endtracking = stop;

	// optic flow mask, initialize optic flow and additional tracker.
	mask.Zero();

	YARPColorConverter::RGB2Grayscale (seq->GetImageRef(startopticflow), mono);
	of.Initialize (mono);

	// Tracker seeded at the object's center of mass; (sx,sy) is the previous
	// tracked position, (sx2,sy2) the current one.
	bool trackvalid = false;
	int sx = com_x, sy = com_y;
	int sx2 = com_x, sy2 = com_y;
	tracker.Initialize (seq->GetImageRef(startopticflow), com_x, com_y);
	
	// NOTE(review): named 'green' but ALL channels are set to 0, so the
	// circle below is drawn in black - confirm whether g should be nonzero.
	YarpPixelBGR green;
	green.r = green.b = 0;
	green.g = 0;

	// PASS 2: optic flow + tracking over the window, one rotation estimate
	// per tracked frame stored in trsf[i].
	i = 0;
	for (frame = startopticflow+1; frame <= endopticflow; frame++)
	{
		// NOTE(review): circle is drawn at index frame-start+1 while flow
		// output below uses frame-start - confirm the off-by-one is intended.
		AddCircleOutline (oseq.GetImageRef(frame-start+1), green, sx, sy, 10);

		if (tracker.IsTracking())
		{
			tracker.Apply (seq->GetImageRef(frame), true, sx2, sy2, trackvalid);
			printf ("frame: %d, valid: %d, sx, sy: %d %d\n", frame, trackvalid, sx2, sy2);
		}
 
		YARPColorConverter::RGB2Grayscale (seq->GetImageRef(frame), mono);

		// Before contact use the (zeroed) generic mask; from contact on,
		// restrict flow to the enlarged object segmentation. The tracker
		// displacement (sx2-sx, sy2-sy) compensates camera/object motion.
		if (frame < contact_frame)
			of.Apply (mono, mask, sx2-sx, sy2-sy, outimage, vx[frame-start], vy[frame-start]);
		else
			of.Apply (mono, extended_segmentation, sx2-sx, sy2-sy, outimage, vx[frame-start], vy[frame-start]);

		sx = sx2;
		sy = sy2;
 
		of.DrawFlow (oseq.GetImageRef (frame-start));

		if (frame == contact_frame)
			WriteMask (extended_segmentation, oseq.GetImageRef (frame-start));

		if (frame >= contact_frame+1 && frame <= endtracking)
		{
			// -2 from ComputeRotation signals too few flow vectors in the
			// mask to estimate a rotation for this frame.
			if (ComputeRotation (extended_segmentation, 
								 vx[frame-start], 
								 vy[frame-start], 
								 ox, 
								 oy, 
								 trsf[i], 
								 10) == -2)
				lack_of_vectors++;
			i++;

			WriteMask (extended_segmentation, oseq.GetImageRef (frame-start));
		}
	}

	CenterOfMass (extended_segmentation, final_x, final_y);
	printf ("starting point: %d %d\n", com_x, com_y);
	printf ("center of mass: %d %d\n", final_x, final_y);

	dispframe = contact_frame - start - 10;	// it was -5.
	m_stepping = 2;

	// RETRY: if too many frames had no usable flow, redo the flow pass at
	// half frame rate (step 2), which doubles apparent motion per step.
	if (lack_of_vectors > 6)
	{
		printf ("optic flow is poor, skipping frames\n");

		oseq.Reset ();

		// adjust start frame parity.
		// (so that stepping by 2 from newstart lands exactly on contact_frame)
		int newstart = start;
		if (((contact_frame % 2) == 0 && (newstart % 2) == 1) ||
			((contact_frame % 2) == 1 && (newstart % 2) == 0)
			)
			newstart ++;

		lack_of_vectors = 0;

		// NOTE(review): message reports 'start' but the flow loop below
		// runs from startopticflow (derived from newstart) - confirm.
		printf ("re-processing from frame %d to %d\n", start, stop); 
		
		// RECOMPUTING INDEX ETC.
		// 
		// Restore the pristine masks saved during pass 1.
		segmentation_mask = segmentation_mask_copy; 
		flipper_segmentation_mask = flipper_segmentation_mask_copy; 

		// enlarge it twice.
		GrowMask (segmentation_mask, extended_segmentation);
		segmentation_mask = extended_segmentation;
		GrowMask (segmentation_mask, extended_segmentation);

		// contact frame is ok.
		// poke dir is ok.
		// center of mass is ok.
				
		// RECOMPUTE OPTIC FLOW.
		// Same window logic as above, but wider (x2) and parity-aligned
		// with contact_frame so the 2-step loop hits it exactly.
		int startopticflow = contact_frame - 10; //20;
		if (startopticflow < newstart) startopticflow = newstart;
		if (((contact_frame % 2) == 0 && (startopticflow % 2) == 1) ||
			((contact_frame % 2) == 1 && (startopticflow % 2) == 0)
			)
			startopticflow ++;

		int endopticflow = contact_frame + FRAMES_TRACKED*2;
		if (endopticflow > stop) endopticflow = stop;


		// TRACK.
		int endtracking = contact_frame+FRAMES_TRACKED*2;
		if (endtracking > stop) endtracking = stop;


		YARPColorConverter::RGB2Grayscale (seq->GetImageRef(startopticflow), mono);
		of.Initialize (mono);

		// Re-seed the tracker from the original center of mass.
		// (these shadow the pass-2 locals of the same names)
		bool trackvalid = false;
		int sx = com_x, sy = com_y;
		int sx2 = com_x, sy2 = com_y;
		tracker.Initialize (seq->GetImageRef(startopticflow), com_x, com_y);
		
		// NOTE(review): same as above - 'green' is actually black (all 0).
		YarpPixelBGR green;
		green.r = green.b = 0;
		green.g = 0;

		// Refill the output sequence with the raw input frames.
		for (frame = start; frame <= stop; frame++)
		{
			oseq.IncrementCounter ();
			oseq.GetImageRef (frame-start) = seq->GetImageRef (frame);		
		}

		// Same flow/track loop as pass 2, but stepping every SECOND frame.
		i = 0;
		for (frame = startopticflow+2; frame <= endopticflow; frame+=2)
		{
			AddCircleOutline (oseq.GetImageRef(frame-start+2), green, sx, sy, 10);

			if (tracker.IsTracking())
			{
				tracker.Apply (seq->GetImageRef(frame), true, sx2, sy2, trackvalid);
				printf ("frame: %d, valid: %d, sx, sy: %d %d\n", frame, trackvalid, sx2, sy2);
			}

			YARPColorConverter::RGB2Grayscale (seq->GetImageRef(frame), mono);
			if (frame < contact_frame)
				of.Apply (mono, mask, sx2-sx, sy2-sy, outimage, vx[frame-start], vy[frame-start]);
			else
				of.Apply (mono, extended_segmentation, sx2-sx, sy2-sy, outimage, vx[frame-start], vy[frame-start]);

			sx = sx2;
			sy = sy2;

			of.DrawFlow (oseq.GetImageRef (frame-start));

			if (frame == contact_frame)
				WriteMask (extended_segmentation, oseq.GetImageRef (frame-start));

			if (frame >= contact_frame+2 && frame <= endtracking)
			{
				if (ComputeRotation (extended_segmentation, 
									 vx[frame-start], 
									 vy[frame-start], 
									 ox, 
									 oy, 
									 trsf[i], 
									 10) == -2)
					lack_of_vectors++;
				i++;

				WriteMask (extended_segmentation, oseq.GetImageRef (frame-start));
			}
		}

		CenterOfMass (extended_segmentation, final_x, final_y);
		printf ("starting point: %d %d\n", com_x, com_y);
		printf ("center of mass: %d %d\n", final_x, final_y);

		printf ("improved? %d\n", lack_of_vectors);

		//
		//
		//
		// Downstream consumers step the sequence by 4 now (half rate x2).
		m_stepping = 4;
		dispframe = contact_frame - start - 10;
		if (dispframe < 0) dispframe = 0;

		if (lack_of_vectors > 6)
		{
			// bad sequence.
			printf ("Still bad flow after post-processing\n");
			return -2;
		}
	}

	return 0;
}