bool ProjectProxyModel::lessThan(const QModelIndex& left, const QModelIndex& right) const
{
    KDevelop::ProjectBaseItem* iLeft = projectModel()->item(left);
    KDevelop::ProjectBaseItem* iRight = projectModel()->item(right);
    if (!iLeft || !iRight)
        return false;

    return iLeft->lessThan(iRight);
}
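// Resolve a proxy index to the underlying project item: map the proxy index
// back into the source model, then look the item up there.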
KDevelop::ProjectBaseItem* ProjectProxyModel::itemFromProxyIndex( const QModelIndex& idx ) const
{
    return static_cast<KDevelop::ProjectBaseItem*>( projectModel()->itemFromIndex( mapToSource( idx ) ) );
}
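// Inverse mapping: given an item of the source model, return the corresponding
// index in this proxy.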
QModelIndex ProjectProxyModel::proxyIndexFromItem(QStandardItem* item) const
{
    return mapFromSource(projectModel()->indexFromItem(item));
}
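
// A minimal usage sketch (hypothetical, assuming ProjectProxyModel follows the
// usual QSortFilterProxyModel pattern; `model`, `view`, and `parent` are not
// part of this file):
//
//     ProjectProxyModel* proxy = new ProjectProxyModel(parent);
//     proxy->setSourceModel(model);   // the KDevelop project model
//     view->setModel(proxy);
//     proxy->sort(0);                 // drives lessThan() above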
void activeModelTracker::deformSilhouette(vpHomogeneousMatrix& cMo_)
{
	// step 1: based on the pose and the CAD model, forward-project the
	// silhouette of the model onto the image plane.
	// step 1.1: find the visible lines.
	findVisibleLines(cMo_);
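	// (assumption: findVisibleLines performs the hidden-edge test for the pose
	// cMo_ and updates the isVisible[] flags checked in the loops below)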
	
	// step 1.2: generate the control points along each visible line, spaced
	// according to the distance between its corners.
	controlPoints.clear();
	projectModel(cMo_, cam);
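	// (assumption: projectModel fills prjCorners with the pixel coordinates of
	// the model corners projected under the pose cMo_ with camera parameters cam)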

	for (int i = 0; i < 12; i++) // iterate over the model's 12 lines (edges)
	{
		if (isVisible[i])
		{
			int p1, p2;
			line2Pts(i, p1, p2);
			vpPoint c1, c2;
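			// note: set_x()/set_y() stash the *pixel* coordinates of the
			// projected corners here; they are converted to normalized metric
			// coordinates in step 3, just before the pose estimation.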
			c1.set_x(prjCorners[p1].x);
			c1.set_y(prjCorners[p1].y);
			c1.setWorldCoordinates(corners[p1].x, corners[p1].y, corners[p1].z);
			c2.set_x(prjCorners[p2].x);
			c2.set_y(prjCorners[p2].y);
			c2.setWorldCoordinates(corners[p2].x, corners[p2].y, corners[p2].z);

			// save the two corner points first...
			controlPoints[i].push_back(c1);
			controlPoints[i].push_back(c2);

			// ...then generate the intermediate control points between them
			genControlPoints(controlPoints[i]);
		}
	}
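
	// A minimal sketch of what genControlPoints is assumed to do (hypothetical;
	// the real implementation lives elsewhere): linearly interpolate extra
	// points between the two corner entries, spaced by their pixel distance.
	//
	//     void genControlPoints(std::vector<vpPoint>& pts)
	//     {
	//         const vpPoint a = pts[0], b = pts[1];
	//         double dist = std::hypot(b.get_x() - a.get_x(), b.get_y() - a.get_y());
	//         int n = std::max(2, int(dist / 10.0)); // e.g. one point every ~10 px
	//         for (int k = 1; k < n; k++)
	//         {
	//             double t = double(k) / n;
	//             vpPoint p;
	//             p.set_x(a.get_x() + t * (b.get_x() - a.get_x()));
	//             p.set_y(a.get_y() + t * (b.get_y() - a.get_y()));
	//             p.setWorldCoordinates(a.get_oX() + t * (b.get_oX() - a.get_oX()),
	//                                   a.get_oY() + t * (b.get_oY() - a.get_oY()),
	//                                   a.get_oZ() + t * (b.get_oZ() - a.get_oZ()));
	//             pts.push_back(p);
	//         }
	//     }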
	
	// step 2: actively search for the high-gradient region.
	// step 2.1: for each line, get its slope.
	// step 2.2: for each control point on the line, find the position with the
	// highest gradient magnitude.
	for (int i = 0; i < 12; i++)
	{
		if (isVisible[i])
		{
			// the first two entries are the line's projected corner points
			cv::Point p1(controlPoints[i][0].get_x(), controlPoints[i][0].get_y());
			cv::Point p2(controlPoints[i][1].get_x(), controlPoints[i][1].get_y());
			bool isHorizontal;
			double step = lineSlope(p1, p2, isHorizontal);

			const int detectionRange = 5; // search range (in pixels) around each control point
			deformLine(step, isHorizontal, controlPoints[i], gradient, rows, cols, detectionRange);
		}
	}
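
	// A minimal sketch of the 1-D search deformLine is assumed to perform
	// (hypothetical; the real deformLine is declared elsewhere): each control
	// point slides perpendicular to the line, within +/- detectionRange pixels,
	// and snaps to the position with the largest gradient magnitude, e.g.:
	//
	//     for each control point p:
	//         best = p
	//         for d in [-detectionRange, detectionRange]:
	//             q = p shifted d pixels along the line normal
	//             if gradient(q) > gradient(best): best = q
	//         p = best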

	// step 3: estimate the pose from the tentatively moved control points.
	vpPoseFeatures pose;
	for (int i = 0; i < 12; i++)
	{
		if (isVisible[i])
		{
			for (size_t j = 0; j < controlPoints[i].size(); j++)
			{
				// debug only: draw the tracked features.
				// TODO: this plots every deformation step of the tracker.
				cv::circle(processedImg, cv::Point(controlPoints[i][j].get_x(), controlPoints[i][j].get_y()), 3, cv::Scalar(0, 0, 255));

				// convert the stashed pixel coordinates into the normalized
				// metric coordinates that vpPoseFeatures expects
				double x, y;
				vpPixelMeterConversion::convertPoint(cam, controlPoints[i][j].get_x(), controlPoints[i][j].get_y(), x, y);
				controlPoints[i][j].set_x(x);
				controlPoints[i][j].set_y(y);
				pose.addFeaturePoint(controlPoints[i][j]);
			}
		}
	}	
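	// lambda is the gain of the virtual visual servoing scheme that
	// vpPoseFeatures uses internally; 0.6 trades convergence speed for stability.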
	pose.setLambda(0.6);
	try
	{
		pose.computePose(cMo_);
	}
	catch (...) // catch any exception thrown during the pose computation
	{
		std::cerr << "Exception raised while computing the pose" << std::endl;
	}
}
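
// A minimal, hypothetical per-frame driver for deformSilhouette (the names
// `tracker`, `grabFrame`, `setImage`, and `display` are assumptions, not part
// of this file):
//
//     vpHomogeneousMatrix cMo = initialPose;
//     while (grabFrame(img))
//     {
//         tracker.setImage(img);          // assumed to refresh gradient, processedImg, rows, cols
//         tracker.deformSilhouette(cMo);  // refine the pose on the current frame
//         display(img, cMo);
//     }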