/**
 * @author     JIA Pei
 * @version    2010-06-07
 * @brief      Constrain all points respectively
 * @param      ioShape     Input and Output - the input and output shape
*/
void VO_Point2DDistributionModel::VO_ConstrainAllPoints(VO_Shape& ioShape)
{
    unsigned int NbOfPoints = ioShape.GetNbOfPoints();
    Point2f pt;

    for(unsigned int i = 0; i < NbOfPoints; i++)
    {
        pt = ioShape.GetA2DPoint(i);
        VO_Point2DDistributionModel::VO_ConstrainSinglePoint( pt, this->m_VONormalizedEllipses[i] );
        ioShape.SetA2DPoint(pt, i);
    }
}
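/**
 * Illustration only: each landmark above is clamped independently against its own
 * normalized ellipse via the library call VO_ConstrainSinglePoint. The sketch below
 * shows one plausible way to constrain a 2D point to an axis-aligned ellipse;
 * Ellipse2D and ConstrainPointToEllipse are hypothetical names, not the VOSM API,
 * and the real VO_ConstrainSinglePoint may behave differently.
 */
#include <opencv2/core/core.hpp>
#include <cmath>

struct Ellipse2D
{
    cv::Point2f center;    // ellipse center
    float a, b;            // semi-axes along x and y
};

// Radially pull a point back onto the ellipse boundary if it falls outside.
static void ConstrainPointToEllipse(cv::Point2f& pt, const Ellipse2D& e)
{
    float dx = pt.x - e.center.x;
    float dy = pt.y - e.center.y;
    float d  = (dx*dx)/(e.a*e.a) + (dy*dy)/(e.b*e.b);    // > 1 means outside the ellipse
    if (d > 1.0f)
    {
        float scale = 1.0f / std::sqrt(d);                // radial clamp, not the true closest point
        pt.x = e.center.x + dx * scale;
        pt.y = e.center.y + dy * scale;
    }
}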
/**
 * @author      Yao Wei
 * @brief       CMU Inverse Compositional update
 * @param       matDeltaP       Input - delta p, the incremental shape parameters
 * @param       matDeltaQ       Input - delta q, the incremental global transform parameters
 * @param       s               Input - the current shape
 * @param       estShape        Output - the shape newly estimated by inverse compositional composition
 */
void VO_FittingAAMInverseIA::VO_CMUInverseCompositional(const Mat_<float>& matDeltaP,
                                                        const Mat_<float>& matDeltaQ,
                                                        const VO_Shape& s,
                                                        VO_Shape& estShape)
{
    VO_Shape S0;
    // First: compute the model-aligned shape S0 from the incremental parameters
    // (cf. N.W(s0, -delta_p, -delta_q) in the legacy code commented out below).
    this->VO_PParamQParam2ModelAlignedShape( matDeltaP, matDeltaQ, S0);
//    cvConvertScale(dpq, __inv_pq, -1);
//    __shape.CalcShape(__inv_pq, __update_s0);    // __update_s0 = N.W(s0, -delta_p, -delta_q)

    // Second: compose the incremental warp with the current warp estimate.
    Point2f res, tmp;
    int count = 0;
    vector<unsigned int> vertexIdxes;

    for(unsigned int i = 0; i < this->m_VOAAMInverseIA->m_iNbOfPoints; i++)
    {
        res.x = 0.0;    res.y = 0.0;
        count = 0;
        //The only problem with this approach is which triangle do we use?
        //In general there will be several triangles that share the i-th vertex.
        for(unsigned j = 0; j < this->m_VOAAMInverseIA->m_iNbOfTriangles; j++)    // see Figure (11)
        {
            if ( this->m_vTriangle2D[j].HasNode(i) )
            {
                vertexIdxes = this->m_vTriangle2D[j].GetVertexIndexes();

                VO_WarpingPoint::WarpOnePoint(  S0.GetA2DPoint(i),
                                                this->m_vTriangle2D[j], 
                                                tmp,
                                                s.GetA2DPoint(vertexIdxes[0]),
                                                s.GetA2DPoint(vertexIdxes[1]),
                                                s.GetA2DPoint(vertexIdxes[2]) );
                res.x += tmp.x;
                res.y += tmp.y;
                count++;
            }
        }
        // Average the result so as to smooth the warp at each vertex.
        if(count == 0)
        {
            cerr << "Vertex " << i << " does not belong to any triangle in CMU Inverse Compositional!" << endl;
            continue;    // avoid dividing by zero; leave this point unchanged in estShape
        }
        res.x /= count;
        res.y /= count;
        estShape.SetA2DPoint(res, i);
    }
}
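/**
 * Illustration only: the averaging loop above relies on VO_WarpingPoint::WarpOnePoint
 * to warp one point through a triangle pair. The standalone sketch below shows the
 * usual barycentric (piecewise affine) formulation of such a warp; it is not the VOSM
 * implementation and the name WarpPointByTriangle is hypothetical.
 */
#include <opencv2/core/core.hpp>

static cv::Point2f WarpPointByTriangle(const cv::Point2f& p,
                                       const cv::Point2f& s0, const cv::Point2f& s1, const cv::Point2f& s2,
                                       const cv::Point2f& d0, const cv::Point2f& d1, const cv::Point2f& d2)
{
    // Barycentric coordinates of p with respect to the source triangle (s0, s1, s2).
    float denom = (s1.x - s0.x)*(s2.y - s0.y) - (s2.x - s0.x)*(s1.y - s0.y);
    float alpha = ((p.x - s0.x)*(s2.y - s0.y) - (s2.x - s0.x)*(p.y - s0.y)) / denom;
    float beta  = ((s1.x - s0.x)*(p.y - s0.y) - (p.x - s0.x)*(s1.y - s0.y)) / denom;

    // Re-express p with the same barycentric weights in the destination triangle (d0, d1, d2).
    return cv::Point2f(d0.x + alpha*(d1.x - d0.x) + beta*(d2.x - d0.x),
                       d0.y + alpha*(d1.y - d0.y) + beta*(d2.y - d0.y));
}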
/**
 * @brief       Estimate the face's absolute orientation (Euler angles) using POSIT
 * @param       iShape2D    Input - the 2D shape
 * @param       iShape3D    Input - the corresponding 3D shape
 * @param       oShape2D    Output - the 2D projection of the 3D shape under the estimated pose
 * @return      Euler angles (yaw, pitch, roll)
 */
vector<float> CRecognitionAlgs::CalcAbsoluteOrientations(
    const VO_Shape& iShape2D,
    const VO_Shape& iShape3D,
    VO_Shape& oShape2D)
{
    assert (iShape2D.GetNbOfPoints() == iShape3D.GetNbOfPoints() );
    unsigned int NbOfPoints = iShape3D.GetNbOfPoints();
    Point3f pt3d;
    Point2f pt2d;
    float height1 = iShape2D.GetHeight();
    float height2 = iShape3D.GetHeight();
    VO_Shape tempShape2D = iShape2D;
    tempShape2D.Scale(height2/height1);

    //Create the model points
    std::vector<CvPoint3D32f> modelPoints;
    for(unsigned int i = 0; i < NbOfPoints; ++i)
    {
        pt3d = iShape3D.GetA3DPoint(i);
        modelPoints.push_back(cvPoint3D32f(pt3d.x, pt3d.y, pt3d.z));
    }

    //Create the image points
    std::vector<CvPoint2D32f> srcImagePoints;
    for(unsigned int i = 0; i < NbOfPoints; ++i)
    {
        pt2d = tempShape2D.GetA2DPoint(i);
        srcImagePoints.push_back(cvPoint2D32f(pt2d.x, pt2d.y));
    }

    //Create the POSIT object with the model points
    CvPOSITObject *positObject = cvCreatePOSITObject( &modelPoints[0], NbOfPoints );

    //Estimate the pose
    CvMatr32f rotation_matrix = new float[9];
    CvVect32f translation_vector = new float[3];
    CvTermCriteria criteria = cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 100, 1.0e-4f);
    cvPOSIT( positObject, &srcImagePoints[0], FOCAL_LENGTH, criteria, rotation_matrix, translation_vector );

    //rotation_matrix to Euler angles, refer to VO_Shape::GetRotation
    float sin_beta  = -rotation_matrix[0 * 3 + 2];
    float tan_alpha = rotation_matrix[1 * 3 + 2] / rotation_matrix[2 * 3 + 2];
    float tan_gamma = rotation_matrix[0 * 3 + 1] / rotation_matrix[0 * 3 + 0];

    //Project the model points with the estimated pose
    oShape2D = tempShape2D;
    for ( unsigned int i=0; i < NbOfPoints; ++i )
    {
        pt3d.x = rotation_matrix[0] * modelPoints[i].x +
            rotation_matrix[1] * modelPoints[i].y +
            rotation_matrix[2] * modelPoints[i].z +
            translation_vector[0];
        pt3d.y = rotation_matrix[3] * modelPoints[i].x +
            rotation_matrix[4] * modelPoints[i].y +
            rotation_matrix[5] * modelPoints[i].z +
            translation_vector[1];
        pt3d.z = rotation_matrix[6] * modelPoints[i].x +
            rotation_matrix[7] * modelPoints[i].y +
            rotation_matrix[8] * modelPoints[i].z +
            translation_vector[2];
        if ( pt3d.z != 0 )
        {
            pt2d.x = FOCAL_LENGTH * pt3d.x / pt3d.z;
            pt2d.y = FOCAL_LENGTH * pt3d.y / pt3d.z;
        }
        oShape2D.SetA2DPoint(pt2d, i);
    }

    //return Euler angles
    vector<float> pos(3);
    pos[0] = atan(tan_alpha);    // yaw
    pos[1] = asin(sin_beta);     // pitch
    pos[2] = atan(tan_gamma);    // roll

    // Release the POSIT object and the pose buffers allocated above.
    cvReleasePOSITObject( &positObject );
    delete [] rotation_matrix;
    delete [] translation_vector;

    return pos;
}
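/**
 * Illustration only: the projection loop above applies the estimated pose [R|t] to each
 * model point and then performs a pinhole projection with the POSIT focal length. The
 * helper below restates that step in isolation; ProjectWithPose is a hypothetical name
 * and f stands for the FOCAL_LENGTH constant used above.
 */
#include <opencv2/core/core.hpp>
#include <cfloat>
#include <cmath>

// Apply a row-major 3x3 rotation R and a translation t to a 3D point, then project
// it with a pinhole camera of focal length f (the same math as the loop above).
static cv::Point2f ProjectWithPose(const cv::Point3f& X,
                                   const float R[9], const float t[3], float f)
{
    float x = R[0]*X.x + R[1]*X.y + R[2]*X.z + t[0];
    float y = R[3]*X.x + R[4]*X.y + R[5]*X.z + t[1];
    float z = R[6]*X.x + R[7]*X.y + R[8]*X.z + t[2];
    if (std::fabs(z) < FLT_EPSILON)        // degenerate depth: skip the division
        return cv::Point2f(x, y);
    return cv::Point2f(f * x / z, f * y / z);
}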
/**
 * @author      YAO Wei, JIA Pei
 * @version     2010-05-20
 * @brief       Find the best offset for one point
 * @param       asmmodel        Input - the ASM model
 * @param       iImg            Input - image to be fitted
 * @param       ioShape         Input and output - the input and output shape
 * @param       iShapeInfo      Input - the shape information
 * @param       iMean           Input - mean profile
 * @param       iCovInverse     Input - covariance inverse
 * @param       offSetTolerance Input - offset tolerance, used to determine whether this point has converged or not
 * @param       profdim         Input - the profile dimension to be used when updating the shape.
 *                              Sometimes the trained data holds 4D profiles, but the user may only use 1D for testing.
 * @note        Refer to "AAM Revisited", page 34, figure 13, in particular the steps listed there.
*/
int VO_FittingASMNDProfiles::UpdateShape(   const VO_ASMNDProfiles* asmmodel,
                                            const cv::Mat& iImg,
                                            VO_Shape& ioShape,
                                            const std::vector<VO_Shape2DInfo>& iShapeInfo,
                                            const std::vector< VO_Profile >& iMean,
                                            const std::vector< std::vector< cv::Mat_<float> > >& iCovInverse,
                                            unsigned int offSetTolerance,
                                            unsigned int profdim)
{
    int nGoodLandmarks = 0;
    std::vector<int> nBestOffset(profdim, 0);
    unsigned int NbOfPoints     = ioShape.GetNbOfPoints();
    unsigned int NbOfShapeDim   = ioShape.GetNbOfDim();
    unsigned int ProfileLength  = iMean[0].GetProfileLength();
    // Remember each point's best offset along the normal direction, so that the
    // 2nd-direction pass below can test convergence per point instead of reusing
    // a stale value from the last iteration of the 1st-direction pass.
    std::vector<int> bestOffsetAlongNormal(NbOfPoints, 0);
    //std::vector<float> dists(NbOfPoints, 0.0f);
    cv::Point2f pt;

    // Take care of the 1st direction first.
    for (unsigned int i = 0; i < NbOfPoints; i++)
    {
        /////////////////////////////////////////////////////////////////////////////
        ///Calculate profile norm direction//////////////////////////////////////////
        /** Here, this is not compatible with 3D */
        cv::Point2f PrevPoint = ioShape.GetA2DPoint ( iShapeInfo[i].GetFrom() );
        cv::Point2f ThisPoint = ioShape.GetA2DPoint ( i );
        cv::Point2f NextPoint = ioShape.GetA2DPoint ( iShapeInfo[i].GetTo() );

        float deltaX, deltaY;
        float normX, normY;
        float sqrtsum;
        float bestXOffset, bestYOffset;

        // left side (connected from side)
        deltaX = ThisPoint.x - PrevPoint.x;
        deltaY = ThisPoint.y - PrevPoint.y;
        sqrtsum = sqrt ( deltaX*deltaX + deltaY*deltaY );
        if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
        deltaX /= sqrtsum; deltaY /= sqrtsum;         // Normalize
        // First, normX and normY record the left-side normal.
        normX = -deltaY;
        normY = deltaX;

        // right side (connected to side)
        deltaX = NextPoint.x - ThisPoint.x;
        deltaY = NextPoint.y - ThisPoint.y;
        sqrtsum = sqrt ( deltaX*deltaX + deltaY*deltaY );
        if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
        deltaX /= sqrtsum; deltaY /= sqrtsum;         // Normalize
        // Second, accumulate the right-side normal, so normX and normY hold the sum of both.
        normX += -deltaY;
        normY += deltaX;

        // Average the left-side and right-side normals
        sqrtsum = sqrt ( normX*normX + normY*normY );
        if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
        normX /= sqrtsum;
        normY /= sqrtsum;                             // Final Normalize
        /////////////////////////////////////////////////////////////////////////////

        nBestOffset[0] = VO_FittingASMNDProfiles::VO_FindBestMatchingProfile1D( iImg,
                                                                                ThisPoint,
                                                                                iMean[i].Get1DimProfile(0),
                                                                                iCovInverse[i][0],
                                                                                ProfileLength,
                                                                                offSetTolerance,
                                                                                normX,
                                                                                normY);

        // set OutShape(iPoint) to best offset from current position
        // one dimensional profile: must move point along the whisker
        bestXOffset = nBestOffset[0] * normX;
        bestYOffset = nBestOffset[0] * normY;
        pt.x = ThisPoint.x + bestXOffset;
        pt.y = ThisPoint.y + bestYOffset;
        ioShape.SetA2DPoint(pt, i);
        bestOffsetAlongNormal[i] = nBestOffset[0];    // keep the per-point offset for the 2nd pass
        //dists[i] = sqrt( pow( (double)bestXOffset, 2.0) + pow( (double)bestYOffset, 2.0) );

        //if (abs(nBestOffset[0]) <= offSetTolerance/2)
        if(profdim == 1)
        {
            if (abs(nBestOffset[0]) <= 1)
                nGoodLandmarks++;
        }
    }
    
    // Original contribution from JIA Pei: now take care of the 2nd (tangent) direction.
    if(profdim == 2)
    {
        for (unsigned int i = 0; i < NbOfPoints; i++)
        {
            /////////////////////////////////////////////////////////////////////////////
            ///Calculate profile norm direction//////////////////////////////////////////
            /** Here, this is not compatible with 3D */
            cv::Point2f PrevPoint = ioShape.GetA2DPoint ( iShapeInfo[i].GetFrom() );
            cv::Point2f ThisPoint = ioShape.GetA2DPoint ( i );
            cv::Point2f NextPoint = ioShape.GetA2DPoint ( iShapeInfo[i].GetTo() );

            float deltaX, deltaY;
            float normX, normY;
            float tangentX, tangentY;
            float sqrtsum;
            float bestXOffset, bestYOffset;

            // left side (connected from side)
            deltaX = ThisPoint.x - PrevPoint.x;
            deltaY = ThisPoint.y - PrevPoint.y;
            sqrtsum = sqrt ( deltaX*deltaX + deltaY*deltaY );
            if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
            deltaX /= sqrtsum; deltaY /= sqrtsum;         // Normalize
            // First, normX and normY record the left-side normal.
            normX = -deltaY;
            normY = deltaX;

            // right side (connected to side)
            deltaX = NextPoint.x - ThisPoint.x;
            deltaY = NextPoint.y - ThisPoint.y;
            sqrtsum = sqrt ( deltaX*deltaX + deltaY*deltaY );
            if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
            deltaX /= sqrtsum; deltaY /= sqrtsum;         // Normalize
            // Second, accumulate the right-side normal, so normX and normY hold the sum of both.
            normX += -deltaY;
            normY += deltaX;

            // Average the left-side and right-side normals
            sqrtsum = sqrt ( normX*normX + normY*normY );
            if ( sqrtsum < FLT_EPSILON ) sqrtsum = 1.0f;
            normX /= sqrtsum;
            normY /= sqrtsum;                             // Final Normalize
            tangentX = -normY;
            tangentY = normX;                             // Final tangent direction
            /////////////////////////////////////////////////////////////////////////////

            nBestOffset[1] = VO_FittingASMNDProfiles::VO_FindBestMatchingProfile1D( iImg,
                                                                                    ThisPoint,
                                                                                    iMean[i].Get1DimProfile(1),
                                                                                    iCovInverse[i][1],
                                                                                    ProfileLength,
                                                                                    1,    // in tangent direction, offset = 1
                                                                                    tangentX,
                                                                                    tangentY);

            // set OutShape(iPoint) to best offset from current position
            // one dimensional profile: must move point along the tangent direction
            bestXOffset = nBestOffset[1] * tangentX;
            bestYOffset = nBestOffset[1] * tangentY;
            pt.x = ThisPoint.x + bestXOffset;
            pt.y = ThisPoint.y + bestYOffset;
            ioShape.SetA2DPoint(pt, i);
            //dists[i] += sqrt( pow((double)bestXOffset, 2.0) + pow((double)bestYOffset, 2.0) );

            //if (abs(nBestOffset) <= offSetTolerance/2)
            if (abs(bestOffsetAlongNormal[i]) <= 1 && abs(nBestOffset[1]) <= 1)
                nGoodLandmarks++;
        }
    }

    return nGoodLandmarks;
}
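/**
 * Illustration only: the profile-normal geometry at a landmark (average of the normals
 * of the edges to its "from" and "to" neighbours) is duplicated in both passes above.
 * The helper below restates it in isolation; LandmarkNormal is a hypothetical name,
 * not part of the VOSM API. The corresponding tangent is simply (-normal.y, normal.x).
 */
#include <opencv2/core/core.hpp>
#include <cfloat>
#include <cmath>

static cv::Point2f LandmarkNormal(const cv::Point2f& PrevPoint,
                                  const cv::Point2f& ThisPoint,
                                  const cv::Point2f& NextPoint)
{
    // Unit direction of the incoming edge, rotated 90 degrees -> left-side normal.
    float dx = ThisPoint.x - PrevPoint.x, dy = ThisPoint.y - PrevPoint.y;
    float len = std::sqrt(dx*dx + dy*dy);  if (len < FLT_EPSILON) len = 1.0f;
    float nx = -dy / len, ny = dx / len;

    // Unit direction of the outgoing edge, rotated 90 degrees -> right-side normal.
    dx = NextPoint.x - ThisPoint.x;  dy = NextPoint.y - ThisPoint.y;
    len = std::sqrt(dx*dx + dy*dy);  if (len < FLT_EPSILON) len = 1.0f;
    nx += -dy / len;  ny += dx / len;

    // Renormalize the averaged normal.
    len = std::sqrt(nx*nx + ny*ny);  if (len < FLT_EPSILON) len = 1.0f;
    return cv::Point2f(nx / len, ny / len);
}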