Example #1
VO_FaceCompPos VO_ShapeFace::CalcFaceCompRects(const VO_FaceParts& faceparts)
{
    Rect face, lefteye, righteye, nose, mouth;
    VO_Shape leftEyeShape, rightEyeShape, noseShape, nostrilShape, nosetipShape, wholenoseShape, mouthShape;

    // face
    face                = this->GetShapeBoundRect();

    // left eye
    leftEyeShape        = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::LEFTEYE).GetIndexes());
    lefteye             = leftEyeShape.GetShapeBoundRect();

    // right eye
    rightEyeShape       = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::RIGHTEYE).GetIndexes());
    righteye            = rightEyeShape.GetShapeBoundRect();

    // nose
    noseShape           = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::NOSE).GetIndexes());
    nostrilShape        = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::NOSTRIL).GetIndexes());
    nosetipShape        = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::NOSETIP).GetIndexes());
    wholenoseShape      = VO_Shape::Combine2Shapes(noseShape, nostrilShape);
    wholenoseShape      = VO_Shape::Combine2Shapes(wholenoseShape, nosetipShape);
    nose                = wholenoseShape.GetShapeBoundRect();

    // mouth
    mouthShape          = this->GetSubShape(faceparts.VO_GetOneFacePart(VO_FacePart::LIPOUTERLINE).GetIndexes());
    mouth               = mouthShape.GetShapeBoundRect();
    
    m_VOFaceCompPos     = VO_FaceCompPos("", &face, &lefteye, &righteye, &nose, &mouth);
    
    return  m_VOFaceCompPos;
}
Example #2
float CRecognitionAlgs::CalcFaceYaw(const vector<float>& iLine,
                                    const VO_Shape& iShape,
                                    const VO_FaceParts& iFaceParts)
{
    float yaw = 0.0f;
    int dim = iShape.GetNbOfDim();

    // Theoretically, using the eye corners is correct, but it's not stable at all. Therefore, COG_left and COG_right are used here instead.
    ///////////////////////////////////////////////////////////////////////////////
    //     float leftDist = 0.0f, rightDist = 0.0f;    
    //     vector<unsigned int> eyeCornerPoints = iFaceParts.GetEyeCornerPoints().GetIndexes();
    //     Point2f leftmostEyeCorner = Point2f(FLT_MAX, 0.0f);
    //     Point2f rightmostEyeCorner = Point2f(0.0f, 0.0f);
    // 
    //     for(unsigned int i = 0; i < eyeCornerPoints.size(); ++i)
    //     {
    //         if(leftmostEyeCorner.x > iShape.GetAShape(dim*eyeCornerPoints[i]) )
    //         {
    //             leftmostEyeCorner.x = iShape.GetAShape(dim*eyeCornerPoints[i]);
    //             leftmostEyeCorner.y = iShape.GetAShape(dim*eyeCornerPoints[i]+1);
    //         }
    //         if(rightmostEyeCorner.x < iShape.GetAShape(dim*eyeCornerPoints[i]) )
    //         {
    //             rightmostEyeCorner.x = iShape.GetAShape(dim*eyeCornerPoints[i]);
    //             rightmostEyeCorner.y = iShape.GetAShape(dim*eyeCornerPoints[i]+1);
    //         }
    //     }
    //     leftDist = cvDistFromAPoint2ALine2D(leftmostEyeCorner,  iLine);
    //     rightDist = cvDistFromAPoint2ALine2D(rightmostEyeCorner,  iLine);
    //     float r = leftDist/rightDist;
    // Refer to my PhD dissertation. Chapter 4
    //     yaw = atan ( ( 0.65*(r-1) ) / ( 0.24 * (r+1) ) ) * 180.0f / CV_PI;
    ///////////////////////////////////////////////////////////////////////////////

    float leftDist = 0.0f, rightDist = 0.0f;
    vector<unsigned int> leftSidePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTSIDEPOINTS).GetIndexes();
    vector<unsigned int> rightSidePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTSIDEPOINTS).GetIndexes();
    for(unsigned int i = 0; i < leftSidePoints.size(); ++i)
    {
        leftDist += cvDistFromAPoint2ALine2D(Point2f(iShape.GetAShape(dim*leftSidePoints[i]), iShape.GetAShape(dim*leftSidePoints[i]+1)),  iLine);
    }
    for(unsigned int i = 0; i < rightSidePoints.size(); ++i)
    {
        rightDist += cvDistFromAPoint2ALine2D(Point2f(iShape.GetAShape(dim*rightSidePoints[i]), iShape.GetAShape(dim*rightSidePoints[i]+1)),  iLine);
    }

    float r = leftDist/rightDist;
    // Refer to my PhD dissertation. Chapter 4
    // yaw = atan ( ( 0.65*(r-1) ) / ( 0.24 * (r+1) ) ) * 180.0f / CV_PI;
    yaw = atan( (r-1) / (r+1) ) * safeDoubleToFloat(180.0 / CV_PI);

    return yaw;
}
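
For reference, cvDistFromAPoint2ALine2D is used above but not shown. Below is a minimal sketch of what such a helper might look like, assuming the line is passed as the three coefficients (A, B, C) of Ax + By + C = 0 (the form produced by VO_KeyPoint::CalcFaceKeyline in Example #3); the library's actual signature and sign convention may differ.

#include <cmath>
#include <vector>
#include <opencv2/core/core.hpp>

// Unsigned distance from a 2D point to a line given as (A, B, C).
static float distFromPointToLine2D(const cv::Point2f& pt,
                                   const std::vector<float>& line)
{
    // |A*x + B*y + C| / sqrt(A^2 + B^2); the denominator is ~1 when (A, B)
    // comes from cv::fitLine's unit direction vector.
    float num = std::fabs(line[0]*pt.x + line[1]*pt.y + line[2]);
    float den = std::sqrt(line[0]*line[0] + line[1]*line[1]);
    return num / den;
}
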
Example #3
/**
* @brief    Calculate some key lines on the face
* @param    oLine       Output  the output line, as the coefficients (A, B, C) of Ax + By + C = 0
* @param    iShape      Input   the known shape
* @param    iFaceParts  Input   the face parts
* @param    oSubshape   Output  the output subshape, i.e. the points the line is fitted to, represented as a VO_Shape
* @param    partIdx     Input   which face part it is
* @return   void
 */
void VO_KeyPoint::CalcFaceKeyline(
    std::vector<float>& oLine,
    const VO_Shape& iShape,
    const VO_FaceParts& iFaceParts,
    VO_Shape& oSubshape,
    unsigned int partIdx)
{
    oLine.resize(3);
    int dim = iShape.GetNbOfDim();

    cv::Vec4f line;
    std::vector<unsigned int> linePoints;

    switch(partIdx)
    {
    case VO_FacePart::NOSTRIL:
        linePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSTRIL).GetIndexes();
        break;
    case VO_FacePart::MOUTHCORNERPOINTS:
        linePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::MOUTHCORNERPOINTS).GetIndexes();
        break;
    case VO_FacePart::PITCHAXISLINEPOINTS:
        linePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::PITCHAXISLINEPOINTS).GetIndexes();
        break;
    case VO_FacePart::EYECORNERPOINTS:
        linePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::EYECORNERPOINTS).GetIndexes();
        break;
    case VO_FacePart::MIDLINEPOINTS:
    default:
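        // Note: both MIDLINEPOINTS and the default case currently fall back to the eye-corner points.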
        linePoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::EYECORNERPOINTS).GetIndexes();
        break;
    }
    
    oSubshape = iShape.GetSubShape(linePoints);

    // Explained by JIA Pei: sometimes there are no linePoints, which means the specified part is not annotated in the database in use.
    if(linePoints.size() >= 2 )
    {
        cv::fitLine( oSubshape.GetTheShape(), line, CV_DIST_L2, 0, 0.001, 0.001 );

        // Ax+By+C = 0
        oLine[0] = -line[1];
        oLine[1] = line[0];
        oLine[2] = line[1]*line[2]-line[0]*line[3];
    }
}
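
For reference, cv::fitLine on 2D points returns a cv::Vec4f (vx, vy, x0, y0): a unit direction vector plus a point on the fitted line. The coefficients written into oLine above are the normal (-vy, vx) together with C = vy*x0 - vx*y0, chosen so that (x0, y0) satisfies Ax + By + C = 0. A minimal self-contained check follows; the sample points are arbitrary illustration data, and modern OpenCV names the distance type cv::DIST_L2 while the code above uses the legacy CV_DIST_L2.

#include <cmath>
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

int main()
{
    // A few roughly collinear points.
    std::vector<cv::Point2f> pts = { {0.f, 1.f}, {1.f, 3.1f}, {2.f, 4.9f}, {3.f, 7.f} };

    cv::Vec4f l;                                       // (vx, vy, x0, y0)
    cv::fitLine(pts, l, cv::DIST_L2, 0, 0.01, 0.01);

    // Same construction as in CalcFaceKeyline above.
    float A = -l[1], B = l[0], C = l[1]*l[2] - l[0]*l[3];

    // (x0, y0) lies on the line, so the residual should be ~0.
    float residual = A*l[2] + B*l[3] + C;
    return (std::fabs(residual) < 1e-4f) ? 0 : 1;
}
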
Example #4
// Refer to my PhD thesis, chapter 4
float CRecognitionAlgs::CalcFacePitch(  const VO_Shape& iShape,
                                        const VO_FaceParts& iFaceParts)
{
    float pitch = 0.0f;
    int dim = iShape.GetNbOfDim();
    float NNQ, ENQ, EQ, NO;

    // Theoretically, using the eye corners is correct, but it's not quite stable. It's better to use the two nostrils first, if the nostril part is defined in faceparts.
    ///////////////////////////////////////////////////////////////////////////////
    //     unsigned int nosetipBottom = 0;
    //     vector<unsigned int> nosePoints             = iFaceParts.GetNose().GetIndexes();
    //     vector<unsigned int> midlinePoints         = iFaceParts.GetMidlinePoints().GetIndexes();
    //     vector<unsigned int> pitchAxisPoints    = iFaceParts.GetPitchAxisLinePoints().GetIndexes();
    //     VO_Shape nose, midLine, pitchAxis;
    //     nose.SetDim(dim);
    //     midLine.SetDim(dim);
    //     pitchAxis.SetDim(dim);
    //     nose.SetSize( nosePoints.size()*dim );
    //     midLine.SetSize( midlinePoints.size()*dim );
    //     pitchAxis.SetSize(pitchAxisPoints.size()*dim );
    // 
    //     for(unsigned int i = 0; i < nosePoints.size(); ++i)
    //     {
    //         for(unsigned int j = 0; j < midlinePoints.size(); ++j)
    //         {
    //             if(nosePoints[i] == midlinePoints[j])
    //             {
    //                 nosetipBottom = nosePoints[i];
    //                 break;
    //             }
    //         }
    //     }
    // 
    //     Point2f ntPoint  = Point2f(iShape.GetAShape(dim*nosetipBottom), iShape.GetAShape(dim*nosetipBottom+1));
    //     Point2f paPoint1 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[0]), iShape.GetAShape(dim*pitchAxisPoints[0]+1));
    //     Point2f paPoint2 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[1]), iShape.GetAShape(dim*pitchAxisPoints[1]+1));
    // 
    //     float NNQ = ( (ntPoint.y - paPoint1.y) + (ntPoint.y - paPoint2.y) ) / 2.0f;
    //     float ENQ = fabs(ntPoint.x - paPoint1.x) > fabs(paPoint2.x - ntPoint.x) ? fabs(ntPoint.x - paPoint1.x) : fabs(paPoint2.x - ntPoint.x);
    //     float EQ = sqrt(ENQ*ENQ + NNQ*NNQ);
    //     float NO = sqrt(2.0f)/2.0f*EQ;
    ///////////////////////////////////////////////////////////////////////////////

    vector<unsigned int> nostrilPoints          = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSTRIL).GetIndexes();
    if(nostrilPoints.size() != 0)
    {
        vector<unsigned int> pitchAxisPoints    = iFaceParts.VO_GetOneFacePart(VO_FacePart::PITCHAXISLINEPOINTS).GetIndexes();

        Point2f ntPoint1 = Point2f(iShape.GetAShape(dim*nostrilPoints[0]), iShape.GetAShape(dim*nostrilPoints[0]+1));
        Point2f ntPoint2 = Point2f(iShape.GetAShape(dim*nostrilPoints[1]), iShape.GetAShape(dim*nostrilPoints[1]+1));
        Point2f paPoint1 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[0]), iShape.GetAShape(dim*pitchAxisPoints[0]+1));
        Point2f paPoint2 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[1]), iShape.GetAShape(dim*pitchAxisPoints[1]+1));

        NNQ = ( (ntPoint1.y - paPoint1.y) + (ntPoint2.y - paPoint2.y) ) / 2.0f;
        ENQ = fabs(ntPoint1.x - paPoint1.x) > fabs(paPoint2.x - ntPoint2.x) ? fabs(ntPoint1.x - paPoint1.x + (ntPoint2.x - ntPoint1.x) / 2.0f) : fabs(paPoint2.x - ntPoint2.x + (ntPoint2.x - ntPoint1.x) / 2.0f);
        EQ = sqrt(ENQ*ENQ + NNQ*NNQ);
        NO = sqrt(2.0f)/2.0f*EQ;
    }
    else
    {
        unsigned int nosetipBottom = 0;
        vector<unsigned int> nosePoints         = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSE).GetIndexes();
        vector<unsigned int> midlinePoints      = iFaceParts.VO_GetOneFacePart(VO_FacePart::MIDLINEPOINTS).GetIndexes();
        vector<unsigned int> pitchAxisPoints    = iFaceParts.VO_GetOneFacePart(VO_FacePart::PITCHAXISLINEPOINTS).GetIndexes();

        for(unsigned int i = 0; i < nosePoints.size(); ++i)
        {
            for(unsigned int j = 0; j < midlinePoints.size(); ++j)
            {
                if(nosePoints[i] == midlinePoints[j])
                {
                    nosetipBottom = nosePoints[i];
                    break;
                }
            }
        }

        Point2f ntPoint  = Point2f(iShape.GetAShape(dim*nosetipBottom), iShape.GetAShape(dim*nosetipBottom+1));
        Point2f paPoint1 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[0]), iShape.GetAShape(dim*pitchAxisPoints[0]+1));
        Point2f paPoint2 = Point2f(iShape.GetAShape(dim*pitchAxisPoints[1]), iShape.GetAShape(dim*pitchAxisPoints[1]+1));

        NNQ = ( (ntPoint.y - paPoint1.y) + (ntPoint.y - paPoint2.y) ) / 2.0f;
        ENQ = fabs(ntPoint.x - paPoint1.x) > fabs(paPoint2.x - ntPoint.x) ? fabs(ntPoint.x - paPoint1.x) : fabs(paPoint2.x - ntPoint.x);
        EQ = sqrt(ENQ*ENQ + NNQ*NNQ);
        NO = sqrt(2.0f)/2.0f*EQ;
    }

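    // asin() is only defined for arguments in [-1, 1]; outside that range the pitch is saturated to +/-90 degrees.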
    if( fabs(NNQ/NO) < 1.0f)
        pitch = asin ( NNQ / NO ) * safeDoubleToFloat(180.0 / CV_PI);
    else if (NNQ * NO < 0.0f)
        pitch = -90.0f;
    else
        pitch = 90.0f;

    return pitch;
}
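
In short, Example #4 estimates pitch from the nose's vertical offset relative to the pitch-axis line: NNQ is the mean vertical offset, ENQ the larger of the two horizontal offsets, EQ = sqrt(ENQ^2 + NNQ^2), and NO = (sqrt(2)/2) * EQ, giving pitch = asin(NNQ / NO) * 180 / pi. The sqrt(2)/2 factor is the model assumption referenced from the author's thesis (chapter 4); it is not derived here.
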
Example #5
/**
* @brief    Whether the tracked shape is really a face.
*           If both eyes and the mouth can be detected in the regions
*           predicted by the shape (prior knowledge), we may regard the
*           shape as correctly describing a face.
* @param    iImg        - input     input image
* @param    iShape      - input     the current tracked shape
* @param    iShapeInfo  - input     shape info
* @param    iFaceParts  - input     face parts
* @return   bool    whether the tracked shape is acceptable?
*/
bool CRecognitionAlgs::EvaluateFaceTrackedByCascadeDetection(
    const CFaceDetectionAlgs* fd,
    const Mat& iImg,
    const VO_Shape& iShape,
    const vector<VO_Shape2DInfo>& iShapeInfo, 
    const VO_FaceParts& iFaceParts)
{
    double t = (double)cvGetTickCount();

    unsigned int ImgWidth       = iImg.cols;
    unsigned int ImgHeight      = iImg.rows;

    vector<unsigned int> leftEyePoints      = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTEYE).GetIndexes();
    vector<unsigned int> rightEyePoints     = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTEYE).GetIndexes();
    vector<unsigned int> lipOuterLinerPoints= iFaceParts.VO_GetOneFacePart(VO_FacePart::LIPOUTERLINE).GetIndexes();

    VO_Shape leftEyeShape       = iShape.GetSubShape(leftEyePoints);
    VO_Shape rightEyeShape      = iShape.GetSubShape(rightEyePoints);
    VO_Shape lipOuterLinerShape = iShape.GetSubShape(lipOuterLinerPoints);

    float dolEye = 12.0f;
    float dolMouth = 12.0f;

    unsigned int possibleLeftEyeMinX    = 0.0f > (leftEyeShape.MinX() - dolEye) ? 0: (int)(leftEyeShape.MinX() - dolEye);
    unsigned int possibleLeftEyeMinY    = 0.0f > (leftEyeShape.MinY() - dolEye) ? 0: (int)(leftEyeShape.MinY() - dolEye);
    unsigned int possibleLeftEyeMaxX    = (leftEyeShape.MaxX() + dolEye) > ImgWidth ? ImgWidth : (int)(leftEyeShape.MaxX() + dolEye);
    unsigned int possibleLeftEyeMaxY    = (leftEyeShape.MaxY() + dolEye) > ImgHeight ? ImgHeight : (int)(leftEyeShape.MaxY() + dolEye);
    unsigned int possibleLeftEyeWidth   = possibleLeftEyeMaxX - possibleLeftEyeMinX;
    unsigned int possibleLeftEyeHeight  = possibleLeftEyeMaxY - possibleLeftEyeMinY;
    unsigned int possibleRightEyeMinX   = 0.0f > (rightEyeShape.MinX() - dolEye) ? 0: (int)(rightEyeShape.MinX() - dolEye);
    unsigned int possibleRightEyeMinY   = 0.0f > (rightEyeShape.MinY() - dolEye) ? 0: (int)(rightEyeShape.MinY() - dolEye);
    unsigned int possibleRightEyeMaxX   = (rightEyeShape.MaxX() + dolEye) > ImgWidth ? ImgWidth : (int)(rightEyeShape.MaxX() + dolEye);
    unsigned int possibleRightEyeMaxY   = (rightEyeShape.MaxY() + dolEye) > ImgHeight ? ImgHeight : (int)(rightEyeShape.MaxY() + dolEye);
    unsigned int possibleRightEyeWidth  = possibleRightEyeMaxX - possibleRightEyeMinX;
    unsigned int possibleRightEyeHeight = possibleRightEyeMaxY - possibleRightEyeMinY;
    unsigned int possibleMouthMinX      = 0.0f > (lipOuterLinerShape.MinX() - dolMouth) ? 0: (int)(lipOuterLinerShape.MinX() - dolMouth);
    unsigned int possibleMouthMinY      = 0.0f > (lipOuterLinerShape.MinY() - dolMouth) ? 0: (int)(lipOuterLinerShape.MinY() - dolMouth);
    unsigned int possibleMouthMaxX      = (lipOuterLinerShape.MaxX() + dolMouth) > ImgWidth ? ImgWidth : (int)(lipOuterLinerShape.MaxX() + dolMouth);
    unsigned int possibleMouthMaxY      = (lipOuterLinerShape.MaxY() + dolMouth) > ImgHeight ? ImgHeight : (int)(lipOuterLinerShape.MaxY() + dolMouth);
    unsigned int possibleMouthWidth     = possibleMouthMaxX - possibleMouthMinX;
    unsigned int possibleMouthHeight    = possibleMouthMaxY - possibleMouthMinY;

    Rect LeftEyePossibleWindow  = Rect( possibleLeftEyeMinX, possibleLeftEyeMinY, possibleLeftEyeWidth, possibleLeftEyeHeight );
    Rect RightEyePossibleWindow = Rect( possibleRightEyeMinX, possibleRightEyeMinY, possibleRightEyeWidth, possibleRightEyeHeight );
    Rect MouthPossibleWindow    = Rect( possibleMouthMinX, possibleMouthMinY, possibleMouthWidth, possibleMouthHeight );
    Rect CurrentWindow          = Rect( 0, 0, iImg.cols, iImg.rows );
    Rect DetectedLeftEyeWindow, DetectedRightEyeWindow, DetectedMouthWindow;

    bool LeftEyeDetected    = const_cast<CFaceDetectionAlgs*>(fd)->VO_FacePartDetection ( iImg, LeftEyePossibleWindow, DetectedLeftEyeWindow, VO_FacePart::LEFTEYE);
    bool RightEyeDetected   = const_cast<CFaceDetectionAlgs*>(fd)->VO_FacePartDetection ( iImg, RightEyePossibleWindow, DetectedRightEyeWindow, VO_FacePart::RIGHTEYE );
    bool MouthDetected      = const_cast<CFaceDetectionAlgs*>(fd)->VO_FacePartDetection ( iImg, MouthPossibleWindow, DetectedMouthWindow, VO_FacePart::LIPOUTERLINE );

    t = ((double)cvGetTickCount() -  t )
        / (cvGetTickFrequency()*1000.0f);
    cout << "Detection Confirmation time cost: " << t << " millisec" << endl;

    if(LeftEyeDetected && RightEyeDetected && MouthDetected)
        return true;
    else
        return false;
}
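
The repeated expand-and-clamp logic above can be expressed compactly with cv::Rect arithmetic. A minimal sketch under that assumption follows; the helper name and the margin parameter are illustrative, not part of the library.

#include <opencv2/core/core.hpp>

// Expand a component's bounding box by a margin and clamp it to the image,
// mirroring the "possible window" computation above.
static cv::Rect expandAndClamp(const cv::Rect_<float>& box, float margin,
                               const cv::Size& imgSize)
{
    cv::Rect expanded(cvFloor(box.x - margin), cvFloor(box.y - margin),
                      cvCeil(box.width + 2.0f*margin), cvCeil(box.height + 2.0f*margin));
    // Intersecting with the full image rectangle clips negative or
    // out-of-range coordinates.
    return expanded & cv::Rect(0, 0, imgSize.width, imgSize.height);
}
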
Example #6
/**
 * @brief       Calculate some key points on the face
 * @param       oPoint      output  the computed key point
 * @param       iShape      input   shape
 * @param       iFaceParts  input   face parts
 * @param       ptType      input   point type
 * @return      void
 */
void VO_KeyPoint::CalcFaceKeyPoint( cv::Point2f& oPoint,
                                    const VO_Shape& iShape,
                                    const VO_FaceParts& iFaceParts,
                                    unsigned int ptType)
{
    std::vector<unsigned int> facePartsPoints;
    VO_Shape subiShape;
    // Very important. Explained by JIA Pei:
    // "resize()" only resizes; it doesn't necessarily reset what's already
    // inside the std::vector to "0".
    // Therefore, clear() is a must before resize().

    switch(ptType)
    {
    case CENTEROFGRAVITY:
        if (iShape.GetNbOfPoints() > 0)
            oPoint = iShape.GetA2DPoint( VO_Shape::CENTER);
        break;
    case LEFTEYELEFTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::LEFTMOST);
            }
        }
        break;
    case LEFTEYERIGHTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::RIGHTMOST);
            }
        }
        break;
    case LEFTEYECENTER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint( VO_Shape::CENTER);
            }
        }
        break;
    case RIGHTEYELEFTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::LEFTMOST);
            }
        }
        break;
    case RIGHTEYERIGHTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::RIGHTMOST);
            }
        }
        break;
    case RIGHTEYECENTER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTEYE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint( VO_Shape::CENTER);
            }
        }
        break;
    case NOSETIPKEY:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSETIP).GetIndexes();    // Just one point
            if (facePartsPoints.size() == 1)
                oPoint = iShape.GetA2DPoint(facePartsPoints[0]);
        }
        break;
    case NOSTRILLEFT:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSTRIL).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::LEFTMOST);
            }
        }
        break;
    case NOSTRILRIGHT:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSTRIL).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::RIGHTMOST);
            }
        }
        break;
    case NOSECENTER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::NOSE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint( VO_Shape::CENTER);
            }
        }
        break;
    case MOUTHLEFTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LIPOUTERLINE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::LEFTMOST);
            }
        }
        break;
    case MOUTHRIGHTCORNER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LIPOUTERLINE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint(VO_Shape::RIGHTMOST);
            }
        }
        break;
    case MOUTHCENTER:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LIPOUTERLINE).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
                subiShape = iShape.GetSubShape(facePartsPoints);
                oPoint = subiShape.GetA2DPoint( VO_Shape::CENTER);
            }
        }
        break;
    case EARLOBELEFT:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::LEFTEAR).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
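                // Note: no key point is computed for the left ear lobe here.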
            }
        }
        break;
    case EARLOBERIGHT:
        {
            facePartsPoints = iFaceParts.VO_GetOneFacePart(VO_FacePart::RIGHTEAR).GetIndexes();
            if (facePartsPoints.size() > 0)
            {
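                // Note: no key point is computed for the right ear lobe here.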
            }
        }
        break;
    }
}