void FaceTrackingRenderer::DrawExpressions(PXCFaceData::Face* trackedFace, const int faceId)
{
	PXCFaceData::ExpressionsData* expressionsData = trackedFace->QueryExpressions();
	if (!expressionsData)
		return;

	HWND panelWindow = GetDlgItem(m_window, IDC_PANEL);
	HDC dc1 = GetDC(panelWindow);
	HDC dc2 = CreateCompatibleDC(dc1);
	if (!dc2)
	{
		ReleaseDC(panelWindow, dc1);
		return;
	}
	SelectObject(dc2, m_bitmap);

	BITMAP bitmap;
	GetObject(m_bitmap, sizeof(bitmap), &bitmap);

	HPEN red = CreatePen(PS_SOLID, 3, RGB(255, 0, 0)); // renamed: was "cyan", but RGB(255, 0, 0) is red
	if (!red)
	{
		DeleteDC(dc2);
		ReleaseDC(panelWindow, dc1);
		return;
	}
	SelectObject(dc2, red);

	// Lay the text out in columns of maxColumnDisplayedFaces faces each.
	const int maxColumnDisplayedFaces = 5;
	const int widthColumnMargin = 570;
	const int rowMargin = FaceTrackingUtilities::TextHeight;
	const int yStartingPosition = faceId % maxColumnDisplayedFaces * m_expressionMap.size() * FaceTrackingUtilities::TextHeight;
	const int xStartingPosition = widthColumnMargin * (faceId / maxColumnDisplayedFaces);

	WCHAR tempLine[200];
	int yPosition = yStartingPosition;

	swprintf_s(tempLine, L"ID: %d", trackedFace->QueryUserID());
	TextOut(dc2, xStartingPosition, yPosition, tempLine, static_cast<int>(wcslen(tempLine)));
	yPosition += rowMargin;

	// Print "name = intensity" for every expression the module reports.
	for (auto expressionIter = m_expressionMap.begin(); expressionIter != m_expressionMap.end(); ++expressionIter)
	{
		PXCFaceData::ExpressionsData::FaceExpressionResult expressionResult;
		if (expressionsData->QueryExpression(expressionIter->first, &expressionResult))
		{
			const int intensity = expressionResult.intensity;
			const std::wstring& expressionName = expressionIter->second;
			swprintf_s(tempLine, L"%s = %d", expressionName.c_str(), intensity);
			TextOut(dc2, xStartingPosition, yPosition, tempLine, static_cast<int>(wcslen(tempLine)));
			yPosition += rowMargin;
		}
	}

	DeleteObject(red);
	DeleteDC(dc2);
	ReleaseDC(panelWindow, dc1);
}
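// Hedged usage sketch: how DrawExpressions is presumably driven from the
// renderer's per-frame face loop. The member m_faceData and the method name
// DrawAllExpressions are assumptions, not confirmed by this file.
void FaceTrackingRenderer::DrawAllExpressions()
{
	if (!m_faceData) // hypothetical PXCFaceData* member
		return;
	const int numFaces = m_faceData->QueryNumberOfDetectedFaces();
	for (int i = 0; i < numFaces; ++i)
	{
		PXCFaceData::Face* face = m_faceData->QueryFaceByIndex(i);
		if (face)
			DrawExpressions(face, i); // faceId doubles as the column/row layout index
	}
}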
void Genie_Emotion::updateFaceFrame()
{
	const PXCCapture::Sample* sample = senseManager->QuerySample();
	if (sample)
	{
		updateColorImage(sample->color);
	}

	PXCEmotion::EmotionData arrData[NUM_TOTAL_EMOTIONS];
	emotionDet = senseManager->QueryEmotion();
	if (emotionDet == nullptr)
	{
		throw std::runtime_error("emotion module is not available");
	}

	const char* EmotionLabels[NUM_PRIMARY_EMOTIONS] = {
		"ANGER", "CONTEMPT", "DISGUST", "FEAR", "JOY", "SADNESS", "SURPRISE"
	};
	const char* SentimentLabels[NUM_SENTIMENT_EMOTIONS] = {
		"NEGATIVE", "POSITIVE", "NEUTRAL"
	};

	faceData->Update();

	// Check how many people were detected.
	const int numFaces = faceData->QueryNumberOfDetectedFaces();

	// Process each detected person; skip indices with no face data.
	for (int i = 0; i < numFaces; ++i)
	{
		auto face = faceData->QueryFaceByIndex(i);
		if (face == nullptr)
		{
			continue;
		}

		PXCRectI32 faceRect = { 0 };
		PXCFaceData::ExpressionsData* expressionData;
		PXCFaceData::ExpressionsData::FaceExpressionResult expressionResult;

		auto detection = face->QueryDetection();
		if (detection != nullptr)
		{
			detection->QueryBoundingRect(&faceRect);
		}

		// Request the expression data and count strong expressions.
		expressionData = face->QueryExpressions();
		if (expressionData != nullptr)
		{
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_MOUTH_OPEN, &expressionResult))
			{
				if (expressionResult.intensity >= 70) numOfExpressions[0]++;
			}
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_TONGUE_OUT, &expressionResult))
			{
				if (expressionResult.intensity >= 70) numOfExpressions[1]++;
			}
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_SMILE, &expressionResult))
			{
				if (expressionResult.intensity >= 70) numOfExpressions[2]++;
			}
		}

		emotionDet->QueryAllEmotionData(i, &arrData[0]);

		// Find the primary emotion with the strongest (evidence, intensity) pair.
		int idx_outstanding_emotion = -1;
		bool IsSentimentPresent = false;
		pxcI32 maxscoreE = -3;
		pxcF32 maxscoreI = 0;
		for (int j = 0; j < NUM_PRIMARY_EMOTIONS; j++) // was "i", shadowing the face loop index
		{
			if (arrData[j].evidence < maxscoreE) continue;
			if (arrData[j].intensity < maxscoreI) continue;
			maxscoreE = arrData[j].evidence;
			maxscoreI = arrData[j].intensity;
			idx_outstanding_emotion = j;
		}
		if (idx_outstanding_emotion != -1)
		{
			numOfPrimary[idx_outstanding_emotion]++;
		}
		if (maxscoreI > 0.4)
		{
			IsSentimentPresent = true;
		}

		// When a primary emotion is strong enough, also score the sentiments,
		// which follow the primaries in the array.
		if (IsSentimentPresent)
		{
			int idx_sentiment_emotion = -1;
			maxscoreE = -3;
			maxscoreI = 0;
			for (int j = 0; j < NUM_TOTAL_EMOTIONS - NUM_PRIMARY_EMOTIONS; j++) // was a magic "10"
			{
				if (arrData[NUM_PRIMARY_EMOTIONS + j].evidence < maxscoreE) continue;
				if (arrData[NUM_PRIMARY_EMOTIONS + j].intensity < maxscoreI) continue;
				maxscoreE = arrData[NUM_PRIMARY_EMOTIONS + j].evidence;
				maxscoreI = arrData[NUM_PRIMARY_EMOTIONS + j].intensity;
				idx_sentiment_emotion = j;
			}
			if (idx_sentiment_emotion != -1)
			{
				numOfSentimental[idx_sentiment_emotion]++; // was indexed with idx_outstanding_emotion, the wrong counter
			}
		}
	}
}
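// Hedged initialization sketch for the members updateFaceFrame() relies on
// (senseManager, faceData, emotionDet). The module and configuration calls
// follow the standard RealSense SDK pattern; the method name initialize()
// and the error handling are assumptions, not confirmed by this file.
bool Genie_Emotion::initialize()
{
	senseManager = PXCSenseManager::CreateInstance();
	if (!senseManager)
		return false;

	// Enable the face module and the emotion module.
	if (senseManager->EnableFace() < PXC_STATUS_NO_ERROR)
		return false;
	if (senseManager->EnableEmotion() < PXC_STATUS_NO_ERROR)
		return false;

	PXCFaceModule* faceModule = senseManager->QueryFace();
	if (!faceModule)
		return false;
	faceData = faceModule->CreateOutput();

	// Turn on expression tracking so QueryExpressions() returns data.
	PXCFaceConfiguration* config = faceModule->CreateActiveConfiguration();
	config->QueryExpressions()->Enable();
	config->QueryExpressions()->EnableAllExpressions();
	config->ApplyChanges();
	config->Release();

	return senseManager->Init() >= PXC_STATUS_NO_ERROR;
}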
void Emotions::update()
{
	if (mSenseMgr->AcquireFrame(true) >= PXC_STATUS_NO_ERROR)
	{
		// Emotion data
		PXCEmotion* emotionDet = mSenseMgr->QueryEmotion();
		PXCEmotion::EmotionData arrData[10];

		fdata->Update();
		int numFaces = fdata->QueryNumberOfDetectedFaces(); // single declaration; no longer shadows an outer numFaces
		for (int i = 0; i < numFaces; ++i)
		{
			// Face data
			PXCFaceData::Face* face = fdata->QueryFaceByIndex(i);
			PXCFaceData::ExpressionsData* edata = face->QueryExpressions();
			emotionDet->QueryAllEmotionData(i, &arrData[0]);

			// Face detection and location
			if (arrData->rectangle.x > -1 && arrData->rectangle.y > -1)
			{
				//cout << arrData->rectangle.x << ", " << arrData->rectangle.y << endl;
				iSeeYou = true;
			}
			else
				iSeeYou = false;

#pragma region Expression Logic
			if (iSeeYou && edata != nullptr) // guard against faces with no expression data
			{
				PXCFaceData::ExpressionsData::FaceExpressionResult smileScore;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_SMILE, &smileScore);
				PXCFaceData::ExpressionsData::FaceExpressionResult raiseLeftBrow;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_LEFT, &raiseLeftBrow);
				PXCFaceData::ExpressionsData::FaceExpressionResult raiseRightBrow;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_RIGHT, &raiseRightBrow);
				PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedLeft;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_LEFT, &eyeClosedLeft);
				PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedRight;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_RIGHT, &eyeClosedRight);
				PXCFaceData::ExpressionsData::FaceExpressionResult kiss;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &kiss);
				PXCFaceData::ExpressionsData::FaceExpressionResult openMouth;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_MOUTH_OPEN, &openMouth);
				PXCFaceData::ExpressionsData::FaceExpressionResult tongueOut;
				edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_TONGUE_OUT, &tongueOut);

				if (smileScore.intensity > 80)
					cout << "smile back!" << endl;
				if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity > 80)
					cout << "eyebrows up" << endl;
				if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity < 80)
					cout << "left eyebrow raised" << endl;  // messages now distinguish which brow
				if (raiseLeftBrow.intensity < 80 && raiseRightBrow.intensity > 80)
					cout << "right eyebrow raised" << endl;
				if (eyeClosedLeft.intensity > 80 && eyeClosedRight.intensity > 80)
					cout << "eyes closed" << endl;
				//else
				//	eyes open
				if (kiss.intensity > 80)
					cout << "kissy face!" << endl;
				if (openMouth.intensity > 80)
					cout << "say Ahhhhh" << endl;
				if (tongueOut.intensity > 80)
					cout << "Stick Tongue Out" << endl;
			}
			//PXCFaceData::ExpressionsData::FaceExpressionResult score;
			//edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &score);
			//cout << score.intensity << endl;
#pragma endregion // Expression Logic

#pragma region Emotion Logic
			/*bool emotionPresent = false;
			int epidx = -1;
			pxcI32 maxScoreE = -3;
			pxcF32 maxscoreI = 0;
			for (int i = 0; i < 7; i++)
			{
				if (arrData[i].evidence < maxScoreE) continue;
				if (arrData[i].intensity < maxscoreI) continue;
				maxScoreE = arrData[i].evidence;
				maxscoreI = arrData[i].intensity;
				epidx = i;
				std::this_thread::sleep_for(std::chrono::milliseconds(50));
			}
			if (maxScoreE > -1)
			{
				std::string foundEmo = "";
				switch (arrData[epidx].eid)
				{
				case PXCEmotion::EMOTION_PRIMARY_ANGER:      foundEmo = "Anger";    break;
				case PXCEmotion::EMOTION_PRIMARY_CONTEMPT:   foundEmo = "Contempt"; break;
				case PXCEmotion::EMOTION_PRIMARY_DISGUST:    foundEmo = "Disgust";  break;
				case PXCEmotion::EMOTION_PRIMARY_FEAR:       foundEmo = "Fear";     break;
				case PXCEmotion::EMOTION_PRIMARY_JOY:        foundEmo = "Joy";      break;
				case PXCEmotion::EMOTION_PRIMARY_SADNESS:    foundEmo = "Sadness";  break;
				case PXCEmotion::EMOTION_PRIMARY_SURPRISE:   foundEmo = "Surprise"; break;
				case PXCEmotion::EMOTION_SENTIMENT_POSITIVE: foundEmo = "Positive"; break;
				case PXCEmotion::EMOTION_SENTIMENT_NEGATIVE: foundEmo = "Negative"; break;
				case PXCEmotion::EMOTION_SENTIMENT_NEUTRAL:  foundEmo = "Neutral";  break;
				}
				cout << "outstanding emotion = " << foundEmo << endl;
			}
			if (maxscoreI > 0.4)
				emotionPresent = true;
			if (emotionPresent)
			{
				//int spidx = -1;
				maxScoreE = -3;
				maxscoreI = 0;
				for (int i = 0; i < 7; i++)
				{
					if (arrData[i].evidence < maxScoreE) continue;
					if (arrData[i].intensity < maxscoreI) continue;
					maxScoreE = arrData[i].evidence;
					maxscoreI = arrData[i].intensity;
					//spidx = i;
				}
			}*/
#pragma endregion // Emotion Logic
		}
		numFaces = emotionDet->QueryNumFaces();
		const PXCCapture::Sample* sample = mSenseMgr->QueryEmotionSample();
		mSenseMgr->ReleaseFrame();
	}
}
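// Hedged helper sketch: a standalone version of the commented-out "outstanding
// emotion" scan above. Given the 10-element array filled by QueryAllEmotionData,
// it returns the index of the primary emotion with the best (evidence, intensity)
// pair, or -1 when nothing scores. The function name is hypothetical.
static int FindOutstandingPrimaryEmotion(const PXCEmotion::EmotionData* arrData)
{
	int bestIdx = -1;
	pxcI32 maxEvidence = -3;    // evidence is a signed log-odds style score
	pxcF32 maxIntensity = 0.0f;
	for (int i = 0; i < 7; ++i) // the first 7 entries are the primary emotions
	{
		if (arrData[i].evidence < maxEvidence) continue;
		if (arrData[i].intensity < maxIntensity) continue;
		maxEvidence = arrData[i].evidence;
		maxIntensity = arrData[i].intensity;
		bestIdx = i;
	}
	return bestIdx;
}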
void updateFaceFrame()
{
	// Fetch the current frame data.
	const PXCCapture::Sample* sample = senseManager->QuerySample();
	if (sample)
	{
		// Render the color stream.
		updateColorImage(sample->color);
	}

	// Refresh the SenseManager's face data.
	faceData->Update();

	// Get the number of detected faces.
	const int numFaces = faceData->QueryNumberOfDetectedFaces();

	// Rectangle marking the face region.
	PXCRectI32 faceRect = { 0 };

	// Holders for the expression data and each query result.
	PXCFaceData::ExpressionsData* expressionData;
	PXCFaceData::ExpressionsData::FaceExpressionResult expressionResult;

	// Query and draw the information for each face.
	for (int i = 0; i < numFaces; ++i)
	{
		// Get the face data.
		auto face = faceData->QueryFaceByIndex(i);
		if (face == nullptr)
		{
			continue;
		}

		// Get the face location from the color stream.
		auto detection = face->QueryDetection();
		if (detection != nullptr)
		{
			// Get the bounding rectangle of the face.
			detection->QueryBoundingRect(&faceRect);
		}

		// Draw a rectangle around the detected face.
		cv::rectangle(colorImage, cv::Rect(faceRect.x, faceRect.y, faceRect.w, faceRect.h), cv::Scalar(255, 0, 0));

		// Get the expression data for this face.
		expressionData = face->QueryExpressions();
		if (expressionData != nullptr)
		{
			// Query and display how far the mouth is open.
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_MOUTH_OPEN, &expressionResult))
			{
				std::stringstream ss;
				ss << "Mouth_Open:" << expressionResult.intensity;
				cv::putText(colorImage, ss.str(), cv::Point(faceRect.x, faceRect.y - 65), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255), 2, CV_AA);
			}

			// Query and display how far the tongue is out.
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_TONGUE_OUT, &expressionResult))
			{
				std::stringstream ss;
				ss << "TONGUE_Out:" << expressionResult.intensity;
				cv::putText(colorImage, ss.str(), cv::Point(faceRect.x, faceRect.y - 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255), 2, CV_AA);
			}

			// Query and display the smile intensity.
			if (expressionData->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_SMILE, &expressionResult))
			{
				std::stringstream ss;
				ss << "SMILE:" << expressionResult.intensity;
				cv::putText(colorImage, ss.str(), cv::Point(faceRect.x, faceRect.y - 15), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255), 2, CV_AA);
			}
		}
		else
		{
			// No expression data for this face.
			std::stringstream ss;
			ss << "NO EXPRESSION";
			cv::putText(colorImage, ss.str(), cv::Point(faceRect.x, faceRect.y - 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 255), 2, CV_AA);
		}
	}
}
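// Hedged sketch of the frame loop this updateFaceFrame() assumes: unlike
// Emotions::update() above, it never acquires or releases a frame itself, so
// the caller presumably owns AcquireFrame/ReleaseFrame. The function name
// run() and the window title are assumptions; senseManager and colorImage
// are the globals the function already uses.
void run()
{
	while (cv::waitKey(1) != 'q')
	{
		// Block until a synchronized sample is ready.
		if (senseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR)
			break;

		updateFaceFrame();

		// Show the annotated color frame, then let the pipeline advance.
		cv::imshow("Face Expressions", colorImage);
		senseManager->ReleaseFrame();
	}
}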