/** Convenience overload: runs tracking on the image via the primary
 *  OnTrack(image, size, width, height, step) overload, then copies the
 *  resulting event buffer (pEvent_) into the caller-supplied buffer 'out'.
 *  NOTE(review): 'out' must be at least paEventGetSize(pEvent_) bytes; the
 *  size is not checked here — confirm against callers.
 *  NOTE(review): assumes the primary OnTrack() populates pEvent_ before
 *  returning — verify in the overload's definition.
 *  @return the status code returned by the underlying OnTrack() call. */
int Avatar::OnTrack(void *image, int size, int width, int height, int step, char* out) { int ret = OnTrack(image, size, width, height, step); memcpy(out, pEvent_, paEventGetSize(pEvent_)); return ret; }
/** Invokes the virtual OnTrack() hook and, if the hook moved the point,
    repositions the on-screen mouse cursor to match the new location.
    ptCurrent is left in client coordinates; a copy is converted to screen
    coordinates for SetCursorPos(). */
void CallOnTrack( VWindow const& window, VPoint const& ptFrom, VPoint& ptCurrent)
{
    /* Remember where the point was before the hook ran. */
    VPoint ptBefore = ptCurrent;

    /* Let the virtual handler adjust ptCurrent. */
    OnTrack(window, ptFrom, ptCurrent);

    /* Nothing to do unless OnTrack() actually moved the point. */
    if ( ptCurrent == ptBefore )
        return;

    /* Convert a copy to screen coordinates so ptCurrent stays untouched,
       then move the cursor there. */
    VPoint ptScreen = ptCurrent;
    window.ClientToScreen(ptScreen);
    SetCursorPos(VPOINT_BREAK(ptScreen));
}
/* Command handler for the "split track" tool (state-machine style: called
 * with a wAction_t event code and the current drawing position).
 *
 * C_START: prompts the user to select a track, then deliberately falls
 *          through to C_DOWN/C_MOVE, which simply return C_CONTINUE.
 * C_UP:    locates the track under 'pos' (with onTrackInSplit raised around
 *          the OnTrack() call), rejects tracks failing CheckTrackLayer(),
 *          picks the end point nearest 'pos', and performs SplitTrack()
 *          inside an UndoStart()/UndoEnd() bracket. Returns C_TERMINATE
 *          when done (or C_CONTINUE if no valid end point was picked).
 * C_CMDMENU: locates the track under 'pos'; lazily builds two popup menus
 *          of end-point gap toggles ("End Point Mode R-L" and "T-B", each
 *          with None/one-side/other-side/Both entries wired to
 *          ChangeSplitEPMode). Derives a quadrant (0-3) from the selected
 *          end point's normalized angle, finds the connected track/end
 *          point pair, reads their EPOPT_GAPPED flags into 'mode' (bit 1 and
 *          bit 2), checks the matching toggle in the quadrant-appropriate
 *          menu, and shows that popup.
 *
 * NOTE(review): 'oldTrackCount' and 'trk1' are assigned but never read in
 * this function — presumably vestigial or read via side effects elsewhere;
 * confirm before removing.
 */
static STATUS_T CmdSplitTrack( wAction_t action, coOrd pos ) { track_p trk0, trk1; EPINX_T ep0; int oldTrackCount; int inx, mode, quad; ANGLE_T angle; switch (action) { case C_START: InfoMessage( _("Select track to split") ); case C_DOWN: case C_MOVE: return C_CONTINUE; break; case C_UP: onTrackInSplit = TRUE; trk0 = OnTrack( &pos, TRUE, TRUE ); if ( trk0 != NULL) { if (!CheckTrackLayer( trk0 ) ) { onTrackInSplit = FALSE; return C_TERMINATE; } ep0 = PickEndPoint( pos, trk0 ); onTrackInSplit = FALSE; if (ep0 < 0) { return C_CONTINUE; } UndoStart( _("Split Track"), "SplitTrack( T%d[%d] )", GetTrkIndex(trk0), ep0 ); oldTrackCount = trackCount; SplitTrack( trk0, pos, ep0, &trk1, FALSE ); UndoEnd(); return C_TERMINATE; } onTrackInSplit = FALSE; return C_TERMINATE; break; case C_CMDMENU: splitTrkTrk[0] = OnTrack( &pos, TRUE, TRUE ); if ( splitTrkTrk[0] == NULL ) return C_CONTINUE; if ( splitPopupM[0] == NULL ) { splitPopupM[0] = MenuRegister( "End Point Mode R-L" ); splitPopupMI[0][0] = wMenuToggleCreate( splitPopupM[0], "", _("None"), 0, TRUE, ChangeSplitEPMode, (void*)0 ); splitPopupMI[0][1] = wMenuToggleCreate( splitPopupM[0], "", _("Left"), 0, FALSE, ChangeSplitEPMode, (void*)1 ); splitPopupMI[0][2] = wMenuToggleCreate( splitPopupM[0], "", _("Right"), 0, FALSE, ChangeSplitEPMode, (void*)2 ); splitPopupMI[0][3] = wMenuToggleCreate( splitPopupM[0], "", _("Both"), 0, FALSE, ChangeSplitEPMode, (void*)3 ); splitPopupM[1] = MenuRegister( "End Point Mode T-B" ); splitPopupMI[1][0] = wMenuToggleCreate( splitPopupM[1], "", _("None"), 0, TRUE, ChangeSplitEPMode, (void*)0 ); splitPopupMI[1][1] = wMenuToggleCreate( splitPopupM[1], "", _("Top"), 0, FALSE, ChangeSplitEPMode, (void*)1 ); splitPopupMI[1][2] = wMenuToggleCreate( splitPopupM[1], "", _("Bottom"), 0, FALSE, ChangeSplitEPMode, (void*)2 ); splitPopupMI[1][3] = wMenuToggleCreate( splitPopupM[1], "", _("Both"), 0, FALSE, ChangeSplitEPMode, (void*)3 ); } splitTrkEP[0] = PickEndPoint( pos, splitTrkTrk[0] ); angle = 
NormalizeAngle(GetTrkEndAngle( splitTrkTrk[0], splitTrkEP[0] )); if ( angle <= 45.0 ) quad = 0; else if ( angle <= 135.0 ) quad = 1; else if ( angle <= 225.0 ) quad = 2; else if ( angle <= 315.0 ) quad = 3; else quad = 0; splitTrkFlip = (quad<2); if ( (splitTrkTrk[1] = GetTrkEndTrk( splitTrkTrk[0], splitTrkEP[0] ) ) == NULL ) { ErrorMessage( MSG_BAD_BLOCKGAP ); return C_CONTINUE; } splitTrkEP[1] = GetEndPtConnectedToMe( splitTrkTrk[1], splitTrkTrk[0] ); mode = 0; if ( GetTrkEndOption( splitTrkTrk[1-splitTrkFlip], splitTrkEP[1-splitTrkFlip] ) & EPOPT_GAPPED ) mode |= 2; if ( GetTrkEndOption( splitTrkTrk[splitTrkFlip], splitTrkEP[splitTrkFlip] ) & EPOPT_GAPPED ) mode |= 1; for ( inx=0; inx<4; inx++ ) wMenuToggleSet( splitPopupMI[quad&1][inx], mode == inx ); wMenuPopupShow( splitPopupM[quad&1] ); break; } return C_CONTINUE; }
/* Command handler for placing a hand-laid turnout (state machine driven by
 * wAction_t events). Only part of this function is visible here — the body
 * is truncated mid-way through the Dhlt.state == 2 branch; the remainder
 * lives beyond this excerpt.
 *
 * Visible behavior:
 * C_START: prompts "Place frog and drag angle", resets Dhlt.state/normalT
 *          and prepares two temporary draw segments (black, zero width).
 * C_DOWN (state 0): picks the track under 'pos' for the frog; rejects
 *          tracks answering Q_NOT_PLACE_FROGPOINTS; records the frog
 *          position/angle, draws the initial rubber-band line and moves to
 *          state 1.
 * C_MOVE/C_UP (state 1): redraws the rubber-band line to 'pos', computes the
 *          frog angle as the difference between the dragged angle and the
 *          track angle, folds it into 0..90 degrees (flipping normalA and
 *          the 'right' flag per quadrant), and derives the frog number as
 *          1/tan(frogA) (0 if the angle is too shallow). On C_MOVE it shows
 *          the angle/frog number; on C_UP it offsets the reverse line by one
 *          trackGauge to the chosen side and moves to state 2 ("Select point
 *          position").
 * State 2 (start only visible): picks the track for the points, rejects
 *          Q_NOT_PLACE_FROGPOINTS tracks, and begins normalizing the point
 *          angle relative to the reverse angle.
 *
 * NOTE(review): code is kept byte-identical; the function cannot be safely
 * restructured from this truncated view.
 */
static STATUS_T CmdHandLaidTurnout( wAction_t action, coOrd pos ) { ANGLE_T angle, angle2, angle3, reverseR, pointA, reverseA1, angle0; EPINX_T ep, ep1, ep2, ep2a=-1, ep2b=-1, pointEp0, pointEp1; DIST_T dist, reverseD, pointD; coOrd off, intersectP; coOrd pointP, pointC, pointP1, reverseC, point0; track_p trk, trk1, trk2, trk2a=NULL, trk2b=NULL, pointT; trkSeg_p segP; BOOL_T right; track_p trks[4], *trkpp; switch (action) { case C_START: InfoMessage( _("Place frog and drag angle") ); DYNARR_SET( trkSeg_t, tempSegs_da, 1 ); Dhlt.state = 0; Dhlt.normalT = NULL; tempSegs_da.cnt = 0; DYNARR_SET( trkSeg_t, tempSegs_da, 2 ); tempSegs(0).color = drawColorBlack; tempSegs(0).width = 0; tempSegs(1).color = drawColorBlack; tempSegs(1).width = 0; return C_CONTINUE; case C_DOWN: if (Dhlt.state == 0) { if ((Dhlt.normalT = OnTrack( &pos, TRUE, TRUE )) == NULL) break; if ( QueryTrack( Dhlt.normalT, Q_NOT_PLACE_FROGPOINTS ) ) { ErrorMessage( MSG_CANT_PLACE_FROGPOINTS, _("frog") ); Dhlt.normalT = NULL; break; } Dhlt.normalP = Dhlt.reverseP = Dhlt.reverseP1 = pos; Dhlt.normalA = GetAngleAtPoint( Dhlt.normalT, Dhlt.normalP, NULL, NULL ); InfoMessage( _("Drag to set angle") ); DrawLine( &tempD, Dhlt.reverseP, Dhlt.reverseP1, 0, wDrawColorBlack ); Dhlt.state = 1; pointC = pointP = pointP1 = reverseC = zero; return C_CONTINUE; } case C_MOVE: case C_UP: if (Dhlt.normalT == NULL) break; if (Dhlt.state == 1) { DrawLine( &tempD, Dhlt.reverseP, Dhlt.reverseP1, 0, wDrawColorBlack ); Dhlt.reverseP1 = pos; Dhlt.reverseA = FindAngle( Dhlt.reverseP, Dhlt.reverseP1 ); Dhlt.frogA = NormalizeAngle( Dhlt.reverseA - Dhlt.normalA ); /*printf( "RA=%0.3f FA=%0.3f ", Dhlt.reverseA, Dhlt.frogA );*/ if (Dhlt.frogA > 270.0) { Dhlt.frogA = 360.0-Dhlt.frogA; right = FALSE; } else if (Dhlt.frogA > 180) { Dhlt.frogA = Dhlt.frogA - 180.0; Dhlt.normalA = NormalizeAngle( Dhlt.normalA + 180.0 ); /*ep = Dhlt.normalEp0; Dhlt.normalEp0 = Dhlt.normalEp1; Dhlt.normalEp1 = ep;*/ right = TRUE; } else if (Dhlt.frogA > 90.0) 
{ Dhlt.frogA = 180.0 - Dhlt.frogA; Dhlt.normalA = NormalizeAngle( Dhlt.normalA + 180.0 ); /*ep = Dhlt.normalEp0; Dhlt.normalEp0 = Dhlt.normalEp1; Dhlt.normalEp1 = ep;*/ right = FALSE; } else { right = TRUE; } /*printf( "NA=%0.3f FA=%0.3f R=%d\n", Dhlt.normalA, Dhlt.frogA, right );*/ Dhlt.frogNo = tan(D2R(Dhlt.frogA)); if (Dhlt.frogNo > 0.01) Dhlt.frogNo = 1.0/Dhlt.frogNo; else Dhlt.frogNo = 0.0; if (action == C_MOVE) { if (Dhlt.frogNo != 0) { InfoMessage( _("Angle = %0.2f Frog# = %0.2f"), Dhlt.frogA, Dhlt.frogNo ); } else { InfoMessage( _("Frog angle is too close to 0") ); } } else { InfoMessage( _("Select point position") ); Dhlt.state = 2; Translate( &Dhlt.reverseP, Dhlt.reverseP, Dhlt.normalA+(right?+90:-90), trackGauge ); Translate( &Dhlt.reverseP1, Dhlt.reverseP1, Dhlt.normalA+(right?+90:-90), trackGauge ); } DrawLine( &tempD, Dhlt.reverseP, Dhlt.reverseP1, 0, wDrawColorBlack ); return C_CONTINUE; } else if ( Dhlt.state == 2 ) { DrawSegs( &tempD, zero, 0.0, &tempSegs(0), tempSegs_da.cnt, trackGauge, wDrawColorBlack ); tempSegs_da.cnt = 0; pointP = pos; if ((pointT = OnTrack( &pointP, TRUE, TRUE )) == NULL) break; if ( QueryTrack( pointT, Q_NOT_PLACE_FROGPOINTS ) ) { ErrorMessage( MSG_CANT_PLACE_FROGPOINTS, _("points") ); break; } dist = FindDistance( Dhlt.normalP, pointP ); pointA = GetAngleAtPoint( pointT, pointP, &pointEp0, &pointEp1 ); angle = NormalizeAngle( pointA + 180.0 - Dhlt.reverseA ); PTRACE(( "rA=%0.1f pA=%0.1f a=%0.1f ", Dhlt.reverseA, pointA, angle )) if ( angle > 90.0 && angle < 270.0 ) { pointA = NormalizeAngle( pointA + 180.0 ); angle = NormalizeAngle( angle + 180.0 ); PTRACE(( " {pA=%0.1f a=%0.1f} ", pointA, angle )) } else {
/**
 * Handles WM_COMMAND messages from the markup UI controls.
 *
 * @param wParam   low word = control id, high word = notification code
 * @param lParam   unused here
 * @param bHandled forwarded to OnTrack() when stepping frames
 * @return 0 (message handled)
 *
 * Side effects: may (re)train, save, or replace the active classifier, move
 * the video slider/selection, add samples to sampleSet, and invalidate
 * activeRgn for repaint. If guesses are shown and the action invalidated
 * them, the classifier is re-run on the current frame first.
 *
 * Fix: removed the duplicated "to to" from the Motion and Gesture
 * insufficient-samples error messages.
 */
LRESULT CVideoMarkup::OnCommand( UINT, WPARAM wParam, LPARAM lParam, BOOL& bHandled) {
    USES_CONVERSION;
    bool needToRerunClassifier = false;
    long sliderPosition, sliderRange, selStart, selEnd;

    switch(LOWORD(wParam)) { // this tells us which control the message came from

        case IDC_TRAINBUTTON: {
            WCHAR errorMessage[1000] = L"Sorry, you don't have enough examples to train this recognizer. Please add some more examples and try again.\n";
            if (!classifier->ContainsSufficientSamples(&sampleSet)) {
                // Append a mode-specific hint about what is still missing.
                if (recognizerMode == ADABOOST_FILTER) {
                    wcscat(errorMessage, L"To build an Adaboost recognizer you need at least 3 positive and 3 negative examples.");
                } else if (recognizerMode == MOTION_FILTER) {
                    wcscat(errorMessage, L"To build a Motion recognizer you need to create one or more 'Motion Examples' by making selections while in the current recognizer mode.");
                } else if (recognizerMode == GESTURE_FILTER) {
                    wcscat(errorMessage, L"To build a gesture recognizer you need to select a range of frames using the 'Mark In' and 'Mark Out' buttons.");
                }
                MessageBox(errorMessage, L"Error Training Recognizer", MB_OK | MB_ICONERROR);
                break; // leave needToRerunClassifier false
            }
            // Disable the UI while the (potentially slow) training runs.
            EnableControls(FALSE);
            classifier->StartTraining(&sampleSet);
            EnableControls(TRUE);
            needToRerunClassifier = true;
            break;
        }

        case IDC_FRAMELEFT:
        case IDC_FRAMERIGHT:
            // Step the slider one frame backward/forward and reload that frame.
            sliderPosition = (long) ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETPOS, 0, 0);
            sliderPosition = (wParam==IDC_FRAMELEFT) ? sliderPosition-1 : sliderPosition+1;
            ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_SETPOS, TRUE, sliderPosition);
            OnTrack(0,0,0,bHandled); // reuse the slider-track handler to seek
            scrubbingVideo = false;
            needToRerunClassifier = true;
            break;

        case IDC_MARKIN:
        case IDC_MARKOUT:
            if (recognizerMode != GESTURE_FILTER) break; // marks only apply to gestures
            sliderPosition = (long) ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETPOS, 0, 0);
            sliderRange = (long) ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETRANGEMAX, 0, 0);
            selStart = ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETSELSTART, 0, 0);
            selEnd = ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETSELEND, 0, 0);
            if (wParam==IDC_MARKIN) {
                // Selection now starts here; keep the end unless it is not ahead of us.
                selStart = sliderPosition;
                selEnd = (selEnd>sliderPosition) ? selEnd : sliderRange;
            } else {
                // Selection now ends here; keep the start unless it is not behind us.
                selStart = (selStart<sliderPosition) ? selStart : 0;
                selEnd = sliderPosition;
            }
            ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_SETSEL, TRUE, MAKELONG(selStart, selEnd));
            break;

        case IDC_GRABRANGE: {
            // Turn the selected frame range into a motion-trajectory sample.
            selStart = ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETSELSTART, 0, 0);
            selEnd = ::SendDlgItemMessage(m_videoControl, IDC_VIDEOSLIDER, TBM_GETSELEND, 0, 0);
            // TODO: display informative error message if not enough frames are selected
            if (selEnd - selStart < GESTURE_MIN_TRAJECTORY_LENGTH) break;
            MotionTrack mt = m_videoLoader.GetTrajectoryInRange(selStart, selEnd);
            TrainingSample *sample = new TrainingSample(m_videoLoader.copyFrame, mt, m_sampleListView, m_hImageList, GROUPID_RANGESAMPLES);
            sampleSet.AddSample(sample);
            break;
        }

        case IDC_SHOWBUTTON:
            showGuesses = !showGuesses;
            needToRerunClassifier = true;
            break;

        case IDC_QUICKTEST:
            m_classifierTester.TestClassifierOnVideo(classifier, &m_videoLoader, recognizerMode);
            break;

        case IDC_SAVEFILTER: {
            classifier->Save();
            list<Classifier*>::iterator c_iter =
                find(savedClassifiers.begin(), savedClassifiers.end(), classifier);
            if (c_iter == savedClassifiers.end()) {
                // current classifier is not in saved list, so we will add it
                savedClassifiers.push_back(classifier);
                // also add to the listbox of saved classifiers
                m_filterSelect.AddSavedFilter(classifier);
            }
            // disable the "train" button until we start a new classifier
            m_filterSelect.GetDlgItem(IDC_TRAINBUTTON).EnableWindow(FALSE);
            // m_filterSelect.GetDlgItem(IDC_SAVEFILTER).EnableWindow(FALSE);
            // m_filterSelect.GetDlgItem(IDC_FILTER_THRESHOLD).EnableWindow(FALSE);
            break;
        }

        case IDC_FILTER_COMBO:
            if (HIWORD(wParam) == CBN_SELCHANGE) { // the user selected a new filter type
                int selectedIndex = ComboBox_GetCurSel(m_filterSelect.GetDlgItem(IDC_FILTER_COMBO));
                recognizerMode = selectedIndex;
                switch(selectedIndex) {
                    case COLOR_FILTER:      ReplaceClassifier(new ColorClassifier());      break;
                    case SHAPE_FILTER:      ReplaceClassifier(new ShapeClassifier());      break;
                    case BRIGHTNESS_FILTER: ReplaceClassifier(new BrightnessClassifier()); break;
                    case SIFT_FILTER:       ReplaceClassifier(new SiftClassifier());       break;
                    case ADABOOST_FILTER:   ReplaceClassifier(new HaarClassifier());       break;
                    case MOTION_FILTER:     ReplaceClassifier(new MotionClassifier());     break;
                    case GESTURE_FILTER:    ReplaceClassifier(new GestureClassifier());    break;
                }
            }
            needToRerunClassifier = true;
            break;
    }

    if (showGuesses && needToRerunClassifier) {
        RunClassifierOnCurrentFrame();
    }
    InvalidateRgn(activeRgn, FALSE);
    return 0;
}