Code Example #1
/********************************************************************
Initialize the Tracker for the object
Exceptions:
    None
*********************************************************************/
bool Object::InitializeObjectTracker( Matrixu* pFrameImageColor,
                                      Matrixu* pFrameImageGray,
                                      int frameInd,
                                      uint videoLength,
                                      Matrixu* pFrameDisplay,
                                      Matrixu* pFrameDisplayTraining,
                                      Matrixu* pFrameImageHSV,
                                      Matrixf* pGroundTruthMatrix )
{
    try
    {
        if ( m_trackerParametersPtr.get() == NULL )
        {
            abortError( __LINE__, __FILE__, "m_pSimpleTrackerParameters is NULL" );
        }

        if ( m_classifierParamPtr.get() == NULL )
        {
            abortError( __LINE__, __FILE__, "m_pClassifierParams is NULL" );
        }

        if( frameInd != m_trackerParametersPtr->m_initState[4] )
            return false; // initialization info is not yet available at frameInd
        else
        {
            m_trackerPtr->InitializeTrackerWithParameters(
                pFrameImageColor, pFrameImageGray, frameInd, videoLength, m_trackerParametersPtr, m_classifierParamPtr,
                pFrameDisplay, pFrameDisplayTraining, pFrameImageHSV, pGroundTruthMatrix );
            return true;
        }

    }
    EXCEPTION_CATCH_AND_ABORT( "Error in InitializeTrackerParameters" )
}
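
Every example on this page reports fatal conditions through the same helper, called as abortError(__LINE__, __FILE__, message). Its real definition lives in the projects listed below; purely for orientation, here is a minimal sketch of a compatible helper (the signature, the default argument, and the use of std::abort are assumptions, not the projects' actual implementation):

#include <cstdio>
#include <cstdlib>

// Hypothetical stand-in for the abortError( __LINE__, __FILE__, msg ) helper
// used throughout these examples: print the source location and message to
// stderr, then terminate the process.
inline void abortError( int line, const char* file, const char* message = "fatal error" )
{
    std::fprintf( stderr, "%s(%d): %s\n", file, line, message );
    std::abort( );
}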
Code Example #2
File: Tracker.cpp  Project: caomw/MILTracker1.01
void			Tracker::replayTracker(vector<Matrixu> &vid, string statesfile, string outputvid, uint R, uint G, uint B)
{
	Matrixf states;
	states.DLMRead(statesfile.c_str());
	Matrixu colorframe;

	// save video file
	CvVideoWriter* w = NULL;
	if( ! outputvid.empty() ){
		w = cvCreateVideoWriter( outputvid.c_str(), CV_FOURCC('I','Y','U','V'), 15, cvSize(vid[0].cols(), vid[0].rows()), 3 );
		if( w==NULL ) abortError(__LINE__,__FILE__,"Error opening video file for output");
	}

	for( uint k=0; k<vid.size(); k++ )
	{	
		vid[k].conv2RGB(colorframe);
		colorframe.drawRect(states(k,2),states(k,3),states(k,0),states(k,1),1,0,2,R,G,B);
		colorframe.drawText(("#"+int2str(k,3)).c_str(),1,25,255,255,0);
		colorframe._keepIpl=true;
		colorframe.display(1,2);
		cvWaitKey(1);
		if( w != NULL )
			cvWriteFrame( w, colorframe.getIpl() );
		colorframe._keepIpl=false; colorframe.freeIpl();
	}

	// clean up
	if( w != NULL )
		cvReleaseVideoWriter( &w );
}
Code Example #3
File: XMLHttpRequest.cpp  Project: Gin-Rye/duibrowser
void XMLHttpRequest::loadRequestSynchronously(ResourceRequest& request, ExceptionCode& ec)
{
    ASSERT(!m_async);
    Vector<char> data;
    ResourceError error;
    ResourceResponse response;

    {
        // avoid deadlock in case the loader wants to use JS on a background thread
        KJS::JSLock::DropAllLocks dropLocks;
        if (m_doc->frame())
            m_identifier = m_doc->frame()->loader()->loadResourceSynchronously(request, error, response, data);
    }

    m_loader = 0;

    // No exception for file:/// resources, see <rdar://problem/4962298>.
    // Also, if we have an HTTP response, then it wasn't a network error in fact.
    if (error.isNull() || request.url().isLocalFile() || response.httpStatusCode() > 0) {
        processSyncLoadResults(data, response, ec);
        return;
    }

    if (error.isCancellation()) {
        abortError();
        ec = XMLHttpRequestException::ABORT_ERR;
        return;
    }

    networkError();
    ec = XMLHttpRequestException::NETWORK_ERR;
}
Code Example #4
void XMLHttpRequest::loadRequestSynchronously(ResourceRequest& request, ExceptionCode& ec)
{
    ASSERT(!m_async);
    Vector<char> data;
    ResourceError error;
    ResourceResponse response;

    unsigned long identifier = ThreadableLoader::loadResourceSynchronously(scriptExecutionContext(), request, error, response, data);
    m_loader = 0;

    // No exception for file:/// resources, see <rdar://problem/4962298>.
    // Also, if we have an HTTP response, then it wasn't a network error in fact.
    if (error.isNull() || request.url().isLocalFile() || response.httpStatusCode() > 0) {
        processSyncLoadResults(identifier, data, response, ec);
        return;
    }

    if (error.isCancellation()) {
        abortError();
        ec = XMLHttpRequestException::ABORT_ERR;
        return;
    }

    networkError();
    ec = XMLHttpRequestException::NETWORK_ERR;
}
Code Example #5
    /****************************************************************
    CreateAndInitializeClassifier
        Creates a classifier according to the type specified in the
        parameters. Initializes appropriate classifier.
    ****************************************************************/
    StrongClassifierBasePtr    StrongClassifierFactory::CreateAndInitializeClassifier( StrongClassifierParametersBasePtr strongClassifierParametersBasePtr )
    {
        try
        {
            StrongClassifierBasePtr strongClassifierBasePtr;

            switch ( strongClassifierParametersBasePtr->GetClassifierType( ) )
            {
                case ONLINE_ADABOOST:
                    strongClassifierBasePtr = StrongClassifierBasePtr( new AdaBoostClassifier( strongClassifierParametersBasePtr ) );
                    break;
                case ONLINE_STOCHASTIC_BOOST_MIL:
                    strongClassifierBasePtr = StrongClassifierBasePtr( new MILBoostClassifier( strongClassifierParametersBasePtr ) );
                    break;
                case ONLINE_ENSEMBLE_BOOST_MIL:
                    strongClassifierBasePtr = StrongClassifierBasePtr( new MILEnsembleClassifier( strongClassifierParametersBasePtr ) );
                    break;
                case ONLINE_ANY_BOOST_MIL:
                    strongClassifierBasePtr    = StrongClassifierBasePtr( new MILAnyBoostClassifier( strongClassifierParametersBasePtr ) );
                    break;
                default:
                    abortError(__LINE__,__FILE__,"Incorrect pStrongClassifierBase type!");
            }

            ASSERT_TRUE( strongClassifierBasePtr.get( ) != NULL );

            return strongClassifierBasePtr;
        }
        EXCEPTION_CATCH_AND_ABORT("Failed to create and initialize the classifier" )
    }
Code Example #6
    /****************************************************************
    Tracker::ReplayTrackers
        Replays the trackers based on the saved output states.
    Exceptions:
        None
    ****************************************************************/
    void    Tracker::ReplayTrackers(vector<Matrixu> &vid, vector<string> statesfile, string outputvid, Matrixu colors)
    {
        Matrixu states;

        vector<Matrixu> resvid(vid.size());
        Matrixu colorframe;

        // save output video file
        CvVideoWriter* w = NULL;
        if ( ! outputvid.empty() )
        {
            w = cvCreateVideoWriter( outputvid.c_str(), CV_FOURCC('x','v','i','d'), 15, cvSize(vid[0].cols(), vid[0].rows()), 3 );

            if ( w==NULL ) 
            {
                abortError(__LINE__,__FILE__,"Error opening videoList file for output");
            }
        }

        for ( uint k=0; k<vid.size(); k++ )
        {
            vid[k].conv2RGB(resvid[k]);
            resvid[k].drawText(("#"+int2str(k,3)).c_str(),1,25,255,255,0);
        }

        for ( uint j=0; j < statesfile.size(); j++ )
        {
            states.DLMRead(statesfile[j].c_str());
            for( uint k=0; k<vid.size(); k++ )
            {
                resvid[k].drawRect(states(k,3),states(k,2),states(k,0),states(k,1),1,0,3,colors(j,0),colors(j,1),colors(j,2));
            }
        }

        for ( uint k=0; k<vid.size(); k++ )
        {
            resvid[k]._keepIpl=true;
            resvid[k].display(0,2);
            cvWaitKey(1);
            if( w!=NULL && k<vid.size()-1)
            {            
                Matrixu::WriteFrame(w, resvid[k]);
            }
            resvid[k]._keepIpl=false; resvid[k].freeIpl();
        }

        // clean up
        if( w != NULL )
        {
            cvReleaseVideoWriter( &w );
        }
    }
Code Example #7
File: Tracker.cpp  Project: mjacobsen/milboosttracker
void Tracker::update_classifier(Matrixu *img)
{
	static SampleSet posx, negx;

	if (!img->isInitII())
		abortError(__LINE__,__FILE__,"Integral image not initialized before calling update_classifier");

	// train location clf (negx are randomly selected from image, posx is just the current tracker location)
	negx.sampleImage(img, _x, _y, _w, _h, (1.5f*_trparams->_srchwinsz), _trparams->_posradtrain+5, _trparams->_negnumtrain);
	posx.sampleImage(img, _x, _y, _w, _h, _trparams->_posradtrain, 0, _trparams->_posmaxtrain);
	_clf->update(posx, negx);

	// clean up
	posx.clear(); negx.clear();
}
Code Example #8
int		main(int argc, char * argv[])
{
	string clipname;
	vector<string> clipnames;
	switch( 2 ){
		case 1: // FACE TRACK DEMO
			FaceTrackDemo(argc>1 ? argv[1] : 0);
			break;
		case 2: // RUN ON VIDEO
			if( argc<3 ) abortError(__LINE__,__FILE__,"Not enough parameters.  See README file.");
			Exp(argv[1],argv[2],(argc>3?atoi(argv[3]):1),(argc>4?atoi(argv[4]):1),(argc>5?atoi(argv[5])==1:0));
			break;					
		default:
			break;
	}

	return 0;
}
Code Example #9
/********************************************************************
SaveObjectStatesAllFrames
    Notify the tracker to save the state for the object for all frames
    to trajectory file.
Exceptions:
    None
*********************************************************************/
void Object::SaveObjectStatesAllFrames( )
{
    try
    {
        if ( m_trackerParametersPtr.get() == NULL )
        {
            abortError(__LINE__,__FILE__,"m_pSimpleTrackerParameters is NULL");
        }

        m_trackerPtr->SaveStates();

        if( m_cameraTrackingParametersPtr->m_calculateTrackingError )
        {
            m_trackerPtr->CalculateTrackingErrroFromGroundTruth();
        }
    }
    EXCEPTION_CATCH_AND_ABORT( "Error while saving objects states on all the frames" );

}
Code Example #10
File: Tracker.cpp  Project: mjacobsen/milboosttracker
double Tracker::update_location(Matrixu *img)
{
	static SampleSet detectx;
	static vectorf prob;
	double resp;

	if (!img->isInitII())
		abortError(__LINE__,__FILE__,"Integral image not initialized before calling update_location");

	// run current clf on search window
	detectx.sampleImage(img, _x, _y, _w, _h, (float)_trparams->_srchwinsz);
	prob = _clf->classify(detectx,_trparams->_useLogR);

	// find best location
	int bestind = max_idx(prob);
	resp=prob[bestind];
	_x = (float)detectx[bestind]._col;
	_y = (float)detectx[bestind]._row; 

	// clean up
	detectx.clear();
	return resp;
}
Code Example #11
    /****************************************************************
    InitializeWeakClassifiers

    Exceptions:
        None
    ****************************************************************/
    void StrongClassifierBase::InitializeWeakClassifiers( )
    {
        try
        {
            ASSERT_TRUE( m_strongClassifierParametersBasePtr != NULL );

            m_selectorList.resize( m_strongClassifierParametersBasePtr->m_numberOfSelectedWeakClassifiers, 0 );
            m_weakClassifierPtrList.resize( m_strongClassifierParametersBasePtr->m_totalNumberOfWeakClassifiers );

            for ( int featureIndex = 0; featureIndex < m_strongClassifierParametersBasePtr->m_totalNumberOfWeakClassifiers; featureIndex++ )
            {
                if ( m_strongClassifierParametersBasePtr->m_weakClassifierType == STUMP )
                {
                    m_weakClassifierPtrList[featureIndex] = new OnlineStumpsWeakClassifier(featureIndex);
                    m_weakClassifierPtrList[featureIndex]->SetLearningRate( m_strongClassifierParametersBasePtr->m_learningRate );
                }
                else if ( m_strongClassifierParametersBasePtr->m_weakClassifierType == WEIGHTED_STUMP )
                {
                    m_weakClassifierPtrList[featureIndex] = new WeightedStumpsWeakClassifier(featureIndex);
                    m_weakClassifierPtrList[featureIndex]->SetLearningRate( m_strongClassifierParametersBasePtr->m_learningRate );
                }
                else if ( m_strongClassifierParametersBasePtr->m_weakClassifierType == PERCEPTRON )
                {
                    m_weakClassifierPtrList[featureIndex] = new PerceptronWeakClassifier(featureIndex);
                    m_weakClassifierPtrList[featureIndex]->SetLearningRate( m_strongClassifierParametersBasePtr->m_learningRate );
                }
                else
                {
                    abortError(__LINE__,__FILE__,"incorrect weak pStrongClassifierBase name");
                }
            }

            ASSERT_TRUE( !m_weakClassifierPtrList.empty() );
        }
        EXCEPTION_CATCH_AND_ABORT( "Failed to initialize the weak classifiers" );
    }
Code Example #12
    /****************************************************************
    InitializeFeatureVector

    Exceptions:
        None
    ****************************************************************/
    void StrongClassifierBase::InitializeFeatureVector( )
    {
        try
        {
            ASSERT_TRUE( m_strongClassifierParametersBasePtr != NULL );

            //create the feature vector according to the feature type
            if ( m_strongClassifierParametersBasePtr->m_featureParametersPtr->GetFeatureType() == Features::HAAR_LIKE )
            {
                m_featureVectorPtr = Features::FeatureVectorPtr(new Features::HaarFeatureVector( ));
            }
            else if ( m_strongClassifierParametersBasePtr->m_featureParametersPtr->GetFeatureType() == Features::MULTI_DIMENSIONAL_COLOR_HISTOGRAM )
            {
                m_featureVectorPtr = Features::FeatureVectorPtr( new Features::MultiDimensionalColorHistogramFeatureVector( ) );
            }
            else if ( m_strongClassifierParametersBasePtr->m_featureParametersPtr->GetFeatureType() == Features::CULTURE_COLOR_HISTOGRAM )
            {
                m_featureVectorPtr = Features::FeatureVectorPtr( new Features::CultureColorHistogramFeatureVector( ) );
            }
            else if ( m_strongClassifierParametersBasePtr->m_featureParametersPtr->GetFeatureType() == Features::HAAR_COLOR_HISTOGRAM )
            {
                m_featureVectorPtr = Features::FeatureVectorPtr( new Features::HaarAndColorHistogramFeatureVector( ) );
            }
            else
            {
                abortError( __LINE__, __FILE__, "Unsupported Feature Type" );
            }

            //Generate feature vectors
            ASSERT_TRUE( m_featureVectorPtr != NULL );

            //Generate Features
            m_featureVectorPtr->Generate( m_strongClassifierParametersBasePtr->m_featureParametersPtr );
        }
        EXCEPTION_CATCH_AND_ABORT( "Failed to initialize the feature vector" );
    }
Code Example #13
    /********************************************************************
    CalculateTrackingErrroFromGroundTruth
        Calculate the tracking error based on m_GroundTruthMatrix
    Exceptions:
        None
    *********************************************************************/
    void SimpleTracker::CalculateTrackingErrroFromGroundTruth( )
    {

        try
        {
            if( m_groundTruthMatrix.rows( ) == 0 )
                return;
            
            int videoLength = m_states.rows();

            Matrixf m_statesError( videoLength, 2 );
            //column 0: center location error in pixels (Euclidean distance between tracked and ground-truth centers)
            //column 1: F-measure of the overlap between the tracked and ground-truth bounding boxes
            for( int frameInd=0; frameInd < videoLength; frameInd++ )
            {
                float centerTrackedX = m_states( frameInd, 0 ) + m_states(frameInd, 2) / 2;
                float centerTrackedY = m_states( frameInd, 1 ) + m_states(frameInd, 3) / 2;
                float centerX = m_groundTruthMatrix( frameInd, 1 ) + m_groundTruthMatrix(frameInd, 3) / 2;
                float centerY = m_groundTruthMatrix( frameInd, 2 ) + m_groundTruthMatrix(frameInd, 4) / 2;
                
                m_statesError( frameInd, 0 ) = sqrt( (centerX - centerTrackedX)*(centerX - centerTrackedX) +
                                                     (centerY - centerTrackedY)*(centerY - centerTrackedY) );

                //calculate overlapping area
                float area = m_groundTruthMatrix(frameInd, 3)* m_groundTruthMatrix(frameInd, 4);
                float area_tracked =  m_states(frameInd, 2) * m_states(frameInd, 3);
                
                float xleft = max( m_states( frameInd, 0 ), m_groundTruthMatrix( frameInd, 1 ) );
                float xright = min( m_states( frameInd, 0 ) + m_states( frameInd, 2 ), 
                                     m_groundTruthMatrix( frameInd, 1 ) + m_groundTruthMatrix( frameInd, 3 ) );

                float width = xright - xleft;  

                float yTop = max( m_states( frameInd, 1), m_groundTruthMatrix( frameInd, 2 ) );
                float yBottom = min( m_states( frameInd, 1 ) + m_states( frameInd, 3 ), 
                                    m_groundTruthMatrix( frameInd, 2 ) + m_groundTruthMatrix(frameInd, 4) );

                float height = yBottom - yTop;
                
                float overLappingArea = 0; 
                if( width > 0 && height > 0 )
                {
                    overLappingArea = width*height;
                }

                float precision = overLappingArea/area_tracked;
                float recall = overLappingArea/area;
                //m_statesError( frameInd, 1 ) = ( 2 * overLappingArea - area_tracked )/area ;
                //guard against 0/0 when the boxes do not overlap at all
                m_statesError( frameInd, 1 ) = ( precision + recall > 0 ) ?
                    2*precision*recall/(precision+recall) : 0; //f-measure
            }

            // save states
            if( !m_simpleTrackerParamsPtr->m_trajSave.empty() )
            {
                //write into the specified text file
                bool scs = m_statesError.DLMWrite( ( m_simpleTrackerParamsPtr->m_trajSave+"_Error.txt" ).c_str( ) );
                if( !scs )
                {
                    abortError(__LINE__,__FILE__,"error saving trajectory error file");
                }
            }            
        }
        EXCEPTION_CATCH_AND_ABORT("Error while calculating and saving trajectory error file.");
    }
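
The second error column above is an F-measure of bounding-box overlap: precision is the overlap area divided by the tracked-box area, recall is the overlap area divided by the ground-truth area. For reference, the same computation isolated into a self-contained sketch (the Rect struct and function name are illustrative, not part of the tracker code):

#include <algorithm>

struct Rect { float x, y, w, h; };   // illustrative x/y/width/height rectangle

// F-measure of the overlap between a tracked box and a ground-truth box,
// mirroring the per-frame computation in CalculateTrackingErrroFromGroundTruth.
inline float OverlapFMeasure( const Rect& tracked, const Rect& truth )
{
    float width   = std::min( tracked.x + tracked.w, truth.x + truth.w ) - std::max( tracked.x, truth.x );
    float height  = std::min( tracked.y + tracked.h, truth.y + truth.h ) - std::max( tracked.y, truth.y );
    float overlap = ( width > 0 && height > 0 ) ? width * height : 0.0f;

    float precision = overlap / ( tracked.w * tracked.h );
    float recall    = overlap / ( truth.w  * truth.h  );
    return ( precision + recall > 0 ) ? 2 * precision * recall / ( precision + recall ) : 0.0f;
}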
Code Example #14
File: Tracker.cpp  Project: caomw/MILTracker1.01
void			SimpleTracker::track_frames(vector<Matrixu> &video, SimpleTrackerParams p, ClfStrongParams *clfparams)
{
	CvVideoWriter* w = NULL;
	Matrixu states(video.size(), 4);
	Matrixu t; string pnd="#";

	// save video file
	if( ! p._vidsave.empty() ){
		w = cvCreateVideoWriter( p._vidsave.c_str(), CV_FOURCC('I','Y','U','V'), 15, cvSize(video[0].cols(), video[0].rows()), 3 );
		if( w==NULL ) abortError(__LINE__,__FILE__,"Error opening video file for output");
	}

	// initialization
	int frameind = 0;
	if( p._initWithFace ){  // init with face
		fprintf(stderr,"Searching for face...\n");
		while( !Tracker::initFace(&p,video[frameind]) ){
			video[frameind].conv2RGB(t); t._keepIpl=true;
			t.drawText(("#"+int2str(frameind,3)).c_str(),1,25,255,255,0);
			// display on screen & write to disk
			if( p._disp ){ t.display(1); cvWaitKey(1); }
			if( w != NULL ) cvWriteFrame( w, t.getIpl() );
			t._keepIpl=false; t.freeIpl();
			//frameind++;
		}
		clfparams->_ftrParams->_width	= (uint)p._initstate[2];
		clfparams->_ftrParams->_height	= (uint)p._initstate[3];
		init(video[frameind], p, clfparams);
		
	} // init with params
	else{
		clfparams->_ftrParams->_width	= (uint)p._initstate[2];
		clfparams->_ftrParams->_height	= (uint)p._initstate[3];
		init(video[0], p, clfparams);
		states(frameind,0) = (uint)_curState[0];
		states(frameind,1) = (uint)_curState[1];
		states(frameind,2) = (uint)_curState[2];
		states(frameind,3) = (uint)_curState[3];
		//frameind++;
	}

	// track rest of frames
	StopWatch sw(true); double ttt;
	for( ; frameind<(int)video.size(); frameind++ )
	{
		ttt = sw.Elapsed(true);
		fprintf(stderr,"%s%d Frames/%f sec = %f FPS",ERASELINE,frameind,ttt,((double)frameind)/ttt);
		track_frame(video[frameind],t);
		if( p._disp || w!= NULL ){
			video[frameind] = t;
			video[frameind].drawText(("#"+int2str(frameind,3)).c_str(),1,25,255,255,0);
			video[frameind].createIpl();
			video[frameind]._keepIpl = true;
			// display on screen
			if( p._disp ){
				video[frameind].display(1);
				cvWaitKey(1);
			}
			// save video
			if( w != NULL && frameind<(int)video.size()-1 )
				Matrixu::WriteFrame(w, video[frameind]);
			video[frameind]._keepIpl = false;
			video[frameind].freeIpl();
		}
		
		for( int s=0; s<4; s++ ) states(frameind,s) = (uint)_curState[s];

	}

	// save states
	if( !p._trsave.empty() ){
		bool scs = states.DLMWrite(p._trsave.c_str());
		if( !scs ) abortError(__LINE__,__FILE__,"error saving states to file");
	}

	// clean up
	if( w != NULL )
		cvReleaseVideoWriter( &w );

}
Code Example #15
void	Exp(const char* dir, const char* name, int strong, int trial, bool savevid)
{
	bool success=true; 
	randinitalize(trial);

	string dataDir= "E://miltracker//" + string(dir);
	if( dataDir[dataDir.length()-2] != '//' ) dataDir+="//";
	dataDir += (string(name) + "//");

	Matrixf frameb, initstate;
	vector<Matrixu> vid;
	
	// read in frames and ground truth
	success=frameb.DLMRead((dataDir + name + "_frames.txt").c_str());
	if( !success ) abortError(__LINE__,__FILE__,"Error: frames file not found.");
	success=initstate.DLMRead((dataDir + name + "_gt.txt").c_str());
	if( !success ) abortError(__LINE__,__FILE__,"Error: gt file not found.");

	// TRACK

	vid.clear();
	vid = Matrixu::LoadVideo((dataDir+"imgs/").c_str(),"img", "png", (int)frameb(0), (int)frameb(1), 5, false);
	SimpleTracker tr;
	SimpleTrackerParams		trparams;
	vector<Matrixu>			saveseq;
	string paramname = "";
	

	////////////////////////////////////////////////////////////////////////////////////////////
	// PARAMETERS	

	ClfStrongParams			*clfparams;
	
	// strong model
	switch( strong ){
		case 1:		// MILTrack
			clfparams = new ClfMilBoostParams();
			((ClfMilBoostParams*)clfparams)->_numSel		= 50;
			((ClfMilBoostParams*)clfparams)->_numFeat		= 250;
			paramname += "_MIL";
			trparams._posradtrain							= 4.0f;
			trparams._negnumtrain							= 65;
			((ClfMilBoostParams*)clfparams)->ifPF = false;
			break;
		case 2:		// OAB1
			clfparams = new ClfAdaBoostParams();
			((ClfAdaBoostParams*)clfparams)->_numSel		= 50;
			((ClfAdaBoostParams*)clfparams)->_numFeat		= 250;
			paramname += "_OAB1";
			trparams._posradtrain							= 1.0f;
			trparams._negnumtrain							= 65;
			break;
		case 3:		// OAB5
			clfparams = new ClfAdaBoostParams();
			((ClfAdaBoostParams*)clfparams)->_numSel		= 50;
			((ClfAdaBoostParams*)clfparams)->_numFeat		= 250;
			paramname += "_OAB5";
			trparams._posradtrain							= 4.0f;
			trparams._negnumtrain							= 65;
			break;
		case 4:		// PFMIL
			clfparams = new ClfMilBoostParams();
			((ClfMilBoostParams*)clfparams)->_numSel		= 50;
			((ClfMilBoostParams*)clfparams)->_numFeat		= 250;
			paramname += "_PFMIL";
			trparams._posradtrain							= 4.0f;
			trparams._negnumtrain							= 65;
			((ClfMilBoostParams*)clfparams)->ifPF = true;
			break;

		default:
			abortError(__LINE__,__FILE__,"Error: invalid classifier choice.");
	}

	// feature parameters
	FtrParams *ftrparams;
	HaarFtrParams haarparams;
	ftrparams = &haarparams;

	clfparams->_ftrParams	= ftrparams;

	// tracking parameters
	trparams._init_negnumtrain = 65;
	trparams._init_postrainrad = 3.0f;
	trparams._initstate[0]	= initstate(0,0);
	trparams._initstate[1]	= initstate(0,1);
	trparams._initstate[2]	= initstate(0,2);
	trparams._initstate[3]	= initstate(0,3);
	trparams._srchwinsz		= 25;
	trparams._negsamplestrat = 1;
	trparams._initWithFace	= false;

	trparams._debugv		= false;
	trparams._disp			= false; // set this to true if you want to see video output (though it slows things down)
	trparams._vidsave		= savevid? dataDir + name + paramname + "_TR" + int2str(trial,3) + ".avi" : "";
	trparams._trsave		= dataDir + name + paramname + "_TR" + int2str(trial,3) + "_c.txt";

	////////////////////////////////////////////////////////////////////////////////////////////
	// TRACK

	
	cout << "\n===============================================\nTRACKING: " << name + paramname + "_TR" + int2str(trial,3) << endl;
	cout <<   "-----------------------------------------------\n";
    
	
	//CvCapture* pCapture = cvCaptureFromFile("1.avi");

	tr.track_frames(vid, trparams, clfparams);
	cout << endl << endl;

	delete clfparams;
	
}
Code Example #16
void	FaceTrackDemo(const char* savefile)
{
	float vwidth = 240, vheight = 180;  // images coming from webcam will be resized to these dimensions (smaller images = faster runtime)
	ClfStrongParams			*clfparams;
	SimpleTracker			tr;
	SimpleTrackerParams		trparams;

	////////////////////////////////////////////////////////////////////////////////////////////
	// PARAMS

	// strong model
	switch( 2 ){
		case 1:		// OAB1
			clfparams = new ClfAdaBoostParams();
			((ClfAdaBoostParams*)clfparams)->_numSel	= 50;
			((ClfAdaBoostParams*)clfparams)->_numFeat	= 250;
			trparams._posradtrain						= 1.0f;
			break;
		case 2:		// MILTrack
			clfparams = new ClfMilBoostParams();
			((ClfMilBoostParams*)clfparams)->_numSel	= 50;
			((ClfMilBoostParams*)clfparams)->_numFeat	= 250;
			trparams._posradtrain						= 4.0f;
			break;
		
	}

	// feature parameters
	FtrParams *ftrparams;
	HaarFtrParams haarparams;
	ftrparams = &haarparams;
	clfparams->_ftrParams	= ftrparams;

	// online boosting parameters
	clfparams->_ftrParams		= ftrparams;

	// tracking parameters
	trparams._init_negnumtrain	= 65;
	trparams._init_postrainrad	= 3.0f;
	trparams._srchwinsz			= 25;
	trparams._negnumtrain		= 65;


	// set up video
	CvCapture* capture = cvCaptureFromCAM( -1 );
	if( capture == NULL ){
		abortError(__LINE__,__FILE__,"Camera not found!");
		return;
	}


	////////////////////////////////////////////////////////////////////////////////////////////
	// TRACK

	// print usage
	fprintf(stderr,"MILTRACK FACE DEMO\n===============================\n");
	fprintf(stderr,"This demo uses the OpenCV face detector to initialize the tracker.\n");
	fprintf(stderr,"Commands:\n");
	fprintf(stderr,"\tPress 'q' to QUIT\n");
	fprintf(stderr,"\tPress 'r' to RE-INITIALIZE\n\n");
	
	// grab first frame
	Matrixu frame,framer,framedisp;
	Matrixu::CaptureImage(capture,framer);
	frame = framer.imResize(vheight,vwidth);

	// save output
	CvVideoWriter *w=NULL;

	// initialize with face location
	while( !Tracker::initFace(&trparams,frame) ){
		Matrixu::CaptureImage(capture,framer);
		frame = framer.imResize(vheight,vwidth);
		frame.display(1,2);
	}
	ftrparams->_height		= (uint)trparams._initstate[2];
	ftrparams->_width		= (uint)trparams._initstate[3];

	// the tracker is initialized here
	tr.init(frame, trparams, clfparams);

	StopWatch sw(true);
	double ttime=0.0;
	double probtrack=0.0;

	// track
	for (int cnt = 0; Matrixu::CaptureImage(capture,framer); cnt++) {
		frame = framer.imResize(vheight,vwidth); 
		probtrack = tr.track_frame(frame, framedisp);  // grab tracker confidence

		// initialize video output
		if( savefile != NULL && w==NULL ){
			w = cvCreateVideoWriter( savefile, CV_FOURCC('I','Y','U','V'), 15, cvSize(framedisp.cols(), framedisp.rows()), 3 );
		}

		// display and save frame
		framedisp._keepIpl=true;
		framedisp.display(1,2);
		if( w != NULL )
			Matrixu::WriteFrame(w, framedisp);
		framedisp._keepIpl=false; framedisp.freeIpl();
		char q=cvWaitKey(1);
		ttime = sw.Elapsed(true);
		fprintf(stderr,"%s%d Frames/%f sec = %f FPS, prob=%f",ERASELINE,cnt,ttime,((double)cnt)/ttime,probtrack);
	
		// quit
		if( q == 'q' )
			break;

		// restart with face detector
		else if( q == 'r' || probtrack<0 ) 
		{
			while( !Tracker::initFace(&trparams,frame) && q!='q' ){
				Matrixu::CaptureImage(capture,framer);
				frame = framer.imResize(vheight,vwidth);
				frame.display(1,2);
				q=cvWaitKey(1);
			}
			if( q == 'q' )
				break;
			
			// re-initialize tracker with new parameters
			ftrparams->_height		= (uint)trparams._initstate[2];
			ftrparams->_width		= (uint)trparams._initstate[3];
			clfparams->_ftrParams	= ftrparams;
			fprintf(stderr,"\n");
			tr.init(frame, trparams, clfparams);
			
		}
	}


	// clean up
	if( w != NULL )
		cvReleaseVideoWriter( &w );
	cvReleaseCapture( &capture );

}