Example #1
    bool OpenCVAlgorithms::findSubImage(const QList<QImage> &sources,
                      const QImage &target,
                      MatchingPointList &matchingPoints,
                      int matchPercentage,
                      int maximumMatches,
                      int downPyrs,
                      int searchExpansion,
                      AlgorithmMethod method)
    {
        mError = NoError;
        mErrorString.clear();

        // Convert the Qt images into OpenCV matrices
        QList<cv::Mat> sourcesMat;
        sourcesMat.reserve(sources.size());

        foreach(const QImage &source, sources)
            sourcesMat.append(toCVMat(source));

        cv::Mat targetMat = toCVMat(target);

        if(!checkInputImages(sourcesMat, targetMat))
            return false;

        // Run the template matching synchronously and hand back the results
        matchingPoints = OpenCVAlgorithms::fastMatchTemplate(sourcesMat, targetMat, matchPercentage, maximumMatches, downPyrs, searchExpansion, method);

        return true;
    }
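For reference, a minimal caller of this synchronous variant could look like the sketch below. It is not part of the original project: only the findSubImage() signature shown above is assumed, the qualified OpenCVAlgorithms::AlgorithmMethod type name is an assumption, and the threshold values are illustrative.

    // Hypothetical caller (not from the original code): look for a template image inside one screenshot.
    #include <QDebug>

    bool findTemplateOnce(OpenCVAlgorithms &algorithms,
                          const QImage &screenshot,
                          const QImage &templateImage,
                          OpenCVAlgorithms::AlgorithmMethod method)   // qualified type name assumed
    {
        MatchingPointList matches;

        // Illustrative parameters: 70% minimum confidence, at most 10 matches,
        // 2 pyramid down-scaling steps, 15 px search expansion
        if(!algorithms.findSubImage(QList<QImage>() << screenshot, templateImage, matches,
                                    70, 10, 2, 15, method))
        {
            qWarning() << "findSubImage reported an error";
            return false;
        }

        qDebug() << "found" << matches.size() << "matching point(s)";   // assuming a QList-style container
        return true;
    }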
Example #2
	/**
	 * Copies the last buffer that came from the stream into the Image (drawable)
	 * @param updateTexture if false, only the RAM buffer of m_frameImg is updated; if true, its GPU texture is updated as well.
	 */
	void MediaPlayerOCV::copyBufferIntoImage( bool updateTexture /*= false*/)
	{
		// Check if video is ok
		if ( !isValid() || !m_newBufferReady)
			return;
		
		// Retrieve frame (different paths windows/mac)
        // TODO: review why they differ
#ifdef WIN32
        cv::Mat videoFrame;
        cv::Mat outMat = toCVMat(m_frameImg);
		bool result = m_capture.retrieve( videoFrame );
        if ( result )
        {
            // Convert from BGR to RGB
            cv::cvtColor(videoFrame, outMat, CV_BGR2RGB);
            
            if ( updateTexture )
                m_frameImg.updateTexture();
        }
#elif __APPLE__
        cv::Mat outMat = toCVMat(m_frameImg);
        cv::Mat videoFrame;
        
        // NOTE: for some reason the capture stores the image in a new RGBA mat instead of RGB, so for now
        // we copy it into a temporary image to work around this (less optimized, but functional)
		bool result = m_capture.retrieve( videoFrame );
        if ( result )
        {
            // Convert from BGR to RGB
            cv::cvtColor(videoFrame, outMat, CV_BGR2RGB);
            
            if ( updateTexture )
                m_frameImg.updateTexture();
            else
                m_frameImg.setUpdateTexture();
        }
#endif
        
		// Clear new buffer flag
		m_newBufferReady	= false;
	}
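Outside of the MediaPlayerOCV wrapper, the same grab/retrieve/convert pattern can be reproduced with plain OpenCV. The sketch below is only an illustration and assumes an OpenCV 2.x-style build (it reuses the CV_BGR2RGB constant seen above); it is not part of the class.

    // Standalone sketch: retrieve the most recently grabbed frame and convert it to RGB.
    #include <opencv2/opencv.hpp>

    bool retrieveAsRgb(cv::VideoCapture &capture, cv::Mat &rgbOut)
    {
        cv::Mat bgrFrame;

        // grab() advances the stream, retrieve() decodes the grabbed frame into a Mat
        if (!capture.grab() || !capture.retrieve(bgrFrame))
            return false;

        // OpenCV decodes video frames in BGR order; convert for display / texture upload
        cv::cvtColor(bgrFrame, rgbOut, CV_BGR2RGB);
        return true;
    }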
Example #3
    bool OpenCVAlgorithms::findSubImageAsync(const QList<QImage> &sources,
                      const QImage &target,
                      int matchPercentage,
                      int maximumMatches,
                      int downPyrs,
                      int searchExpansion,
                      AlgorithmMethod method)
    {
        mError = NoError;
        mErrorString.clear();

        // Only one asynchronous search can run at a time
        if(mFuture.isRunning())
        {
            mError = AlreadyRunningError;
            mErrorString = tr("FindSubImage is already running");

            return false;
        }

        // Convert the Qt images into OpenCV matrices
        QList<cv::Mat> sourcesMat;
        sourcesMat.reserve(sources.size());

        foreach(const QImage &source, sources)
            sourcesMat.append(toCVMat(source));

        cv::Mat targetMat = toCVMat(target);

        if(!checkInputImages(sourcesMat, targetMat))
            return false;

        // Qt::UniqueConnection avoids stacking duplicate connections on repeated calls
        connect(&mFutureWatcher, SIGNAL(finished()), this, SLOT(finished()), Qt::UniqueConnection);

        // Run the template matching in a worker thread and watch the future for completion
        mFuture = QtConcurrent::run(boost::bind(&OpenCVAlgorithms::fastMatchTemplate, this, sourcesMat, targetMat, matchPercentage, maximumMatches, downPyrs, searchExpansion, method));
        mFutureWatcher.setFuture(mFuture);

        return true;
    }
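The asynchronous variant is the standard QtConcurrent::run + QFutureWatcher pattern. The sketch below is a generic, self-contained illustration of that pattern with placeholder names (AsyncRunner, heavyWork); it is not the OpenCVAlgorithms class, and it assumes Qt 4-style headers to match the SIGNAL/SLOT macros used above.

    // Generic sketch of the QtConcurrent + QFutureWatcher pattern (placeholder worker and result type).
    #include <QObject>
    #include <QFutureWatcher>
    #include <QtConcurrentRun>

    static int heavyWork(int input)            // stands in for the real worker (e.g. fastMatchTemplate)
    {
        return input * 2;
    }

    class AsyncRunner : public QObject
    {
        Q_OBJECT

    public:
        void start(int input)
        {
            connect(&mWatcher, SIGNAL(finished()), this, SLOT(onFinished()), Qt::UniqueConnection);
            mWatcher.setFuture(QtConcurrent::run(heavyWork, input));
        }

    private slots:
        void onFinished()
        {
            int result = mWatcher.result();    // safe to read once finished() has been emitted
            Q_UNUSED(result);
        }

    private:
        QFutureWatcher<int> mWatcher;
    };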
Example #4
 /*
  * Copies the new frame received from the capture thread into the MediaPlayerOCV internal image (the one that will be accessed from the outside).
  */
 void MediaPlayerOCV::copyFrameFromThread()
 {
     if ( !isValid() )
         return;
 
     // Try lock (instead of lock) to avoid stalling the main thread
     boost::mutex::scoped_try_lock lock(m_mutex);
     if ( lock )
     {
         // Copy the new frame
         cv::Mat imageMat = toCVMat(m_frameImg);
         m_bufferFromThread.copyTo(imageMat);
         m_frameImg.setUpdateTexture();
         m_newBufferReady = false;
     }
 }
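For context, the non-blocking hand-off above relies on boost's try-lock idiom: the main thread simply skips a frame instead of stalling when the capture thread holds the mutex. A minimal sketch of both sides of that idiom, with hypothetical names (frameMutex, sharedFrame, not actual MediaPlayerOCV members), could look like this:

    // Minimal try-lock hand-off sketch (hypothetical globals for brevity).
    #include <boost/thread/mutex.hpp>
    #include <opencv2/core/core.hpp>

    boost::mutex frameMutex;
    cv::Mat      sharedFrame;        // written by the capture thread, read by the main thread
    bool         newFrameReady = false;

    // Capture thread: a blocking lock is fine here, the main thread never holds the mutex for long
    void publishFrame(const cv::Mat &justCaptured)
    {
        boost::mutex::scoped_lock lock(frameMutex);
        justCaptured.copyTo(sharedFrame);
        newFrameReady = true;
    }

    // Main thread: try-lock so a busy capture thread never stalls rendering
    void consumeFrame(cv::Mat &destination)
    {
        boost::mutex::scoped_try_lock lock(frameMutex);
        if (lock.owns_lock() && newFrameReady)
        {
            sharedFrame.copyTo(destination);
            newFrameReady = false;
        }
    }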
Example #5
/**
* @public
* @brief Add a new frame
*/
void OCVVideoRecorder::addFrame( Cing::Image& newFrame )
{
	bool isOpen = m_cvVideoWriter->isOpened();

	if (isOpen)
	{
		// HACK: the video writer expects BGR data, so convert the RGB frame to BGR in a temporary image
		cv::Mat* cvTempImage = ImageResourceManager::getSingleton().getImage( newFrame.getWidth(), newFrame.getHeight(), newFrame.getNChannels() );
		if ( cvTempImage )
		{
			cv::cvtColor( toCVMat(newFrame), *cvTempImage, CV_RGB2BGR );

			// Add frame to video
			(*m_cvVideoWriter) << *cvTempImage;

			ImageResourceManager::getSingleton().releaseImage( cvTempImage );
		}
	}
	else
		LOG_CRITICAL("OCVVideoRecorder::addFrame() - Warning: OCVVideoRecorder is closed");
}
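The BGR expectation can also be seen with plain OpenCV, outside the Cing ImageResourceManager. The sketch below is illustrative only (the writer setup values are placeholders) and assumes an OpenCV 2.x-style build, matching the CV_RGB2BGR constant used above.

    // Standalone sketch: push an RGB frame through cv::VideoWriter, which expects BGR data.
    #include <opencv2/opencv.hpp>

    bool writeRgbFrame(cv::VideoWriter &writer, const cv::Mat &rgbFrame)
    {
        if (!writer.isOpened())
            return false;

        cv::Mat bgrFrame;
        cv::cvtColor(rgbFrame, bgrFrame, CV_RGB2BGR);   // swap channel order before encoding
        writer << bgrFrame;
        return true;
    }

    // Example setup with placeholder values: 25 fps MJPG file sized to the incoming frames.
    // cv::VideoWriter writer("out.avi", CV_FOURCC('M','J','P','G'), 25, cv::Size(width, height));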
Example #6
	/**
	 * Loads a movie file
	 * @param fileName		Name of the movie to load (it can be a local path relative to the data folder, or a network path)
	 * @param requestedVideoFormat	Format in which the frames of the movie will be stored. Defaults to RGB, which means the
	 *								movie file's own video format will be used (with regard to the alpha channel: RGB vs. RGBA)
	 * @param fps			Desired frames per second for the playback. -1 means use the fps of the movie file.
	 * @return true if the video was successfully loaded
	 */
	bool MediaPlayerOCV::load( const std::string& fileName, GraphicsType requestedVideoFormat /*= RGB*/, float fps /*= -1*/  )
	{
        // If this is re-load: release resources first
        if ( isValid() )
            end();
        
		// Build path to file
		bool result = buildPathToFile( fileName );
		if ( !result )
		{
			end();
			return false;
		}

        
		// Load video
		m_capture.open( m_filePath );
		if ( m_capture.isOpened() == false )
		{
			LOG_ERROR( "MediaPlayerOCV Could not load file %s", m_fileName.c_str() );
			return false;
		}
        
		m_videoWidth    = m_capture.get( CV_CAP_PROP_FRAME_WIDTH );
		m_videoHeight   = m_capture.get( CV_CAP_PROP_FRAME_HEIGHT );
		m_videoFps		= m_capture.get( CV_CAP_PROP_FPS );
		m_videoNFrames	= m_capture.get( CV_CAP_PROP_FRAME_COUNT );	
		m_videoDuration = (float)m_videoNFrames / m_videoFps;
		m_pixelFormat	= RGB;
		m_currentFrame	= 0;
		m_playing		= false;

		
		// Init the frame container to the video size
		m_frameImg.init( m_videoWidth, m_videoHeight, m_pixelFormat );
		m_bufferSizeInBytes = m_videoWidth * m_videoHeight * m_frameImg.getNChannels();
		
		// Check if the requested fps is different than the actual video fps -> if so, change it
 		if ( (fps > 0) && (equal(fps, m_videoFps) == false) )
		{
			m_capture.set( CV_CAP_PROP_FPS, fps );
			m_videoFps = m_capture.get( CV_CAP_PROP_FPS );
		}
        
        // Create capture thread
        if ( m_multithreaded )
        {
            // Load the intermediate image
            m_bufferFromThread = toCVMat(m_frameImg).clone();
            
            // Start the thread
            m_captureThread = new OCVCaptureThread( *this, m_timer, m_videoFps );
            m_captureThread->start();
        }
        
		LOG( "MediaPlayer: File %s correctly loaded (Working in Multithreaded mode = [%s]", m_fileName.c_str(), toString(m_multithreaded).c_str());
		
		// The object is valid when the file is loaded
		m_bIsValid = true;
		return true;
	}
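Stripped of the Cing-specific bookkeeping, the core of load() is the usual cv::VideoCapture open-and-query sequence. A minimal standalone sketch, using the same OpenCV 2.x-style CV_CAP_PROP_* constants as above (the function name and output format are placeholders):

    // Standalone sketch: open a video file and print its basic properties.
    #include <opencv2/opencv.hpp>
    #include <cstdio>
    #include <string>

    bool probeVideo(const std::string &path)
    {
        cv::VideoCapture capture(path);
        if (!capture.isOpened())
            return false;

        double width    = capture.get(CV_CAP_PROP_FRAME_WIDTH);
        double height   = capture.get(CV_CAP_PROP_FRAME_HEIGHT);
        double fps      = capture.get(CV_CAP_PROP_FPS);
        double nFrames  = capture.get(CV_CAP_PROP_FRAME_COUNT);
        double duration = (fps > 0.0) ? nFrames / fps : 0.0;   // seconds

        std::printf("%s: %.0fx%.0f, %.2f fps, %.2f s\n", path.c_str(), width, height, fps, duration);
        return true;
    }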