Esempio n. 1
0
// Loads a movie by file name.  If `vidPath` matches the file name of an entry
// in the movie database `strdb`, that entry is loaded through the indexed
// loader; otherwise player 0 is torn down and the path is loaded directly.
void Cinema::loadDirectMovie(string vidPath){
        bool found = false;
        // Search every database list for a file whose name matches vidPath.
        for(int dbIndex=0;dbIndex<(int)strdb.size() && !found;dbIndex++){
            for(int mIndex=0;mIndex<(int)strdb[dbIndex].size();mIndex++){
                ofFile f(strdb[dbIndex][mIndex]);
                if(f.getFileName() == vidPath){ // clearer than !name.compare(vidPath)
                    loadMovie(currentVidIndex, dbIndex, mIndex, true);
                    found = true;
                    break;
                }
            }
        }
        
        if(!found){
            // Not in the database: replace player 0 with a fresh player and
            // load the path directly.
            // NOTE(review): the database hit above targets currentVidIndex,
            // while this fallback always replaces players[0] — confirm intent.
            players[0]->closeMovie();
            delete players[0];
            
            players[0] = new ofVideoPlayer;
            try{
                players[0]->loadMovie(vidPath);
            }catch ( const std::exception & e ) {
                cout << e.what() << endl;
            }
            
            // Movies whose path mentions "sound" keep their audio; all others
            // are muted.
            players[0]->setVolume(vidPath.find("sound")!=string::npos ? 1 : 0);
        }
    }
Esempio n. 2
0
void ofFadeVid::setup(string filename_in) {
    // Start fully transparent and fade in toward opaque.
    alpha = 0.0f;
    alphaDest = 1.0f;

    // RGBA pixels + alpha blending so the fade can actually be drawn;
    // the pixel format must be set before the movie is loaded.
    setPixelFormat(OF_PIXELS_RGBA);
    ofEnableAlphaBlending();

    loadMovie(filename_in);
}
Esempio n. 3
0
void Cinema::keyPressed(int key){
        // Map keyboard input onto movie-loading and transition actions.
        switch(key){
            case ' ': {
                // Queue a random movie from the current database list on the
                // selected player.
                loadLaterIndex = ofRandom(strdb[dbIndex].size());
                loadMovie(app->parameterMap[selectedPlayer], dbIndex, loadLaterIndex, true);
                break;
            }
            case '&':
                app->parameterMap[selectedPlayer] = 0;
                break;
            case 233: // key code 233 (Latin-1 'é') — presumably AZERTY layout; confirm
                app->parameterMap[selectedPlayer] = 1;
                break;
            case 'n': {
                randomPosition(ofRandom(0,1));
                app->deltaMap[user5] = 1;
                // Capture the current frame into the still FBO for the transition.
                stillFbo->begin();
                fbo.draw(0,0);
                stillFbo->end();
                break;
            }
            case 4352:
                randomPosition(ofRandom(0,1));
                break;
            default:
                break;
        }
    };
Esempio n. 4
0
// Rewinds playback to the start (or, for reverse playback, seeks a segment
// running to the end of the stream).  Falls back to reloading the movie from
// _uri when seeking is not supported by the pipeline.
void MediaImpl::resetMovie()
{
    // XXX: There used to be an issue that when we reached EOS (_eos() == true) we could not seek anymore.
    if (_seekEnabled)
    {
        qDebug() << "Seeking at position 0.";
        GstEvent* seek_event;

        // Forward playback: set the position to 0 and leave the stop point
        // untouched.  Reverse playback: segment from 0 to the stream end.
        if (_rate > 0) {
            seek_event = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                             GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, 0);
        } else {
            seek_event = gst_event_new_seek (_rate, GST_FORMAT_TIME, GstSeekFlags( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
                                             GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_END, 0);
        }
        /* Send the event */
        // NOTE(review): the seek is sent to _appsink0 rather than the pipeline;
        // GStreamer forwards seek events upstream, but confirm this is intended.
        gst_element_send_event (_appsink0, seek_event);
//    gst_element_seek_simple (_pipeline, GST_FORMAT_TIME,
//                             (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0);
//    this->_currentFrameSample = NULL;
        _setMovieReady(true);
    }
    else
    {
        // Just reload movie.
        qDebug() << "Reloading the movie" << _seekEnabled;
        loadMovie(_uri);
    }
}
void ArchiveClip::loadFromPath( )
{
    // Load the clip at `path` as a texture-only QTKit movie, cache its
    // dimensions, and rewind to the first frame.
    loadMovie( path, OF_QTKIT_DECODE_TEXTURE_ONLY );
    h = getHeight();
    w = getWidth();
    firstFrame();
}
Esempio n. 6
0
// Determines whether the given file (by name or already-open handle) is a
// sound-only QuickTime movie.
//
// Returns MMIO_SUCCESS only for sound-only QuickTime files; MMIO_ERROR for
// non-sound QuickTime files or bad arguments; otherwise the loader's error
// code is passed through.  The original nested, unbraced if/else chain was a
// dangling-else hazard — braces make the binding explicit.
LONG SoundFileFormat::identifyFile(PSZ fileName, HMMIO fileHandle)
{
  HMMIO originalFileHandle = fileHandle;
  ULONG ulTempFlags = MMIO_READ | MMIO_DENYWRITE | MMIO_NOIDENTIFY;
  LONG returnCode;

  /* Need at least one way to reach the file. */
  if (!fileName && !fileHandle)
    return (MMIO_ERROR);

  /* Open our own handle only when the caller did not provide one. */
  if (!fileHandle) {
    if (!(fileHandle = mmioOpen((PSZ)fileName, NULL, ulTempFlags)))
      return (MMIO_ERROR);
  }

  MovieAtom movieAtom;
  ULONG movieSize;
  returnCode = loadMovie(movieAtom, movieSize, fileHandle);

  if (!originalFileHandle)  /* Don't close handle if provided to us      */
    mmioClose(fileHandle, 0);

  if (returnCode == MMIO_SUCCESS) {
    /* This is a QuickTime file; accept it only if it is sound-only. */
    return movieAtom.isSoundOnly() ? MMIO_SUCCESS : MMIO_ERROR;
  }

  return (returnCode);
}
Esempio n. 7
0
// Constructs a media player for the given URI.  All GStreamer element
// pointers start NULL; the pipeline is created by loadMovie() when a
// non-empty URI is supplied.  `live` marks the source as shared-memory.
MediaImpl::MediaImpl(const QString uri, bool live) :
    _bus(NULL),
    _pipeline(NULL),
    _uridecodebin0(NULL),
    _queue0(NULL),
    _videoconvert0(NULL),
    _appsink0(NULL),
    _audioqueue0(NULL),
    _audioconvert0(NULL),
    _audioresample0(NULL),
    _audiovolume0(NULL),
    _audiosink0(NULL),
    _currentFrameSample(NULL),
    _currentFrameBuffer(NULL),
    _bitsChanged(false),
    _width(640), // unused
    _height(480), // unused
    _data(NULL),
    _seekEnabled(false),
    _rate(1.0),
    _isSharedMemorySource(live),
    _attached(false),
    _movieReady(false),
    _playState(false),
    _uri(uri)
{
    _pollSource = NULL;
    // An empty URI leaves the object constructed but without a pipeline.
    if (uri != "")
    {
        loadMovie(uri);
    }
    // NOTE(review): QMutexLocker acquires _mutex on construction and releases
    // it only on destruction — this locker is heap-allocated and kept, so the
    // mutex appears held for the object's lifetime; confirm that is intended.
    _mutexLocker = new QMutexLocker(&_mutex);
}
Esempio n. 8
0
bool CParrot::PreEnterViewMsg(CPreEnterViewMsg *msg) {
	// When _v4 is clear, show the first frame of the parrot movie before
	// the view is entered.
	if (_v4)
		return true;

	loadMovie("z167.avi", false);
	loadFrame(0);
	return true;
}
Esempio n. 9
0
void MediaImpl::build()
{
    // Build the media implementation for the current URI, logging failures.
    qDebug() << "Building video impl";
    const bool loaded = loadMovie(_uri);
    if (!loaded)
    {
        qDebug() << "Cannot load movie " << _uri << ".";
    }
}
void DialogVideoPlayer::openMoviePress() {

    QString fileName = QFileDialog::getOpenFileName(this, tr("Open Movie"), QDir::homePath());

    if (!fileName.isEmpty() && loadMovie(fileName)) {
        qDebug() << "setting " << fileName;
        _participant->SetProjectSetting(Consts::SETTING_MOVIE_PATH, fileName);
    }
}
//--------------------------------------------------------------
void pmWarpPiRendererVideoPlayer::onFadeOutComplete(float* arg)
{
    // Tween-complete callback: load the pending movie, then tween the screen
    // opacity back up from 0 to its maximum over fadeTime.
    loadMovie();

    float* opacityPtr = (float *)&screenOpacity.get();
    Tweenzor::add(opacityPtr, 0.0, maxScreenOpacity, 0.0, fadeTime, EASE_IN_OUT_EXPO);
    Tweenzor::addCompleteListener(Tweenzor::getTween(opacityPtr), this, &pmWarpPiRendererVideoPlayer::onComplete);
}
//--------------------------------------------------------------
// Handles files dragged onto the window: the upper region (y < 220) loads the
// first dropped file as the source movie; anywhere else it becomes the
// destination folder.  Guards against a drop event carrying no files, which
// would previously index an empty vector (undefined behavior).
void FrameSequenceExtractor::dragEvent(ofDragInfo dragInfo){ 
    printf("Drop?\n");
    if(dragInfo.files.empty()){
        printf("Drop contained no files\n");
        return;
    }
    if(dragInfo.position.y<220){
        loadMovie(dragInfo.files[0],100);
    }
    else{
        destFolder = dragInfo.files[0];
    }
}
void VideoTextureActor::setup(){
    // Create the backing video player, load the movie named by videoInfo from
    // the resources folder, and register its texture before base-class setup.
    player = new ofVideoPlayer;
    loadMovie("resources/" + videoInfo);
    setTextureID(videoInfo);
    player->setSpeed(playSpeed); // 44 to 48 kHz problem...
    Actor::setup();
}
void FrameSequenceExtractor::startPlayback(){
    // Begin an extraction run: a destination folder is mandatory; then the
    // pass counter is set, the source movie loaded, and frame numbering reset.
    if(destFolder.empty()){
        printf("Error! Bad Destination\n");
        return;
    }

    passNum = 1;
    loadMovie(sourceFile);
    frameNumToSave = 0;
}
Esempio n. 15
0
// Plays a built-in left/right movie pair by index, persisting the choice.
// NOTE(review): bStoreInEEPROM is accepted but never consulted —
// storeActiveMovieToEEPROM is always called on a valid index; confirm intent.
void playStockMovie(const unsigned short sStockMovieIndex, const bool bStoreInEEPROM)
{
  Serial.print("PlayStockMovie: ");
  Serial.println(sStockMovieIndex);

  // Reject out-of-range indices up front.
  if(sStockMovieIndex >= STOCK_MOVIE_ARRAY_SIZE)
  {
    Serial.println("Movie could not be loaded because Index was bigger than StockMovie Array.");
    return;
  }

  storeActiveMovieToEEPROM(sStockMovieIndex);
  loadMovie(STOCK_MOVIES_LEFT[sStockMovieIndex].c_str(), STOCK_MOVIES_RIGHT[sStockMovieIndex].c_str());
}
Esempio n. 16
0
// Constructs a media player for the given URI.  All pipeline element
// pointers start NULL; loadMovie() builds the pipeline when a non-empty URI
// is supplied.  The trailing commented-out code is preserved plug wiring
// from an earlier integration.
MediaImpl::MediaImpl(const QString uri) :
_currentMovie(""),
_bus(NULL),
_pipeline(NULL),
_source(NULL),
//_audioQueue(NULL),
//_audioConvert(NULL),
//_audioResample(NULL),
_videoQueue(NULL),
_videoConvert(NULL),
_videoColorSpace(NULL),
_audioSink(NULL),
_videoSink(NULL),
_frame(NULL),
_width(640),
_height(480),
_data(NULL),
//_audioBufferAdapter(NULL),
_seekEnabled(false),
//_audioNewBufferCounter(0),
_movieReady(false),
_uri(uri)
{
  // An empty URI leaves the object constructed but without a pipeline.
  if (uri != "")
    loadMovie(uri);

//  addPlug(_VIDEO_OUT = new PlugOut<VideoRGBAType>(this, "ImgOut", false));
//  addPlug(_AUDIO_OUT = new PlugOut<SignalType>(this, "AudioOut", false));
//
//  addPlug(_FINISH_OUT = new PlugOut<ValueType>(this, "FinishOut", false));
//
//  QList<AbstractPlug*> atLeastOneOfThem;
//  atLeastOneOfThem.push_back(_VIDEO_OUT);
//  atLeastOneOfThem.push_back(_AUDIO_OUT);
//  setPlugAtLeastOneNeeded(atLeastOneOfThem);
//
//  addPlug(_RESET_IN = new PlugIn<ValueType>(this, "Reset", false, new ValueType(0, 0, 1)));
//  addPlug(_MOVIE_IN = new PlugIn<StringType>(this, "Movie", false));
//
//  //_settings.add(Property::FILENAME, SETTING_FILENAME)->valueStr("");
//
//  _VIDEO_OUT->sleeping(true);
//  _AUDIO_OUT->sleeping(true);
//
//  // Crease audio buffer handler.
//  _audioBufferAdapter = gst_adapter_new();
}
Esempio n. 17
0
///*****************************************************************
///
///                        SETUP
///
///*****************************************************************
// One-time application setup: initializes state flags, loads the logo strip,
// configures the grabber/settings for the chosen video device, opens either
// live video or a movie file, and wires up GUI, OSC and camera sources.
void testApp::setup(){
   // ofSetLogLevel(OF_LOG_NOTICE);
    bLiveVideo = true;
    bShowGui = false;
    selOutput = OUTPUT_IMAGE;
    bRecording = false;
    bMovieAudio = false;
    bFullScreen = false;
	saveFrame = false;
	threshold = 50;
    // if we set BG subtraction as default (no. 2) there is a segmentation fault because the background image is not set for the first frame
	algorithm = 1;
	cropping = '6';
	bZoomTarget = false;
	cols = 1;
	rows = 1;
	lastTime = ofGetElapsedTimeMillis();
	currFrameNum = 0;
	videoFPS = 0;
	// Scale the logo strip to the window width, preserving aspect ratio.
	imgLogos.loadImage("faldon-logos.bmp");
	imgLogos.resize(ofGetWidth(), round(imgLogos.height*((float)ofGetWidth()/(float)imgLogos.width)));
	//ofEnableAlphaBlending();
    ofSetVerticalSync(true);
    ofSetFrameRate(30);
    // V4L-style device path built from the configured device id.
    string devfile = "/dev/video"+ofToString(VIDEO_DEVICE_ID);
    settings.setup(devfile);

    vidGrabber.setVerbose(true);
    vidGrabber.setDeviceID(VIDEO_DEVICE_ID);
    vidGrabber.setDesiredFrameRate(30);
    // Choose the input: live camera or a movie file.
    if (bLiveVideo) setupLiveVideo();
    else loadMovie();
    setupGui();
#ifdef OFX_FENSTER
	outWinListener.setup();
	outWinListener.setImage(&blendImg);
#endif
    //recorder.setupRecordWindow(0,0,1024,576,24,"capture.mp4",ofxGstVideoRecorder::H264,30);
    //recorder.setupRecordWindow((DRAW_SPACE*2+320),(DRAW_SPACE*2+240+out_H_gap),analysisW*2,analysisH,24,"bg_sub_vs_bg&fd_01.mp4",ofxGstVideoRecorder::H264,30);

    setupOSC();
    blendMode = OF_BLENDMODE_ADD;//OF_BLENDMODE_ALPHA;
    alphaValues[0] = alphaValues[1] = alphaValues[2] = alphaValues[3] = 64;
    loadCameras();
}
Esempio n. 18
0
bool CParrot::PanningAwayFromParrotMsg(CPanningAwayFromParrotMsg *msg) {
	// Respond to the view panning away from the parrot; behavior depends on
	// the parrot's current state flags.
	if (_v4) {
		// Forward a pan-away action to the message's target.
		CActMsg panMsg("PanAwayFromParrot");
		panMsg.execute(msg->_target);
		_field134 = 0;
	} else if (_v2) {
		// Remember the target and play the take-off clip.
		_field134 = msg->_target;
		loadMovie("z168.avi", false);
		stopMovie();
		playClip("Take Off", MOVIE_NOTIFY_OBJECT);
		_npcFlags |= NPCFLAG_2000000;
	} else {
		_npcFlags |= NPCFLAG_400000;
		_field134 = msg->_target;
		stopMovie();
	}

	return true;
}
Esempio n. 19
0
// Restarts playback from the beginning, preferring an in-place seek and
// falling back to a full reload of the movie from _uri.
void MediaImpl::resetMovie()
{
  // TODO: Check if we can still seek when we reach EOS. It seems like it's then impossible and we
  // have to reload but it seems weird so we should check.
  if (!_eos() && _seekEnabled)
  {
    qDebug() << "Seeking at position 0.";
    gst_element_seek_simple (_pipeline, GST_FORMAT_TIME,
                             (GstSeekFlags) (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0);
    // Drop the cached frame pointer so the next one is pulled fresh.
    this->_frame = NULL;
    _setReady(true);
  }
  else
  {
    // Just reload movie.
    qDebug() << "Reloading the movie" << _seekEnabled;
    // Clearing _currentMovie presumably forces loadMovie() to treat the URI
    // as new — confirm against loadMovie's change-detection.
    _currentMovie = "";
    loadMovie(_uri);
  }
}
Esempio n. 20
0
// Constructs the "cinema" scene: allocates the drawing FBOs, loads shaders
// and the movie database, creates four video players, loads an initial
// movie, and leaves all players stopped until the scene starts.
Cinema::Cinema(AbstractApp* app) : Scene(app, "cinema"){
        
        sceneID = "cinema";
        
        fbo.allocate(WIDTH, HEIGHT, GL_RGB);
        //        remanentFbo.allocate(WIDTH, HEIGHT2, GL_RGB);
        copyFbo.allocate(WIDTH,HEIGHT, GL_RGB);
        //        circleMask.loadImage("assets/maskCircle1280x720.png");
        //        circleMask.resize(WIDTH,HEIGHT);
    
    // FBO used to hold a still frame during transitions.
    stillFbo = new ofFbo();
    stillFbo->allocate(WIDTH,HEIGHT, GL_RGB);
        
        // Load the movie database before any movie can be selected.
        load();
        
        circleShader.load("../shaders/circle");
        alphaShader.load("../shaders/alphaBlack");
        alphaWhiteShader.load("../shaders/alphawhite");
        alphaScreenShader.load("../shaders/alphascreen");
        contourShader.load("../shaders/contour");
        
        // Four players, each starting with mixer level 0.
        for(int i=0;i<4;i++){
            players.push_back(new ofVideoPlayer());
            loadLaterMoviePosition.push_back(0);
            setMixer(i,0);
        }
        
        loadMovie(0, 0, 0, true);
        setMixer(0,1); //need to load
        
        // Keep everything paused until the scene actually plays.
        for(int i=0;i<players.size();i++)
            if(players[i]!=0)
                players[i]->stop();
        
        //        vlt.stzartThread();
        
        
    }
//-------------------------------------------------------------------------
void pmWarpPiRendererVideoPlayer::setupVideoPlayer(string _name, bool active)
{
    // Initialise the screen, create the player, load the named movie, and
    // set up the opacity GUI exactly once.
    pmWarpPiRendererDrawable::setupScreen();

    createPlayer();

    /// VIDEO PLAYER
    videoFileName = _name;
    isTesting = false;
    activePlayer = active;
    loadMovie();

    /// GUI — only built on the first call.
    if (guiIsSetup)
        return;

    gui->setup(); // most of the time you don't need a name but don't forget to call setup
    gui->add(screenOpacity.set( "opacity", 1.0, 0.0, 1.0));
    gui->add(maxScreenOpacity.set( "max opacity", 1.0, 0.0, 1.0));
    gui->setPosition(520, 20 + 75);
    guiIsSetup = true;
}
Esempio n. 22
0
void VideoImpl::resetMovie()
{
  // Rewind when seeking is available (forward playback restarts at 0,
  // reverse playback restarts at the end); otherwise reload from the URI.
  if (!_seekEnabled)
  {
    qDebug() << "Seeking not enabled: reloading the movie" << endl;
    loadMovie(_uri);
    return;
  }

  if (_rate > 0.0)
  {
    seekTo((guint64) 0);
  }
  else
  {
    // NOTE: Untested (reverse-playback path).
    seekTo(_duration);
  }
  qWarning() << "update Rate" << endl;
  _updateRate();
}
Esempio n. 23
0
// Handles the end of a movie/clip, advancing the parrot's animation state
// machine.  The _npcFlags bits drive walking loops, the chicken-eating
// sequence, and the take-off/departure sequence.
bool CParrot::MovieEndMsg(CMovieEndMsg *msg) {
	// Take-off clip finished: hide the parrot and notify the lobby.
	if ((_npcFlags & NPCFLAG_2000000) && clipExistsByEnd("Take Off", msg->_endFrame)) {
		setVisible(false);
		moveUnder(findRoom());
		stopMovie();

		CActMsg actMsg1("LoseParrot");
		actMsg1.execute("ParrotLobbyController");

		// Either resume the pending pan-away, or shut the cage.
		if (_field134) {
			CActMsg actMsg2("PanAwayFromParrot");
			actMsg2.execute(_field134);
			_field134 = nullptr;
		} else {
			CActMsg actMsg2("Shut");
			actMsg2.execute("ParrotCage");
		}

		_npcFlags &= ~NPCFLAG_2000000;
		_v4 = 2;
	} else if (_npcFlags & NPCFLAG_10000) {
		// Walking state machine: intro -> loop -> outro, direction chosen by
		// NPCFLAG_100000 (set = walking left).
		if (_npcFlags & NPCFLAG_20000) {
			_npcFlags = (_npcFlags & ~NPCFLAG_20000) | NPCFLAG_40000;
			if (_npcFlags & NPCFLAG_100000) {
				playClip("Walk Left Loop", MOVIE_NOTIFY_OBJECT);
				movieEvent(236);
			} else {
				playClip("Walk Right Loop", MOVIE_NOTIFY_OBJECT);
			}
		} else if (_npcFlags & NPCFLAG_40000) {
			// Mid-walk: keep looping until close enough to _field128, then
			// play the outro.
			int xp = _bounds.left + _bounds.width() / 2;

			if (_npcFlags & NPCFLAG_100000) {
				if ((xp - _field128) > 32) {
					setPosition(Point(_bounds.left - 40, _bounds.top));
					playClip("Walk Left Loop", MOVIE_NOTIFY_OBJECT);
					movieEvent(236);
				} else {
					setPosition(Point(_bounds.left - 10, _bounds.top));
					playClip("Walk Left Outro", MOVIE_NOTIFY_OBJECT);
					_npcFlags = (_npcFlags & ~NPCFLAG_40000) | NPCFLAG_80000;
				}
			} else {
				if ((_field128 - xp) > 32) {
					playClip("Walk Right Loop", MOVIE_NOTIFY_OBJECT);
					movieEvent(244);
				} else {
					playClip("Walk Right Outro", MOVIE_NOTIFY_OBJECT);
					// NOTE(review): uses & here (not & ~) unlike the left
					// branch above — possibly intentional, confirm.
					_npcFlags = (_npcFlags & NPCFLAG_40000) | NPCFLAG_80000;
				}
			}
		} else if (_npcFlags & NPCFLAG_80000) {
			// Outro finished: settle position and clear the walking flags.
			loadFrame(0);
			if (_npcFlags & NPCFLAG_100000)
				setPosition(Point(_bounds.left - 30, _bounds.top));
			else
				setPosition(Point(_bounds.left + 14, _bounds.top));

			_npcFlags &= ~(NPCFLAG_10000 | NPCFLAG_80000 | NPCFLAG_100000 | NPCFLAG_200000);
			CTrueTalkNPC::MovieEndMsg(msg);
		} else {
			// Chicken-eating sequence.
			if (_npcFlags & NPCFLAG_1000000) {
				// Abort if the mouse has moved away from the parrot area.
				Point pt = getMousePos();
				if (pt.x > 70 || pt.y < 90 || pt.y > 280) {
					stopMovie();
					loadFrame(0);
					_npcFlags &= ~NPCFLAG_1000000;
				}

				if (clipExistsByEnd("Walk Left Loop", msg->_endFrame)) {
					playClip("Lean Over To Chicken", MOVIE_NOTIFY_OBJECT);
					setPosition(Point(_bounds.left - 55, _bounds.top));
					_field130 = (-100 - _bounds.left) / 5;
					movieEvent(261);
					movieEvent(262);
					movieEvent(265);
					movieEvent(268);
					movieEvent(271);
					return true;

				} else if (clipExistsByEnd("Lean Over To Chicken", msg->_endFrame)) {
					playClip("Eat Chicken", 0);
					playClip("Eat Chicken 2", MOVIE_NOTIFY_OBJECT);
					_v1 = 1;

					CStatusChangeMsg statusMsg;
					statusMsg._newStatus = 0;
					statusMsg.execute("PerchCoreHolder");

					CTrueTalkTriggerActionMsg actionMsg;
					actionMsg._param1 = 280266;
					actionMsg._param2 = 1;
					actionMsg.execute(this);

					// Mark the chicken item itself as eaten, if present.
					CCarry *chicken = dynamic_cast<CCarry *>(findUnder(getRoot(), "Chicken"));
					if (chicken) {
						CActMsg actMsg("Eaten");
						actMsg.execute(chicken);
					}

					_npcFlags &= ~NPCFLAG_1000000;
					return true;
				}
			}

			// After eating: either take off (when _v2) or walk off-screen.
			if (clipExistsByEnd("Eat Chicken 2", msg->_endFrame)) {
				CStatusChangeMsg statusMsg;
				statusMsg._newStatus = 1;
				statusMsg.execute("PerchCoreHolder");

				if (_v2) {
					loadMovie("z168.avi", false);
					playClip("Take Off", MOVIE_NOTIFY_OBJECT);
					setPosition(Point(20, 10));
					_npcFlags |= NPCFLAG_2000000;
				} else {
					_npcFlags &= ~(NPCFLAG_10000 | NPCFLAG_20000 | NPCFLAG_40000 | NPCFLAG_80000 | NPCFLAG_100000 | NPCFLAG_200000);
					_npcFlags |= NPCFLAG_400000;
					stopMovie();
					loadFrame(0);
					setPosition(Point(-90, _bounds.top));
				}
			} else {
				CTrueTalkNPC::MovieEndMsg(msg);
			}
		}
	}

	return true;
}
//-------------------------------------------------------------------------
// Dispatches an incoming OSC message: arg 0 is the command string; the
// remaining args are command-specific (volume level, fade times, file name).
void pmWarpPiRendererVideoPlayer::updateOSC(ofxOscMessage* m)
{
    ofLog(OF_LOG_NOTICE) << "VideoPlayer-OSC" << endl;
    
    
    string address = m->getAddress();
    
    // get the id
    string addressWithoutSlash = address.substr(1,address.size()-1);
    
    // Intended filter: accept when addressed to all or to this renderer's id
    // ((address=="/all") || (id==addressWithoutSlash)) — currently always true.
    if(true)
    {
        /// THIS MESSAGE IS FOR YOU !!
        
        /// COMMAND
        string command = m->getArgAsString(0);
        
        
        /// Change Volume
        if(command == "volume"){
            audioVolume = m->getArgAsFloat(1);
        }
        else if(command == "loop"){
            this->setPlayerLoop(m->getArgAsBool(1));
            cout<< "Set loop State: " << m->getArgAsBool(1) << endl;
            ofLog(OF_LOG_NOTICE) << "Set Loop State: " << loop;
        }
        
        /// PLAY — unpause and fade opacity up; promotes this renderer to the
        /// active player if it was not already.
        else if(command == "playVideo")
        {
            if (isPlayerPaused() && !isFading){
                float fade = m->getArgAsFloat(1);
                //videoPlayer.play();
                if(!activePlayer){
                    activePlayer = true;
                    ofNotifyEvent(swapEvent, fade, this);
                }
                
                setPlayerPaused(false);
                Tweenzor::add((float *)&screenOpacity.get(), 0.0, maxScreenOpacity, 0.0, fade, EASE_IN_OUT_EXPO);
                Tweenzor::addCompleteListener(Tweenzor::getTween((float*)&screenOpacity.get()), this, &pmWarpPiRendererVideoPlayer::onComplete);
                isFading = true;
            }
        }
        /// STOP — fade opacity down to zero.
        else if(command == "stopVideo")
        {
            if(!isPlayerPaused() && !isFading){
                Tweenzor::add((float *)&screenOpacity.get(), screenOpacity, 0.0, 0.0, m->getArgAsFloat(1),EASE_IN_OUT_EXPO);
                Tweenzor::addCompleteListener( Tweenzor::getTween((float*)&screenOpacity.get()), this, &pmWarpPiRendererVideoPlayer::onComplete);
                isFading = true;
            }
        }
        /// PAUSE — toggle paused state.
        else if(command == "pauseVideo")
        {
            if(isPlayerPaused()) setPlayerPaused(false);
            else setPlayerPaused(true);
        }
        /// RESTART — rewind and fade opacity up.
        else if(command == "restartVideo")
        {
            if(!isFading){
                restartMovie();
                Tweenzor::add((float *)&screenOpacity.get(), 0.0, maxScreenOpacity, 0.0, m->getArgAsFloat(1),EASE_IN_OUT_EXPO);
                Tweenzor::addCompleteListener( Tweenzor::getTween((float*)&screenOpacity.get()), this, &pmWarpPiRendererVideoPlayer::onComplete);
                isFading = true;
            }
        }
        /// LOAD MOVIE — verify the file exists under ./videos/, then either
        /// load immediately (inactive player) or fade out first; the fade-out
        /// completion handler performs the actual load.
        else if(command == "loadVideo")
        {
            if (!isFading){
                auto new_videoFileName = "./videos/" +  m->getArgAsString(1);
                ofFile videoCheck(new_videoFileName);
                if(videoCheck.exists()){
                    cout<<"Loading Video: " << new_videoFileName << endl;
                    fadeTime = m->getArgAsFloat(2);
                    videoFileName = new_videoFileName;
                    
                    if(!activePlayer){
                        activePlayer = true;
                        ofNotifyEvent(swapEvent, fadeTime, this);
                        loadMovie();
                        Tweenzor::add((float *)&screenOpacity.get(), 0.0, maxScreenOpacity, 0.0, fadeTime, EASE_IN_OUT_EXPO);
                        Tweenzor::addCompleteListener( Tweenzor::getTween((float*)&screenOpacity.get()), this, &pmWarpPiRendererVideoPlayer::onComplete);
                    }
                    else{
                        Tweenzor::add((float *)&screenOpacity.get(), maxScreenOpacity, 0.0, 0.0, fadeTime, EASE_IN_OUT_EXPO);
                        Tweenzor::addCompleteListener( Tweenzor::getTween((float*)&screenOpacity.get()), this, &pmWarpPiRendererVideoPlayer::onFadeOutComplete);
                        isFading = true;
                    }
                    
                    ofLog(OF_LOG_NOTICE) << "VideoPlayer-LoadVideo: " << videoFileName << " - fadeTime: " << fadeTime;
                }
                else{
                    ofLog(OF_LOG_ERROR) << "VideoPlayer - No VideoFile";
                    cout<< "No Videofile: " << new_videoFileName << endl;
                }
            }
        }
    }
    pmWarpPiRendererDrawable::updateOSC(m);
}
// Binds the participant's gaze matrices to the player, normalizes/sorts the
// segment list, reads playback settings from the project, loads the movie
// and image paths from settings, and resets the UI to the start.
void DialogVideoPlayer::loadData(GrafixParticipant* participant, mat &p_roughM_in, mat &p_smoothM_in, mat &p_fixAllM_in, mat &p_segmentsM_in) {

    _participant = participant;

    p_roughM = &p_roughM_in;
    p_smoothM = &p_smoothM_in;
    p_fixAllM = &p_fixAllM_in;

    // With no segments supplied, fall back to a single segment spanning the
    // whole rough-data range.
    segmentsM = p_segmentsM_in;
    if (segmentsM.n_rows == 0) {
        segmentsM.zeros(1,3);
        segmentsM(0,0) = 0;
        segmentsM(0,1) = 0;
        segmentsM(0,2) = p_roughM->n_rows - 1;
    }
    // order the segments (by start column, column 1)
    uvec indices = sort_index(segmentsM.cols(1,1));
    this->segmentsM = segmentsM.rows(indices);



    // Playback geometry/timing settings from the project configuration.
    secsFragment = this->_participant->GetProject()->GetProjectSetting(Consts::SETTING_SECS_FRAGMENT, Consts::ACTIVE_CONFIGURATION()).toInt();
    hz = this->_participant->GetProject()->GetProjectSetting(Consts::SETTING_HZ, Consts::ACTIVE_CONFIGURATION()).toInt();
    samplesPerFragment = secsFragment * hz;
    expWidth = this->_participant->GetProject()->GetProjectSetting(Consts::SETTING_EXP_WIDTH, Consts::ACTIVE_CONFIGURATION()).toInt();
    expHeight = this->_participant->GetProject()->GetProjectSetting(Consts::SETTING_EXP_HEIGHT, Consts::ACTIVE_CONFIGURATION()).toInt();

    // Smooth-data display is only offered when smooth data exists.
    if (p_smoothM->is_empty()) {
        settingPlaySmooth = false;
        ui->checkBoxSmooth->setChecked(false);
        ui->checkBoxSmooth->setEnabled(false);
    } else {
        ui->checkBoxSmooth->setChecked(true);
    }

    firstSampleMS = (*p_roughM).at(0, 0); //this is the ms of when the player first started
    currentIndex = 0;
    currentFragment = -1;
    currentSegment = 0;

    settingChanged();

    stopPlaying();

    // Load media from settings
    QString moviePath = _participant->GetProjectSetting(Consts::SETTING_MOVIE_PATH).toString();
    loadMovie(moviePath);
    QByteArray imagesPaths = _participant->GetProjectSetting(Consts::SETTING_IMAGE_PATHS).toByteArray();
    pathsImages = QJsonDocument::fromJson(imagesPaths).object().toVariantMap();
    qDebug() << pathsImages;

    updatePlaybackState(0, true);
    qDebug() << " finish loading ";

    // Slider works in tenths of a percent; block signals while syncing it.
    dotSizePercentage = 5;
    ui->sliderDotSize->blockSignals(true);
    ui->sliderDotSize->setValue(dotSizePercentage * 10);
    ui->sliderDotSize->blockSignals(false);

    resizeDisplay();
}
//--------------------------------------------------------------
//--------------------------------------------------------------
// Per-frame update.  passNum == 0 is "preview" mode (jump to a test frame);
// otherwise up to 3 movie frames are advanced per update and each new frame
// is rendered (optionally through the `post` effects chain) and saved as a
// JPEG into destFolder.  The final `fadeFrames` frames are cross-faded over
// the first saved frames by restarting frameNumToSave and re-saving.
void FrameSequenceExtractor::update(){
    
    // One-time default movie load on the first app frame.
    if(ofGetFrameNum() == 1)
        loadMovie("/Users/Josh/Media/IMG_2352.MOV",1);
    
    if(passNum == 0){
        
        // Preview mode: seek to the frame selected by testAtPercent, only
        // when it differs from the last previewed frame.
        int testAtFrame = 0;
        if(fingerMovie.getTotalNumFrames()>0)
            testAtFrame = testAtPercent * fingerMovie.getTotalNumFrames() / 100;
        if(testAtFrame != lastJumpedTestFrame){
            fingerMovie.setFrame(testAtFrame);
            fingerMovie.update();
            lastJumpedTestFrame = testAtFrame;
        }
        
        return;
        
    }

    
    
    
    
    post.setFlip(true);
    // Process up to 3 frames per update to speed up extraction.
    for(int i = 0 ; i < 3 ; i++){
        //fingerMovie.update();
        fingerMovie.nextFrame();
        if(fingerMovie.isFrameNew()){
            fingerMovie.update();
            
            int currentReadFrame = fingerMovie.getCurrentFrame();
            if(currentReadFrame >= firstReadFrameNum){
                if( currentReadFrame < (finalReadFrameNum - fadeFrames) ){
                        //do it up regular. Just save the frame
                    
                    char filename[100];
                    sprintf(filename,"%s/%06d.jpg", destFolder.c_str(), frameNumToSave++);
                    
                    if(useEffects)
                        post.setFlip(true);
                    
                    tempFbo.begin();
                    
                    if(useEffects){
                        setPostVals();
                        post.begin();
                    }
                    
                    ofSetColor(255,255,255,255);
                    fingerMovie.draw(0,0);
                    
                    if(useEffects)
                        post.end();
                    
                    tempFbo.end();
                    turboJPEG.save(&tempFbo, filename, 100);
                    
                    
                    
                    
                    //turboJPEG.save(fingerMovie.getPixels(), filename, fingerMovie.getWidth(), fingerMovie.getHeight());
                    
                } else if(currentReadFrame < finalReadFrameNum){
                    // Tail cross-fade: re-save over the earliest frames,
                    // blending the tail frame (fading out) over the saved one.
                    if(currentReadFrame == (finalReadFrameNum - fadeFrames)){
                        frameNumToSave = 0;//START OVER! We will resave these!
                    }
                
                    char filename[100];
                    sprintf(filename,"%s/%06d.jpg", destFolder.c_str(), frameNumToSave++);
                    
                    
                    if(useEffects){
                        post.setFlip(false);
                        
                        setPostVals();
                        post.begin();
                        ofSetColor(255,255,255,255);
                        fingerMovie.draw(0,0);
                        post.end(false);
                        
                        
                        // Re-load the previously saved frame and composite the
                        // effected tail frame over it with decreasing alpha.
                        ofImage tempImage;
                        turboJPEG.load(filename, &tempImage);
                        
                        tempFbo.begin();
                        
                        ofSetColor(255,255,255,255);
                        tempImage.draw(0,0);
                        
                        ofSetColor(255,255,255,ofMap(currentReadFrame,finalReadFrameNum - fadeFrames-1,finalReadFrameNum,255,0));
                        post.draw();
                        
                        tempFbo.end();
                        
                        turboJPEG.save(&tempFbo, filename, 100);
                    } else {
                        
                        tempFbo.begin();
                        
                        ofImage tempImage;
                        turboJPEG.load(filename, &tempImage);
                        ofSetColor(255,255,255,255);
                        tempImage.draw(0,0);
                        
                        ofSetColor(255,255,255,ofMap(currentReadFrame,finalReadFrameNum - fadeFrames-1,finalReadFrameNum,255,0));
                        fingerMovie.draw(0,0);
                        
                        tempFbo.end();
                        
                        turboJPEG.save(&tempFbo, filename, 100);
                    }
                    
                } else {
                    passNum = 0;//free-up to restart process
                }
            //Save the frame!
            }
            
        }
    }
    post.setFlip(false);
}
Esempio n. 27
0
// Produces one video frame per call: (re)loads the movie if the input name
// changed, reads/decodes packets until a full frame is available (looping
// back to the start on EOF or reset), converts it to RGBA via swscale, and
// copies it into the video output plug.
void Gear_VideoSource::runVideo()
{
  int frameFinished=0;

	// Reload when the requested movie name changed since last frame.
	if (_currentMovie != _MOVIE_IN->type()->value())
	{
		_currentMovie=_MOVIE_IN->type()->value();
		if (!loadMovie(_currentMovie))
			return;
	}

	if (!_movieReady)
		return;

  _VIDEO_OUT->type()->resize(_codecContext->width, _codecContext->height);

  // Reset input: rewind to the start of the stream.
  if ((int)_RESET_IN->type()->value() == 1)
  {
    av_seek_frame(_formatContext, -1, _formatContext->start_time, AVSEEK_FLAG_BACKWARD);
  }
    

  //loop until we get a videoframe
  //if we reach end, return to the beginning
  if (av_read_frame(_formatContext, &_packet)<0)
	{
    av_seek_frame(_formatContext, -1, _formatContext->start_time, AVSEEK_FLAG_BACKWARD); 
		_FINISH_OUT->type()->setValue(1.0f);
	}
	else
		_FINISH_OUT->type()->setValue(0.0f);
	
  // Skip non-video packets (audio, subtitles, ...).
  while (_packet.stream_index!=_videoStreamIndex)
  {    
    av_free_packet(&_packet);
    if (av_read_frame(_formatContext, &_packet)<0)
      av_seek_frame(_formatContext, -1, _formatContext->start_time, AVSEEK_FLAG_BACKWARD);
  }
  
  // Decode video frame
  do
  {    
    avcodec_decode_video2(_codecContext, _frame, &frameFinished, &_packet);
  } while (!frameFinished);

  // Convert the image from its native format to RGBA
  
  sws_scale
        (
            _sws_ctx,
            (uint8_t const * const *)_frame->data,
            _frame->linesize,
            0,
            _codecContext->height,
            _frameRGBA->data,
            _frameRGBA->linesize
        );

  
  //img_convert((AVPicture *)_frameRGBA, PIX_FMT_RGB24, (AVPicture*)_frame, _codecContext->pix_fmt, _codecContext->width, _codecContext->height);

  // Copy 3 channels per pixel and force alpha to 255 in the output buffer.
  register char *out=(char*)_VIDEO_OUT->type()->data();
  register char *in=(char*)_frameRGBA->data[0];  
  register int size=_codecContext->width*_codecContext->height;
  for (register int i=0;i<size;i++)
  {
    *out++=*in++;
    *out++=*in++;
    *out++=*in++;
    *out++=255;
  }

  // Free the packet that was allocated by av_read_frame
  av_free_packet(&_packet);
}
bool DirectShowComponent::loadMovie (const URL& videoURL)
{
    // Convenience overload: forward the URL as text (password excluded) to
    // the string-based loader.
    const auto urlText = videoURL.toString (false);
    return loadMovie (urlText);
}
Esempio n. 29
0
//--------------------------------------------------------------
//--------------------------------------------------------------
// Keyboard dispatcher: toggles display/recording flags, adjusts the
// threshold, selects the tracking algorithm and cropping mode, switches
// between live video and movie playback, and picks the output view (F1-F5).
void testApp::keyPressed  (int key){

	switch (key){
		case ' ':
			bFullScreen = !bFullScreen;
			break;
		case 'b':
			bLearnBakground = true;
			break;
		case '+':
			threshold ++;
			if (threshold > 255) threshold = 255;
			break;
		case '-':
			threshold --;
			if (threshold < 0) threshold = 0;
			break;
        // Algorithm 3 uses a lower default threshold than the others.
        case '3':   threshold = 10;
                    algorithm = key-48; // character '1' is dec number 49
                    break;
        case '0':
		case '1':
		case '2':
		case '4':
		case '5':
                    threshold = 80;
                    algorithm = key-48; // character '1' is dec number 49
                    break;
        case '6':
        case '7':
        case '8':   cropping = key;
                    break;
        case 'Z':
        case 'z':   bZoomTarget = !bZoomTarget;
                    break;
        case 's':   saveFrame = true;
                    break;

        case 'd':   settings.setDefaults();
                    break;
        // Toggle live camera <-> movie file, closing the unused source.
        case 'l':   bLiveVideo = !bLiveVideo;
                    if (bLiveVideo) {
                        vidPlayer.close();
                        setupLiveVideo();
                    }
                    else {
                        vidGrabber.close();
                        loadMovie();
                    }
                    ofSleepMillis(500);
                    break;

        case 'a':   bMovieAudio = !bMovieAudio;
                    if (!bMovieAudio) vidPlayer.setVolume(0);
                    else vidPlayer.setVolume(1);
                    break;
        case 'h':   hFlip = !hFlip; break;
        case 'v':   vFlip = !vFlip; break;

        // Start/stop recording.
        case 'r':   if (!bRecording) {
                        setupRecording();
                    }
                    else  {
                        recorder.stop();
                        cout << "STOP RECORDING"<< endl;
                    }
                    bRecording = ! bRecording;
                    break;

        case 'g': bShowGui = !bShowGui; break;
        // Force movie playback regardless of the current mode.
        case 'o': bLiveVideo = false;
                  vidGrabber.close();
                  loadMovie();
                  break;
        case 'w': openOutWin(); break;
        case 'i': // initialize connection
                  /*for(int i = 0; i < NUM_CAMERAS; i++) {
                        ofRemoveListener(ipGrabber[i]->videoResized, this, &testApp::videoResized);
                        ofxSharedIpVideoGrabber c( new ofxIpVideoGrabber());
                        IPCameraDef& cam = getNextCamera();
                        c->setUsername(cam.username);
                        c->setPassword(cam.password);
                        URI uri(cam.url);
                        c->setURI(uri);
                        c->connect();
                            ipGrabber[i] = c;
                  }*/
            break;
        case OF_KEY_F1: selOutput =  OUTPUT_IMAGE;
            break;
        case OF_KEY_F2: selOutput =  ANALYSIS_WINDOW;
            break;
        case OF_KEY_F3: selOutput =  INPUT_IMAGE;
            break;
        case OF_KEY_F4: selOutput =  FOUR_WINDOWS;
            break;
        case OF_KEY_F5: selOutput =  BG_IMAGE;
            break;

        default:
            break;
     }
}
bool DirectShowComponent::loadMovie (const File& videoFile)
{
    // Convenience overload: forward the file's absolute path to the
    // string-based loader.
    const auto fullPath = videoFile.getFullPathName();
    return loadMovie (fullPath);
}