コード例 #1
0
// Slot invoked when the plugin resources request completes.
// On success, refresh this video's fields from the service result;
// in every case, re-announce the (possibly changed) status.
void PluginVideo::onRequestFinished() {
    const bool succeeded = (m_request->status() == ResourcesRequest::Ready);

    if (succeeded) {
        loadVideo(m_request->service(), m_request->result().toMap());
    }

    emit statusChanged(status());
}
コード例 #2
0
ファイル: CameraWidget.cpp プロジェクト: losas/OpenMoCap
void CameraWidget::createActions() {

    //--- On menu selection, window will show with preferred size
    _windowSelectAction = new QAction(windowTitle(), this);
    connect(_windowSelectAction, SIGNAL(triggered()), this, SLOT(showNormal()));
    connect(_windowSelectAction, SIGNAL(triggered()), this, SLOT(setFocus()));

    //--- Video Actions
    if(!_videoController->getCamera()->getCameraType())
    {
        _loadVideoAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"), "Load video", this);
        connect(_loadVideoAction, SIGNAL(triggered()), this, SLOT(loadVideo()));
    }

    _saveFrameAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"), "Save frame as...", this);
    connect(_saveFrameAction, SIGNAL(triggered()), this, SLOT(saveCurrentFrame()));

    _recordVideoAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"), "Record video... (Click inside image to stop!)", this);
    connect(_recordVideoAction, SIGNAL(triggered()), this, SLOT(recordVideo()));

    //--- Settings
    _cameraInfoAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"),
                                    "Camera...", this);
    connect(_cameraInfoAction, SIGNAL(triggered()), this, SLOT(openCameraInformationDialog()));

    _POISettingsAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"), "POI Algorithm...",
                                     this);

    _trackingSettingsAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"),
                                          "Tracking Algorithm...", this);

    _clearPOIsAction = new QAction(QIcon("Resources/imageGUI/icons/configure64.png"), "Clear initialized POIs", this);
    connect(_clearPOIsAction, SIGNAL(triggered()), this, SLOT(clearInitializedPOIs()));

}
コード例 #3
0
// Switches to the analysis tab, loads a raw YUV clip, then adds an encoded
// video through the UndoStack (bypassing the "Add video" button) and checks
// that undo/redo remove and re-add the resulting analysis box.
void TestAnalysisTab::testAddVideo() {
	QTabWidget* tabWidget = mw->findChildren<QTabWidget*>().first();
	tabWidget->setCurrentIndex(1);
	qApp->processEvents();
	QTest::qSleep(500);
	loadVideo(QFINDTESTDATA("blumeYuv420_planar_176x144.yuv"));
	//we skip pressing the "Add video" button and use the UndoStack instead
	GUI::AnalysisBoxContainer* boxCon = mw->findChildren<GUI::AnalysisBoxContainer*>().first();
	QVERIFY2(boxCon != NULL, "Could not find AnalysisBoxContainer");
	UndoRedo::AddVideo* addVideo = new UndoRedo::AddVideo(boxCon,QFINDTESTDATA("blume.mp4"));
	UndoRedo::UndoStack::getUndoStack().push(addVideo);
	// Adding the video works asynchronously; pump events and wait before
	// asserting on the memento state.
	QCoreApplication::processEvents();
	QTest::qSleep(2000);
	QCoreApplication::processEvents();
	QVERIFY2(mw->getMemento()->getAnalysisTabMemento()->getAnalysisBoxContainerMemento()->getAnalysisBoxList().size()
	         == 1,"Could not add video");
	//test UndoRedo
	TestMainWindow::triggerAction("Undo");
	QCoreApplication::processEvents();
	QTest::qSleep(2000);
	QVERIFY2(mw->getMemento()->getAnalysisTabMemento()->getAnalysisBoxContainerMemento()->getAnalysisBoxList().size()
	         == 0,"Could not undo the adding of a video");
	TestMainWindow::triggerAction("Redo");
	QCoreApplication::processEvents();
	QTest::qSleep(2000);
	QVERIFY2(mw->getMemento()->getAnalysisTabMemento()->getAnalysisBoxContainerMemento()->getAnalysisBoxList().size()
	         == 1,"Could not redo the adding of a video");
}
コード例 #4
0
/**
 * Constructs a plugin-backed video item.
 *
 * The item's fields are populated synchronously from @p video, then the
 * request's finished() signal is connected so subsequent asynchronous
 * lookups can refresh the item via onRequestFinished().
 *
 * @param service Identifier of the plugin service this video belongs to.
 * @param video   Property map used for the initial field load.
 * @param parent  QObject parent taking ownership.
 */
PluginVideo::PluginVideo(const QString &service, const QVariantMap &video, QObject *parent) :
    CTVideo(parent),
    m_request(new ResourcesRequest(this))
{
    loadVideo(service, video);
    connect(m_request, SIGNAL(finished()), this, SLOT(onRequestFinished()));
}
コード例 #5
0
ファイル: main.cpp プロジェクト: proog128/dominant_colors
// Entry point: cluster the dominant colors of an image, video, or
// directory of thumbnails, then export the cluster centers.
//
// Usage: in out-image out-csv threshold [sort-mode]
// sort-mode is optional (defaults to sort-by-count), so only four
// arguments — argc == 5 — are mandatory. The previous check (argc < 6)
// made the documented-optional argument effectively required.
int main(int argc, char* argv[])
{
    if (argc < 5) {
        std::cout << "Usage: " << argv[0] << " in out-image out-csv threshold [sort-mode]\n";
        std::cout << "  sort-mode: sort-by-color, sort-by-count\n";
        return 0;
    }

    // Build the pixel sample set from a thumbnail directory, a single
    // image, or — as a fallback — frames of a video file.
    cv::Mat Z;
    std::string f = argv[1];
    if (fs::is_directory(f)) {
        Z = loadThumbnails(f);
    } else if (fs::path(f).extension() == ".png" || fs::path(f).extension() == ".jpg") {
        Z = loadImage(f);
    } else {
        Z = loadVideo(f);
    }

    std::cout << "Clustering...\n";
    std::vector<Cluster> centers;
    // A threshold containing '.' is interpreted as a float distance
    // threshold, otherwise as an integer cluster count.
    if (std::string(argv[4]).find(".") == std::string::npos) {
        cluster(Z, (int)atoi(argv[4]), centers);
    } else {
        cluster(Z, (float)atof(argv[4]), centers);
    }

    std::string sort_mode = argc >= 6 ? argv[5] : "sort-by-count";
    if (sort_mode == "sort-by-color") {
        sortCentersByColor(centers);
    } else {
        sortCentersByCount(centers);
    }
    exportCenters(centers, argv[2], argv[3]);
    return 0;
}
コード例 #6
0
// Builds "DATA/video/<name>.ogg" from the current script step, shows a
// white placeholder frame with the step name, then attempts to open and
// display the first frame of the movie. On open failure the placeholder
// simply remains visible.
void VideoManager::loadVideoAtCurrentStep()
{
	char filename[256] = "DATA/video/";
	// NOTE(review): if currentStep == 0 and the name starts with '*', this
	// reads script[-1] — confirm the script format guarantees step 0 never
	// starts with '*'.
	if ( script[currentStep].filename[0] == '*' )
	{
		strcat_s(filename, 256, script[currentStep-1].filename); // check for videos named '*'. I think the original game used this to denote "play that video again"
	}
	else
	{
		strcat_s(filename, 256, script[currentStep].filename);
	}
	strcat_s(filename, 256, ".ogg");

	// default case: show a placeholder texture
	unloadVideo();
	clear_to_color(frameData, makecol(255,255,255));
	textprintf_centre(frameData, font, 160, 90, makecol(0,0,0), "%s", script[currentStep].filename);

	loadVideo(filename);
	if ( cmov == NULL )
	{
		al_trace("Movie %s did not open for whatever reason.\r\n", filename);
		return;
	}

	// Decode the first frame and copy it onto the display surface.
	if ( apeg_advance_stream(cmov, true) != APEG_OK)
	{
		al_trace("Video problem! Breakpoint!\r\n"); // doesn't really matter if it fails
	}
	blit(cmov->bitmap, frameData, 0, 0, 0, 0, 320, 192);
}
コード例 #7
0
void MainWindow::on_actionLoad_video_triggered()
{
    // Prompt a video to load
   QString fileName = QFileDialog::getOpenFileName(this, "Load Video",QString(),"Video (*.avi *.asf *.mpg *.mp4 *.wmv)");
   if(!fileName.isNull())
   {
      loadVideo(fileName);
   }
}
コード例 #8
0
// Adds a raw YUV clip plus two encoded videos, then types into the comment
// boxes of both analysis boxes and checks that undo/redo only affects the
// most recent comment edit while leaving the other box untouched.
void TestAnalysisTab::testCommentBox() {
    //Add yuc and 2 decoded videos
    QTabWidget* tabWidget = mw->findChildren<QTabWidget*>().first();
    tabWidget->setCurrentIndex(1);
    qApp->processEvents();
    QTest::qSleep(500);
    loadVideo(QFINDTESTDATA("blumeYuv420_planar_176x144.yuv"));
    //we skip pressing the "Add video" button and use the UndoStack instead
    GUI::AnalysisBoxContainer* boxCon = mw->findChildren<GUI::AnalysisBoxContainer*>().first();
    QVERIFY2(boxCon != NULL, "Could not find AnalysisBoxContainer");
    UndoRedo::AddVideo* addVideo = new UndoRedo::AddVideo(boxCon,QFINDTESTDATA("blume.mp4"));
    UndoRedo::UndoStack::getUndoStack().push(addVideo);
    QCoreApplication::processEvents();
    QTest::qSleep(2000);
    QCoreApplication::processEvents();
    addVideo = new UndoRedo::AddVideo(boxCon,QFINDTESTDATA("blume.mp4"));
    UndoRedo::UndoStack::getUndoStack().push(addVideo);
    QCoreApplication::processEvents();
    QTest::qSleep(2000);
    QCoreApplication::processEvents();
    // Grab the comment editors of both analysis boxes.
    QPlainTextEdit* lineEditBox1 = mw->findChildren<GUI::AnalysisBox*>().first()->findChildren<QPlainTextEdit*>().first();
    QPlainTextEdit* lineEditBox2 = mw->findChildren<GUI::AnalysisBox*>().at(1)->findChildren<QPlainTextEdit*>().first();

    // Switch both boxes to the tab that exposes the comment editor.
    tabWidget = mw->findChildren<GUI::AnalysisBox*>().first()->findChildren<QTabWidget*>().first();
    tabWidget->setCurrentIndex(1);
    qApp->processEvents();
    tabWidget = mw->findChildren<GUI::AnalysisBox*>().at(1)->findChildren<QTabWidget*>().first();
    tabWidget->setCurrentIndex(1);
    qApp->processEvents();
    // Typing is simulated via key events whose text() carries the comment.
    lineEditBox1->selectAll();
    QApplication::postEvent(lineEditBox1,new QKeyEvent(QEvent::KeyPress,Qt::Key_A,Qt::NoModifier,"test 1"));
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() == "test 1", "Error writing comment");
    TestMainWindow::triggerAction("Undo");
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() != "test 1", "Error undoing the writing of a comment");
    TestMainWindow::triggerAction("Redo");
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() == "test 1", "Error redoing the writing of a comment");
    // Interleave edits across both boxes; undo must only revert the most
    // recent edit (box 2) and leave box 1 untouched.
    lineEditBox2->selectAll();
    QApplication::postEvent(lineEditBox2,new QKeyEvent(QEvent::KeyPress,Qt::Key_A,Qt::NoModifier,"test 2"));
    TestMainWindow::waitForWindow(300);
    QApplication::postEvent(lineEditBox1,new QKeyEvent(QEvent::KeyPress,Qt::Key_A,Qt::NoModifier,"continue writing"));
    TestMainWindow::waitForWindow(300);
    QApplication::postEvent(lineEditBox2,new QKeyEvent(QEvent::KeyPress,Qt::Key_A,Qt::NoModifier," more"));
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() == "test 1continue writing", "Error writing comment");
    QVERIFY2(lineEditBox2->toPlainText() == "test 2 more", "Error writing comment");
    TestMainWindow::triggerAction("Undo");
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() == "test 1continue writing", "Error writing comment");
    QVERIFY2(lineEditBox2->toPlainText() != "test 2 more", "Error undoing writing a comment");
    TestMainWindow::triggerAction("Redo");
    TestMainWindow::waitForWindow(300);
    QVERIFY2(lineEditBox1->toPlainText() == "test 1continue writing", "Error writing comment");
    QVERIFY2(lineEditBox2->toPlainText() == "test 2 more", "Error undoing writing a comment");
}
コード例 #9
0
/**
    \fn decodeStreamHeader
    \brief Parses one ASF stream-properties chunk, dumps its fields, and
           dispatches to the video or audio loader based on the stream GUID.
    \param s Chunk reader positioned at the start of the stream header.
    \return true on success (or for ignored/unknown streams); false when the
            first video track fails to load.
*/
bool asfHeader::decodeStreamHeader(asfChunk *s)
{
uint8_t gid[16];
    // Client GID
        uint32_t audiovideo=0; // video=1, audio=2, 0=unknown
        uint32_t sid;
        s->read(gid,16);
        printf("Type            :");
        for(int z=0;z<16;z++) printf("0x%02x,",gid[z]);
        if(!memcmp(gid,asf_video,16))
        {
          printf("(video)");
          audiovideo=1;
        } else
        {
          if(!memcmp(gid,asf_audio,16))
          {
            printf("(audio)");
            audiovideo=2;
          } else printf("(? ? ? ?)");
        }
        printf("\nConceal       :");
        for(int z=0;z<16;z++) printf(":%02x",s->read8());
        printf("\n");
        // NOTE: a space is required around the PRIx macros — "%08"PRIx64
        // (no space) is ill-formed in C++11, where it parses as a
        // user-defined-literal suffix.
        printf("Reserved    : %08" PRIx64 "\n",s->read64());
        printf("Total Size  : %04" PRIx32 "\n",s->read32());
        printf("Size        : %04" PRIx32 "\n",s->read32());
        sid=s->read16();
        printf("Stream nb   : %04u\n",sid); // sid is unsigned: %u, not %d
        printf("Reserved    : %04" PRIx32 "\n",s->read32());
        switch(audiovideo)
        {
          case 1: // Video
             {
                if(_videoIndex==-1) // take the 1st video track
                {
                    _videoIndex=sid;
                    _videoStreamId= sid;
                    if(!loadVideo(s))
                    {
                      return 0;
                    }
                    ADM_info("Average fps available from ext header\n");
                }
              }
              break;
          case 2: // audio
            loadAudio(s,sid);
            break;
          default:
            break;
        }
        return true;
}
コード例 #10
0
// Loads the profile avatar, then loads the main media whose kind matches
// the feed-entry type ("image" or "video"); other types load nothing extra.
void InstagramView::initProfileImage(string fileName){
	_profileImage.load(fileName);

	const string& mediaKind = _args.type;
	if(mediaKind == "video"){
		loadVideo();
	} else if(mediaKind == "image"){
		loadImage();
	}
}
コード例 #11
0
ファイル: CasparDevice.cpp プロジェクト: IainCole/Client
// Starts playback of a clip on the given channel/videolayer.
//
// When useAuto is set, the clip is handed to loadVideo() (with the same
// parameters and the auto flag) instead of issuing PLAY directly.
// Otherwise an AMCP PLAY command is written; the SEEK and LENGTH tokens are
// emitted only for positive values, and LOOP only when looping is requested.
void CasparDevice::playVideo(int channel, int videolayer, const QString& name, const QString &transition, int duration,
                             const QString& easing, const QString& direction, int seek, int length, bool loop, bool useAuto)
{
    if (useAuto)
        loadVideo(channel, videolayer, name, transition, duration, easing, direction, seek, length, loop, false, useAuto);
    else
        writeMessage(QString("PLAY %1-%2 \"%3\" %4 %5 %6 %7 %8 %9 %10")
                     .arg(channel).arg(videolayer).arg(name).arg(transition).arg(duration).arg(easing)
                     .arg(direction)
                     .arg((seek > 0) ? QString("SEEK %1").arg(seek) : "")
                     .arg((length > 0) ? QString("LENGTH %1").arg(length) : "")
                     .arg((loop == true) ? "LOOP" : ""));
}
コード例 #12
0
// Adds a raw (yuv) and an encoded video, then clicks each histogram button
// and samples fixed screen pixels to verify the plots were drawn.
// Fix: the Blue-histogram section's failure messages were copy-pasted from
// the Green section and wrongly reported "Green histogramm".
void TestAnalysisTab::testGraphs() {
	//Add yuc and decoded video
	QTabWidget* tabWidget = mw->findChildren<QTabWidget*>().first();
	tabWidget->setCurrentIndex(1);
	qApp->processEvents();
	QTest::qSleep(500);
	loadVideo(QFINDTESTDATA("blumeYuv420_planar_176x144.yuv"));
	//we skip pressing the "Add video" button and use the UndoStack instead
	GUI::AnalysisBoxContainer* boxCon = mw->findChildren<GUI::AnalysisBoxContainer*>().first();
	QVERIFY2(boxCon != NULL, "Could not find AnalysisBoxContainer");
	UndoRedo::AddVideo* addVideo = new UndoRedo::AddVideo(boxCon,QFINDTESTDATA("blume.mp4"));
	UndoRedo::UndoStack::getUndoStack().push(addVideo);
	QCoreApplication::processEvents();
	QTest::qSleep(2000);
	QCoreApplication::processEvents();

	TestMainWindow::clickButton("Red histogram");
	QTest::qSleep(1000);
	//raw vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(600,110))==255 , "Failed clicking Red histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(360,250))!=255 , "Failed clicking Red histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(525,260))==255 , "Failed clicking Red histogramm");
	//encoded vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(570,500))==255 , "Failed clicking Red histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(330,600))!=255 , "Failed clicking Red histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(470,600))==255 , "Failed clicking Red histogramm");

	TestMainWindow::clickButton("Green histogram");
	QTest::qSleep(1000);
	//raw vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(600,110))==255 , "Failed clicking Green histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(360,250))!=255 , "Failed clicking Green histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(525,260))!=255 , "Failed clicking Green histogramm");
	//encoded vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(570,500))==255 , "Failed clicking Green histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(330,600))!=255 , "Failed clicking Green histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(470,630))!=255 , "Failed clicking Green histogramm");

	TestMainWindow::clickButton("Blue histogram");
	QTest::qSleep(1000);
	//raw vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(600,110))==255 , "Failed clicking Blue histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(340,250))!=255 , "Failed clicking Blue histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(525,260))==255 , "Failed clicking Blue histogramm");
	//encoded vid
	QVERIFY2(qGray(mw->grab().toImage().pixel(550,500))==255 , "Failed clicking Blue histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(345,600))!=255 , "Failed clicking Blue histogramm");
	QVERIFY2(qGray(mw->grab().toImage().pixel(470,600))==255 , "Failed clicking Blue histogramm");
}
コード例 #13
0
// Dispatches a thumbnail request to the image or video loader.
// Returns false when the request is silently dropped (missing file, or a
// video without a configured mplayer binary); true once a loader ran.
bool ImageManager::AsyncLoader::load( ImageRequest* request )
{
    // silently ignore images not (currently) on disk:
    if ( !request->fileSystemFileName().exists() )
        return false;

    if ( !Utilities::isVideo( request->fileSystemFileName() ) ) {
        loadImage( request );
        return true;
    }

    // Video thumbnails require mplayer; bail out when it is not configured.
    if ( MainWindow::FeatureDialog::mplayerBinary().isNull() )
        return false;

    loadVideo( request );
    return true;
}
コード例 #14
0
ファイル: MediaServer.cpp プロジェクト: ARKopp/ofxPiMapper
 // Dispatches to the loader matching the requested media type.
 // Unknown types are a fatal configuration error and abort the process.
 BaseSource* MediaServer::loadMedia(string &path, int mediaType) {
   switch (mediaType) {
     case SourceType::SOURCE_TYPE_IMAGE:
       return loadImage(path);
     case SourceType::SOURCE_TYPE_VIDEO:
       return loadVideo(path);
     case SourceType::SOURCE_TYPE_FBO:
       return loadFboSource(path);
     default: {
       std::stringstream ss;
       ss << "Can not load media of unknown type: " << mediaType;
       ofLogFatalError("MediaServer") << ss.str();
       std::exit(EXIT_FAILURE);
     }
   }
   return NULL; // unreachable; kept to satisfy the return-type contract
 }
コード例 #15
0
// Picks a random file from the data/videos directory and loads it.
// If the directory is missing or empty, loadVideo() is called with an
// empty path (preserving the original behavior).
//
// Fix: ofRandom(max) returns a float that can, due to float precision,
// equal max itself — indexing files[files.size()] is out of range. The
// index is now clamped to the last valid element.
void VideoPlayerApp::loadRandomVideo()
{
    string videoPath = "";
    ofDirectory currentVideoDirectory(ofToDataPath("videos", true));
    if (currentVideoDirectory.exists())
    {
        currentVideoDirectory.listDir();
        vector<ofFile> files = currentVideoDirectory.getFiles();
        if (!files.empty())
        {
            size_t index = 0;
            if (files.size() > 1)
            {
                index = static_cast<size_t>(ofRandom(files.size()));
                // Clamp: guards against ofRandom returning exactly max.
                if (index >= files.size())
                {
                    index = files.size() - 1;
                }
            }
            videoPath = files[index].path();
        }
    }
    loadVideo(videoPath);
}
コード例 #16
0
ファイル: playlist.cpp プロジェクト: levil/QuantumPlayer
// Appends a new Video to the playlist model. When the playlist was empty,
// the first video is selected and a loadVideo() request is emitted so the
// player starts it. Always returns true.
bool Playlist::addVideo(const QString &videoPath)
{
    int row = playlistVideos.size();

    // NOTE(review): index(0, 0) is captured before the row is inserted; on
    // an empty model this may yield an invalid index — confirm intended.
    if (!firstIndex.isValid())
        firstIndex = index(0, 0);

    // Standard Qt model mutation: notify views around the append.
    beginInsertRows(QModelIndex(), row, row);
    playlistVideos.append(new Video(videoPath));
    endInsertRows();

    lastIndex = index(rowCount() - 1, 0);

    // playRow == -1 means nothing is playing yet: start with the new video.
    if (playRow == -1) {
        playRow = 0;
        emit loadVideo(videoPathAt(0));
    }

    // Keep next/previous navigation state in sync with the new row count.
    emit nextVideoStatusChange(hasNext());
    emit previousVideoStatusChange(hasPrevious());
    return true;
}
コード例 #17
0
// Replaces the current video sprite with a fresh one playing the clip at
// `path`, configured for panning/looping/auto-start, then resets the camera.
void PanoramicVideo::loadVideo(const std::string& path){
	// Release any previously loaded sprite before creating the new one.
	if(mVideoSprite){
		mVideoSprite->release();
		mVideoSprite = nullptr;
	}

	// NOTE(review): children added here are listened to and linked to the
	// video sprite for net-sync purposes; the original author flagged this
	// indirect wiring as a design smell — a more explicit mechanism would
	// be clearer.
	auto video_sprite = addChildPtr(new ds::ui::Video(mEngine));

	// An audio buffer must be generated for panning to work.
	video_sprite->generateAudioBuffer(true);
	video_sprite->setPan(mPanning);
	video_sprite->setPlayableInstances(mPlayableInstances);
	video_sprite->setAutoStart(true);
	video_sprite->setLooping(true);
	video_sprite->setAutoSynchronize(mAutoSync);
	video_sprite->loadVideo(path);
	video_sprite->setFinalRenderToTexture(true);

	resetCamera();
}
コード例 #18
0
ファイル: interplay.cpp プロジェクト: CadeLaRen/Interplay-2.0
//--------------------------------------------------------------
void Interplay::setup(){
	mask.setup(320, 240);
	// LOAD MULTIPLE VIDEOS TO PLAY AT ONCE
	videos.push_back(loadVideo("movies/test1.mov"));
	videos.push_back(loadVideo("movies/test2.mov"));
	videos.push_back(loadVideo("movies/test3.mov"));
	videos.push_back(loadVideo("movies/test4.mov"));
	videos.push_back(loadVideo("movies/test5.mov"));
	videos.push_back(loadVideo("movies/test6.mov"));
	videos.push_back(loadVideo("movies/test7.mov"));
	videos.push_back(loadVideo("movies/test8.mov"));
	videos.push_back(loadVideo("movies/test9.mov"));
	videos.push_back(loadVideo("movies/test10.mov"));
	videos.push_back(loadVideo("movies/test11.mov"));
	videos.push_back(loadVideo("movies/test12.mov"));
	videos.push_back(loadVideo("movies/atrium3peopleTest.mov"));

//	result.allocate(moviePlayer.getWidth(), moviePlayer.getHeight(), GL_RGBA);
	mask.setMaskParams(100, 200, 100, 200);
	// CREATE TEXTURES TO STORE RGBA
	for (int i=0; i<13; i++){
		videos.at(i)->play();
		w = videos.at(i)->getWidth();
		h = videos.at(i)->getHeight();



//		ofTexture * result = new ofTexture;
//		result->allocate(w, h, GL_RGBA);
//		results.push_back(result);
	}
}
コード例 #19
0
// Loads every image, sound, font, and video asset used by the game.
void Assets::loadAssets(){
    // Track / background layers
    loadImage(&collisionMap, "images/map.png");
    loadImage(&progressMap, "images/progess-map.png");
    loadImage(&backgroundImg, "images/background-all.png");
    loadImage(&mask, "images/mask.png");
    
    loadImage(&bike1, "images/bikes/bike1.png");
    loadImage(&bike2, "images/bikes/bike2.png");
    loadImage(&bike3, "images/bikes/bike3.png");
    loadImage(&bike4, "images/bikes/bike4.png");
    loadImage(&bikeGlow, "images/bikes/bikeGlow.png");
    
    loadImage(&helmet1, "images/helmets/helmet1.png");
    loadImage(&helmet2, "images/helmets/helmet2.png");
    loadImage(&helmet3, "images/helmets/helmet3.png");
    loadImage(&helmet4, "images/helmets/helmet4.png");
    
    loadImage(&powerup1, "images/powerups/powerup1.png");
    loadImage(&powerup2, "images/powerups/powerup2.png");
    loadImage(&powerup3, "images/powerups/powerup3.png");
    loadImage(&powerup4, "images/powerups/powerup4.png");
    loadImage(&powerupGlow1, "images/powerups/powerup1-glow.png");
    loadImage(&powerupGlow2, "images/powerups/powerup2-glow.png");
    loadImage(&powerupGlow3, "images/powerups/powerup3-glow.png");
    loadImage(&powerupGlow4, "images/powerups/powerup4-glow.png");
    
    // HUD panels
    loadImage(&panelPower0, "images/panel-power/panelPower0.png");
    loadImage(&panelPower1, "images/panel-power/panelPower1.png");
    loadImage(&powerBar, "images/panel-power/powerBar.png");
    
    loadImage(&panelRankingBack, "images/ranking/rankingBack.png");
    
    loadImage(&panelWinnersBack, "images/winners/winnersBack.png");
    loadImage(&panelWinnersFront, "images/winners/winnersFront.png");
    
    loadImage(&cupBack, "images/cup/cupBack.png");
    loadImage(&cupFront, "images/cup/cupFront.png");
    
    loadImage(&panelLapsBack, "images/laps/lapsBack.png");
    loadImage(&panelLapsFront, "images/laps/lapsFront.png");
    
    // One looping engine sound per player, desynchronized via a random
    // starting position.
    for(int i = 0; i < 4; i++){
        motorSound[i].load("motorcycle.wav");
        motorSound[i].setLoop(true);
        motorSound[i].setPosition(ofRandom(1));
        //motorSound[i].setVolume(0.8);
    }
    
    cheer.load("cheer_short.wav");
    cheer.setVolume(0.4);
    cheer.setMultiPlay(true);
    
    countdownSound.load("countdown.mp3");
    countdownSound.setVolume(0.1);
    fanfare.load("fanfare.wav");
    fanfare.setVolume(0.4);
    go.load("go.wav");
    go.setVolume(0.6);
    
    
    presura25.load("GT-Pressura-Mono.otf", 25, true, true);
    
    // NOTE(review): countdown is loaded directly here and again via
    // loadVideo() below — one of the two is probably redundant; confirm.
    countdown.load("countdown-no-alpha.mp4");
    
    loadVideo(&countdown, "countdown-no-alpha.mp4");
    loadVideo(&standby, "standby.mov");
}
コード例 #20
0
ファイル: SamplerFileIO.cpp プロジェクト: mazbox/CaptureAV
// Loads a sample set: first the .gig instrument file, then — only when
// that succeeded — the companion video. The busy flag stays set for the
// whole operation. Returns true only when both loads succeeded.
bool Sampler::load(string path) {
	CaptureAV::busy = true;
	bool ok = loadGig(ofToDataPath(path+".gig", true));
	if (ok) {
		ok = loadVideo(path);
	}
	CaptureAV::busy = false;
	return ok;
}
コード例 #21
0
ファイル: ofApp.cpp プロジェクト: harryhow/Farm-Window-Alpha
//--------------------------------------------------------------
// Main render loop: drains pending OSC sync messages (frame jumps, file
// switches, play-state flags), draws two vertically stacked video players
// scaled to fill the screen, optionally overlays rotating poster images
// with text, prints debug stats, and broadcasts the current frame via OSC.
void ofApp::draw(){
    
    int frameDiff;
    
    // Drain every waiting OSC message before rendering this frame.
    while (oscReceiver.hasWaitingMessages()) {
        cout << "New msg is coming + ";
        ofxOscMessage m;
        oscReceiver.getNextMessage(&m);
        
        if (m.getAddress() == "/sync/play/FW_SH_02_HBD_A/frameJump") {
            frameJump = m.getArgAsInt32(0);
            cout << "Frame sync, jump(" << frameJump << ") frames\r\n";
            
            // do move forward
            // NOTE(review): only the player at the current index `i` is
            // re-synced here — confirm both players shouldn't jump.
            videoPlayers[i]->setFrame(frameJump);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingFile") {
            // can I get all the file list from the beginning?
            loadVideo(m.getArgAsString(0));
        }
        else if (m.getAddress() == "/sync/play/nowPlayingStart") {
            isCellStart = m.getArgAsInt32(0);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingStop") {
            isCellStop = m.getArgAsInt32(0);
        }
        else if (m.getAddress() == "/sync/play/type") {
            mediaType = m.getArgAsString(0);
        }
        else if (m.getAddress() == "/sync/play/nowPlayingKickTime") {
            cellKickTime = m.getArgAsInt32(0);
        }
        //dumpOSC(m);
    }
    
    strFruitString =  strFruitPrefix + ofToString(currentAppleAmount) + strUnit;
    
    if (!videoPause) {
//        if (((videoPlayers[i]->getPosition() * videoPlayers[i]->getDuration()) - videoPlayers[i]->getDuration()) == 0){
//            
//            if (i < N_VIDEO_PLAYERS-1) {
//                i++;
//            }
//            else {
//                i = 0;
//                loopCounter++;
//            }
//        }
        
        //for(auto p : videoPlayers) {
        //        p->draw(ofMap(i++, 0, videoPlayers.size(), 0, ofGetWidth()), ofGetHeight()/2 - 108*2, 192*4, 108*4);
        
        //ofPushMatrix();
        //ofSetColor(ofRandom(255), 0, 0);
//        ofRect(0,0,ofGetWidth(),ofGetHeight());
//        ofEnableAlphaBlending();
//        ofSetColor(255,255,255);
//        
#if 0   // for 1080 x 3840 video
        //---------------------------------------------------------- draw video texture to fullscreen.
        ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight());
        ofRectangle videoRect(0, 0, videoPlayers[i]->getWidth(), videoPlayers[i]->getHeight());
        ofRectangle videoFullscreenRect = videoRect;
        videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
        
        videoPlayers[i]->draw(0, 0, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
        
#endif
        
#if 1
        // for 2 1080 x 1920 videos
        //---------------------------------------------------------- draw video texture to fullscreen.
        
        // Each player is aspect-fitted into half of the screen height:
        // player 1 on top, player 0 on the bottom.
        for (int videoNum = 0; videoNum < videoPlayers.size(); videoNum++) {
            if (videoNum == 1) { //top
                ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight()/2);
                ofRectangle videoRect(0, 0, videoPlayers[videoNum]->getWidth(), videoPlayers[videoNum]->getHeight());
                ofRectangle videoFullscreenRect = videoRect;
                videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
                videoPlayers[videoNum]->draw(0, 0, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
            }
            else if (videoNum == 0) { //bottom
                ofRectangle screenRect(0, 0, ofGetWidth(), ofGetHeight()/2);
                ofRectangle videoRect(0, 0, videoPlayers[videoNum]->getWidth(), videoPlayers[videoNum]->getHeight());
                ofRectangle videoFullscreenRect = videoRect;
                videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
                videoPlayers[videoNum]->draw(0, ofGetHeight()/2, videoFullscreenRect.getWidth(), videoFullscreenRect.getHeight());
            }
        }

        
        // Advance the round-robin player index once per frame.
        if (i < N_VIDEO_PLAYERS-1) {
            i++;
        }
        else {
            i = 0;
            //loopCounter++;
        }

        
#endif
        
        
        //videoPlayers[i]->draw(0, 0, 900, 1400);
        
        
        //p->draw(ofMap(i++, 0, videoPlayers.size(), 0, ofGetWidth()), ofGetHeight()/2 - 108*2, 192*4, 108*4);
        
        // 2304x4096 sumsung tv @1
        // 2304x8192 ofScreen
        // 2026x3840 video
        
        //        cout << "w:" << w << "\n";
        //        p->draw(0, 0);
        //ofPopMatrix();
        //}
    }
    
    // Poster overlay mode: rotate through downloaded poster images on the
    // top half and draw a static poster on the bottom half.
    if (imageDisplay) {
        
        ofRectangle screenRect(0, 0, ofGetWidth()/2, ofGetHeight()/2);
        ofRectangle videoRect(0, 0, imgTop->width, imgTop->height);
        ofRectangle videoFullscreenRect = videoRect;
        videoFullscreenRect.scaleTo(screenRect, OF_ASPECT_RATIO_KEEP_BY_EXPANDING);
        
        if (imgTopPosters.size() == 1) {
            ofSetColor(255, 255, 255);  // very important, don't delete set color
            imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
        }

        
        if (dbgImg) {
            // Debug: draw only the image bounds.
            ofSetColor(0, 0, 255);
            imgTop->drawBounds(0, 0, imgTop->width, imgTop->height);
            imgBottom->drawBounds(0, 4096, imgBottom->width, imgBottom->height);
        }
        else {
            
            ofPushMatrix();
                ofEnableAlphaBlending();
                    ofSetColor(255, 255, 255);  // very important, don't delete set color
            
                    // ensure first image is showing
                    //imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
            
                    // Rotate to the next poster every 2 seconds.
                    if ((ofGetElapsedTimef() - initTime) > 2) {
                        //videoPause = !videoPause;
                        //imageDisplay = !imageDisplay;
                        initTime = ofGetElapsedTimef();
                        
                        if (imgTopx && !isUpdateImg) {
                            //ofColor(255, 0, 0, 100);
                            int size = imgTopPosters.size(); // size limitation is 2 by design
                            if (size > 0) {
                                imgTopPosters[imgRotateIndex]->draw(0, 0, imgTopPosters[imgRotateIndex]->width, imgTopPosters[imgRotateIndex]->height);
                                imgRotateIndex++;
                                if (imgRotateIndex >= size)
                                    imgRotateIndex = 0;
                                
                                //ofSleepMillis(3000);// well...
                            }
                            
                            //                        for (auto i = imgTopPosters.begin(); i!= imgTopPosters.end();  ++i) {
                            //                            //(*i)->draw(0, 0, imgTopx->width, imgTopx->height);
                            //                            (*i)->draw(0, 0, (*i)->width, (*i)->height);
                            //                        }
                            //imgTopx->draw(0, 0, imgTopx->width, imgTopx->height);
                        }
                        else{
                            imgTopPosters[0]->draw(0, 0, imgTopPosters[0]->width, imgTopPosters[0]->height);
                            //imgTop->draw(0, 0, imgTop->width, imgTop->height);
                        }
                    }
            
                    ofNoFill();
                    ofSetColor(100, 0, 100, 100);
                    ofCircle(imgTop->width/2, imgTop->height/3+100, 400);
                ofDisableAlphaBlending();
                
                //ofRectangle bbox;
                float fontSize = 300;
                //ofTranslate(100, 2* ofGetHeight()/3);
                //ofRotateY(50 * ofGetElapsedTimef());
                ofSetColor(100, 0, 100, 128);
                FZLfont.draw(strPoster, fontSize, imgTop->width/2-100+0.1*x, imgTop->height/3+100+0.1*y);
                //bbox = unicodeFont.getStringBoundingBox(strFruitPrefix, 100, 2* ofGetHeight()/3);
                //ofSetColor(0, 200, 0);
                //ofFill();
                //ofRotateZ(-5);
                ofEnableAlphaBlending();
            ofPopMatrix();
            
            // Bottom poster
            //ofColor(255, 255, 255);
            //ofTranslate(0, 4096); // samgung tv
            ofSetColor(255, 255, 255); // very important, don't delete set color
            ofTranslate(0, 7680/2); // pptv
            imgBottom->draw(0, 0, imgBottom->width, imgBottom->height);
        }
    }
    
    // On-screen debug statistics.
    ofDrawBitmapStringHighlight("FPS: " + ofToString(fps), 20, 360);
    ofDrawBitmapStringHighlight("Frame " + ofToString(videoPlayers[i]->getCurrentFrame()) + "/" + ofToString(videoPlayers[i]->getTotalNumFrames()), 20, 380);
    ofDrawBitmapStringHighlight("Duration " + ofToString(videoPlayers[i]->getPosition() * videoPlayers[i]->getDuration(), 2) + "/" + ofToString(videoPlayers[0]->getDuration(), 2), 20, 400);
    ofDrawBitmapStringHighlight("Speed " + ofToString(videoPlayers[i]->getSpeed(), 2), 20, 420);
    ofDrawBitmapStringHighlight("Canvas W:" + ofToString(ofGetWidth()) + " H:" + ofToString(ofGetHeight()), 20, 440);

    ofDrawBitmapString("Total Loop #" + ofToString(loopCounter) + " \nClip #" + ofToString(i), 20, 460);
    
    // send out frame number information
    msgSend.setAddress("/sync/play/FW_SH_02_HBD_A/currentFrame");
    msgSend.addIntArg(videoPlayers[i]->getCurrentFrame());
    oscSender.sendMessage(msgSend);
    
    
    
#if 0
#if 0
    ofPushMatrix();
    ofRectangle bbox;
    ofSetColor(255, 0, 0, 32);
    float fontSize = 20 /*134*/;
    //TIME_SAMPLE_START("bbox");
    //ofTranslate(100, 2* ofGetHeight()/3);
    bbox = unicodeFont.getBBox(strFruitString, fontSize, 500, 500);
    //TIME_SAMPLE_STOP("bbox");
    ofRect(bbox);
    ofPopMatrix();
#endif

    ofPushMatrix();
        ofRectangle bbox;
        float fontSize = 134;
        //ofTranslate(100, 2* ofGetHeight()/3);
        //ofRotateY(50 * ofGetElapsedTimef());
        ofSetColor(0xd3, 0xd3, 0xd3, 200);
        unicodeFont.draw(strFruitString, fontSize, 100, 2* ofGetHeight()/3);
        bbox = unicodeFont.getStringBoundingBox(strFruitPrefix, 100, 2* ofGetHeight()/3);
        ofSetColor(0, 200, 0);
        //ofFill();
        ofEnableAlphaBlending();
        ofRect(bbox);
        //ofRotateZ(-5);
    ofPopMatrix();
    
    // text background
    ofSetColor(153, 153, 153, 100);
    ofRect(100, 2* ofGetHeight()/3 - 130, ofGetWidth()-100, 160);
    
    if (isDemoMode) {
        if ((ofGetElapsedTimef() - initTimeDbg) > 3.0) {
            videoPause = !videoPause;
            imageDisplay = !imageDisplay;
            initTimeDbg = ofGetElapsedTimef();
        }
    }
    
    if (isDownloadImg){
        isDownloadImg = !isDownloadImg;
       
        cout << "Downloading..." << "\n";
        ofSetColor(100, 0, 100, 128);
        
        if (isUpdateImg) {
            string str = "Poster Updating...";
            FZLfont.draw(str, 200, ofGetWidth()/2, ofGetHeight()/4*2);
        }
        
        // read file name from iOS client
        char serverPath[512] = "http://192.168.43.155:8080/";
        if (strUpdateFileDate.length() != 0) {
            sprintf(updateURL, "%s%s.jpg", serverPath, strUpdateFileDate.c_str());
            cout << "Update URL: " << updateURL << "\n";
            ofSaveURLAsync(updateURL, "images/L1.jpg");
        }
        else{
            cout << "ERROR: No update date" << "\n";
        }
    }
    
    if (isUpdateImg) {
        // reload image here
        if (imgTopx)
            delete imgTopx;
        
        imgTopx = new ofxGiantImage();
        imgTopx->loadImage("images/L1.jpg");
        imgTopPosters.push_back(imgTopx); // push into image queues
        isUpdateImg = !isUpdateImg;
    }
#endif
    
}
コード例 #22
0
/*******************************************
  Read Headers to collect information 
********************************************/
/**
    \fn getHeaders
    \brief Parse the top-level ASF header object and its announced sub-chunks.

    Reads the mandatory header chunk first, then iterates over the declared
    number of sub-chunks. The file-properties chunk provides _packetSize and
    _duration (only fixed-size packets are accepted); each stream-properties
    chunk is classified by GID as video (delegated to loadVideo()) or audio
    (appended to _allAudioTracks). Unknown chunk types are skipped.

    \return 1 on success, 0 on a malformed header: missing header chunk,
            variable packet size, or a video stream that loadVideo() rejects.
*/
uint8_t asfHeader::getHeaders(void)
{
  uint32_t i=0,nbSubChunk;
  const chunky *id;
  uint8_t gid[16];
  // Packet size bounds. NOTE(review): mx receives the first (min) field and
  // mn the second (max) — the names are swapped, but only equality matters.
  uint32_t mn=0,mx=0;
  asfChunk chunk(_fd);
  // The very first object in the file must be the ASF header chunk.
  chunk.nextChunk();
  id=chunk.chunkId();
  if(id->id!=ADM_CHUNK_HEADER_CHUNK)
  {
    printf("[ASF] expected header chunk\n"); 
    return 0;
  }
  printf("[ASF] getting headers\n");
  chunk.dump();
  nbSubChunk=chunk.read32();        // number of sub-objects announced
  printf("NB subchunk :%u\n",nbSubChunk);
  chunk.read8();                    // 2 reserved bytes after the count
  chunk.read8();
  for(i=0;i<nbSubChunk;i++)
  {
    asfChunk *s=new asfChunk(_fd);
    uint32_t skip;                  // used by the disabled extension parser below
    s->nextChunk();
    printf("***************\n");  
    id=s->chunkId();
    s->dump();
    switch(id->id)
    {
#if 0      
      case ADM_CHUNK_HEADER_EXTENSION_CHUNK:
      {
        s->skip(16); // Clock type extension ????
        printf("?? %d\n",s->read16());
        printf("?? %d\n",s->read32());
          
        uint32_t streamNameCount;
        uint32_t payloadCount;
          
          asfChunk *u=new asfChunk(_fd);
          for(int zzz=0;zzz<8;zzz++)
          {
              u->nextChunk();
              u->dump();
              id=u->chunkId();
              if(id->id==ADM_CHUNK_EXTENDED_STREAM_PROP)
              {
                  s->skip(8); // start time 
                  s->skip(8); // end time
                  printf("Bitrate         %u :\n",u->read32());
                  printf("Buffer Size     %u :\n",u->read32());
                  printf("BFill           %u :\n",u->read32());
                  printf("Alt Bitrate     %u :\n",u->read32());
                  printf("Alt Bsize       %u :\n",u->read32());
                  printf("Alt Bfullness   %u :\n",u->read32());
                  printf("Max object Size %u :\n",u->read32());
                  printf("Flags           0x%x :\n",u->read32());
                  printf("Stream no       %u :\n",u->read16());
                  printf("Stream lang     %u :\n",u->read16());
                  // read64() is 64 bit: %llu + cast (was %lu — UB on LP32/LLP64)
                  printf("Stream time/fra %llu :\n",(unsigned long long)u->read64());
                  streamNameCount=u->read16();
                  payloadCount=u->read16();
                  printf("Stream Nm Count %u :\n",streamNameCount);
                  printf("Payload count   %u :\n",payloadCount);
                  for(int stream=0;stream<streamNameCount;stream++)
                  {
                    u->read16();
                    skip=u->read16();
                    u->skip(skip);
                  }
                  uint32_t size;
                  for(int payload=0;payload<payloadCount;payload++)
                  {
                    for(int pp=0;pp<16;pp++) printf("0x%02x,",u->read8());
                    printf("\n");
                    skip=u->read16();
                    size=u->read32();
                    u->skip(size);
                    printf("Extra Data : %d, skipd %d\n",size,skip);
                  }
                  // ftello() returns off_t (possibly 64 bit): cast before printing
                  printf("We are at %llx\n",(unsigned long long)ftello(_fd));
                }
                u->skipChunk();
          }
          delete u;
      }
      break;
#endif      
      case ADM_CHUNK_FILE_HEADER_CHUNK:
        {
            // Client GID
            printf("Client        :");
            for(int z=0;z<16;z++) printf(":%02x",s->read8());
            printf("\n");
            // read64() yields a 64 bit value: print with %llx + explicit cast.
            // The previous %08lx (and %08x below) is undefined behavior on
            // platforms where long is 32 bit (Win64, 32 bit Unix).
            printf("File size     : %08llx\n",(unsigned long long)s->read64());
            printf("Creation time : %08llx\n",(unsigned long long)s->read64());
            printf("Number of pack: %08llx\n",(unsigned long long)s->read64());
            printf("Timestamp 1   : %08llx\n",(unsigned long long)s->read64());
            // presumably the ASF "play duration" (100 ns units per spec) — confirm
            _duration=s->read64();
            printf("Timestamp 2   : %08llx\n",(unsigned long long)_duration);
            printf("Timestamp 3   : %04x\n",s->read32());
            printf("Preload       : %04x\n",s->read32());
            printf("Flags         : %04x\n",s->read32());
            mx=s->read32();         // first field = minimum data packet size
            mn=s->read32();         // second field = maximum data packet size
            // This demuxer only supports fixed-size packets (min == max).
            if(mx!=mn)
            {
              printf("Variable packet size!!\n");
              delete s;
              return 0; 
            }
            _packetSize=mx;
            printf("Min size      : %04x\n",mx);
            printf("Max size      : %04x\n",mn);
            printf("Uncompres.size: %04x\n",s->read32());
          }
          break;
      case ADM_CHUNK_STREAM_HEADER_CHUNK:
      {
         // Stream type GID decides how the rest of the chunk is parsed
        uint32_t audiovideo=0; // video=1, audio=2, 0=unknown
        uint32_t sid;
        s->read(gid,16);
        printf("Type            :");
        for(int z=0;z<16;z++) printf("0x%02x,",gid[z]);
        if(!memcmp(gid,asf_video,16))
        {
          printf("(video)");
          audiovideo=1;
        } else
        {
          if(!memcmp(gid,asf_audio,16))
          {
            printf("(audio)"); 
            audiovideo=2;
          } else printf("(? ? ? ?)"); 
        }
        printf("\nConceal       :");
        for(int z=0;z<16;z++) printf(":%02x",s->read8());
        printf("\n");
        // Reserved field is 64 bit: %llx + cast (was plain %x on a 64 bit arg)
        printf("Reserved    : %08llx\n",(unsigned long long)s->read64());
        printf("Total Size  : %04x\n",s->read32());
        printf("Size        : %04x\n",s->read32());
        sid=s->read16();
        printf("Stream nb   : %04x\n",sid);
        printf("Reserved    : %04x\n",s->read32());
        switch(audiovideo)
        {
          case 1: // Video
          {
                    _videoStreamId=sid;
                    if(!loadVideo(s))   // codec-specific parsing; 0 aborts header read
                    {
                      delete s;
                      return 0; 
                    }
                    break;
          }
          case 2: // audio
          {
            asfAudioTrak *trk=&(_allAudioTracks[_nbAudioTrack]);
            ADM_assert(_nbAudioTrack<ASF_MAX_AUDIO_TRACK);
            trk->streamIndex=sid;
            // NOTE(review): raw struct read assumes WAVHeader matches the
            // on-disk little-endian WAVEFORMATEX layout — verify on BE ports.
            s->read((uint8_t *)&(trk->wavHeader),sizeof(WAVHeader));
            trk->extraDataLen=s->read16();
            printf("Extension :%u bytes\n",trk->extraDataLen);
            if(trk->extraDataLen)
            {
              trk->extraData=new uint8_t[trk->extraDataLen];
              s->read(trk->extraData,trk->extraDataLen);
            }
              printf("#block in group   :%d\n",s->read8());
              printf("#byte in group    :%d\n",s->read16());
              printf("Align1            :%d\n",s->read16());
              printf("Align2            :%d\n",s->read16());
              _nbAudioTrack++;
            
          }
          break;
          default:break; 
          
        }
      }
      break;
       default:
         break;
    }
    s->skipChunk();   // advance to the next sibling sub-chunk
    delete s;
  }
  printf("End of headers\n");
  return 1;
}