// Paint this widget: skip entirely when invisible or already up to date,
// otherwise restore the background, draw the contents clipped to the
// widget's clip rectangle, and clear the dirty flag.
void widget::draw()
{
	if (hidden())
		return;
	if (!dirty())
		return;

	bg_restore();

	// RAII guard: all drawing below is confined to clip_rect_ while this
	// object is alive; the clip is removed when it goes out of scope.
	clip_rect_setter clipper(video().getSurface(), &clip_rect_, clip_);

	draw_contents();
	update_rect(rect_);
	set_dirty(false);
}
// Set up the GL state shared by all post-process passes: depth test and
// blending off, a full-screen triangle-strip quad bound to SC_POSITION /
// SC_TEXCOORD0, unused attributes disabled, and the viewport sized to the
// output window.
void _setGLState() {
	glDisable(GL_DEPTH_TEST);
	glDisable(GL_BLEND);
	// Interleaved [x, y, s, t] vertices of a quad covering NDC [-1, 1].
	static const float vert[] = {
		-1.0, -1.0, +0.0, +0.0,
		+1.0, -1.0, +1.0, +0.0,
		-1.0, +1.0, +0.0, +1.0,
		+1.0, +1.0, +1.0, +1.0
	};
	// Stride is 4 floats; texcoords start 2 floats into each vertex.
	glEnableVertexAttribArray(SC_POSITION);
	glVertexAttribPointer(SC_POSITION, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (float*)vert);
	glEnableVertexAttribArray(SC_TEXCOORD0);
	glVertexAttribPointer(SC_TEXCOORD0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (float*)vert + 2);
	// Attributes the post-process shaders do not consume.
	glDisableVertexAttribArray(SC_COLOR);
	glDisableVertexAttribArray(SC_TEXCOORD1);
	glDisableVertexAttribArray(SC_NUMLIGHTS);
	glViewport(0, 0, video().getWidth(), video().getHeight());
	// Mark viewport/render-mode dirty so the core re-applies its own state
	// on the next regular draw.
	gSP.changed |= CHANGED_VIEWPORT;
	gDP.changed |= CHANGED_RENDERMODE;
}
bool game_launcher::play_screenshot_mode() { if(!cmdline_opts_.screenshot) { return true; } game_config_manager::get()->load_game_config_for_editor(); ::init_textdomains(game_config_manager::get()->game_config()); editor::start(game_config_manager::get()->game_config(), video(), screenshot_map_, true, screenshot_filename_); return false; }
// Seek the raw-video stream to aTime (microseconds). Computes the frame
// index for the target time, seeks the resource to that frame's byte
// offset, then decodes forward until a frame whose end time covers aTime
// sits at the front of the video queue. On failure the previous frame
// position is restored.
nsresult nsRawReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime)
{
	NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

	MediaResource *resource = mDecoder->GetResource();
	NS_ASSERTION(resource, "Decoder has no media resource");

	// Remember the current frame so we can roll back on error.
	uint32_t frame = mCurrentFrame;

	if (aTime >= UINT_MAX)
		return NS_ERROR_FAILURE;
	mCurrentFrame = aTime * mFrameRate / USECS_PER_S;

	// Checked arithmetic: frame * frameSize + header must not overflow.
	CheckedUint32 offset = CheckedUint32(mCurrentFrame) * mFrameSize;
	offset += sizeof(nsRawVideoHeader);
	NS_ENSURE_TRUE(offset.isValid(), NS_ERROR_FAILURE);

	nsresult rv = resource->Seek(nsISeekableStream::NS_SEEK_SET, offset.value());
	NS_ENSURE_SUCCESS(rv, rv);

	mVideoQueue.Erase();

	// Decode until a frame at/after the target time is queued.
	while(mVideoQueue.GetSize() == 0) {
		bool keyframeSkip = false;
		if (!DecodeVideoFrame(keyframeSkip, 0)) {
			mCurrentFrame = frame;
			return NS_ERROR_FAILURE;
		}

		{
			// Abort if the decoder shut down while we were decoding.
			mozilla::ReentrantMonitorAutoEnter autoMonitor(mDecoder->GetReentrantMonitor());
			if (mDecoder->GetDecodeState() == nsBuiltinDecoderStateMachine::DECODER_STATE_SHUTDOWN) {
				mCurrentFrame = frame;
				return NS_ERROR_FAILURE;
			}
		}

		// PeekFront does not transfer ownership: either pop and delete the
		// too-early frame via the nsAutoPtr, or forget() so the queue keeps
		// owning the frame we want to leave at the front.
		nsAutoPtr<VideoData> video(mVideoQueue.PeekFront());
		if (video && video->mEndTime < aTime) {
			mVideoQueue.PopFront();
			video = nullptr;
		} else {
			video.forget();
		}
	}

	return NS_OK;
}
// Allocate a one-sample frame-buffer texture sized to the current output
// resolution and back it with an empty RGBA8 GL texture using nearest
// filtering.
static CachedTexture * _createTexture()
{
	CachedTexture * pTex = textureCache().addFrameBufferTexture();

	// Sampling parameters: clamp both axes, no masking or mirroring.
	pTex->format = G_IM_FMT_RGBA;
	pTex->frameBufferTexture = CachedTexture::fbOneSample;
	pTex->clampS = 1;
	pTex->clampT = 1;
	pTex->maskS = 0;
	pTex->maskT = 0;
	pTex->mirrorS = 0;
	pTex->mirrorT = 0;

	// Size matches the output window; 4 bytes per RGBA pixel.
	pTex->realWidth = video().getWidth();
	pTex->realHeight = video().getHeight();
	pTex->textureBytes = pTex->realWidth * pTex->realHeight * 4;
	textureCache().addFrameBufferTextureSize(pTex->textureBytes);

	glBindTexture(GL_TEXTURE_2D, pTex->glName);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, pTex->realWidth, pTex->realHeight,
	             0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glBindTexture(GL_TEXTURE_2D, 0);

	return pTex;
}
void widget::bg_restore() const { clip_rect_setter clipper(video().getSurface(), &clip_rect_, clip_); if (needs_restore_) { for(std::vector< surface_restorer >::const_iterator i = restorer_.begin(), i_end = restorer_.end(); i != i_end; ++i) i->restore(); needs_restore_ = false; } else { //this function should be able to be relied upon to update the rectangle, //so do that even if we don't restore update_rect(rect_); } }
// Paint this widget when visible and dirty: restore the background, then
// draw the contents, optionally confined to the clip rectangle.
void widget::draw()
{
	if (hidden())
		return;
	if (!dirty())
		return;

	bg_restore();

	// Install a clip rectangle only when clipping is enabled; the scoped
	// pointer tears it down again when this function returns.
	util::scoped_ptr<clip_rect_setter> clipper(NULL);
	if (clip_) {
		clipper.assign(new clip_rect_setter(video().getSurface(), clip_rect_));
	}

	draw_contents();
	update_rect(rect_);
	set_dirty(false);
}
// Build the menu manager for the given initial state: the preset menus for
// the main and options screens plus the Video/Controls settings menus,
// whose categories are overlaid with the stored configuration values.
MenuManager Presets::manager(GameState state)
{
	// Preset categories, then merged with the persisted config values.
	SettingCategory video(Presets::category("Video"));
	video = Config::getCategory(video);

	SettingCategory controls(Presets::category("Controls"));
	controls = Config::getCategory(controls);

	map<GameState, Menu> menus;
	menus.insert(pair<GameState, Menu>(GameStates::Menu,
	                                   Presets::menu(GameStates::Menu)));
	menus.insert(pair<GameState, Menu>(GameStates::Options,
	                                   Presets::menu(GameStates::Options)));
	// Settings menus return to the Options screen.
	menus.insert(pair<GameState, Menu>(GameStates::Video,
	                                   Menu(GameStates::Video, GameStates::Options, video)));
	menus.insert(pair<GameState, Menu>(GameStates::Controls,
	                                   Menu(GameStates::Controls, GameStates::Options, controls)));

	return MenuManager(state, menus);
}
// Return the video registered under `key`, loading it from disk on a cache
// miss. The cache stores weak_ptrs, so a video is kept alive only while
// some caller still holds the returned shared_ptr.
std::shared_ptr<sftheora::Video> ResourceManager::getVideo(const std::string &key){
  std::map<std::string, std::weak_ptr<sftheora::Video> >::iterator iter;
  iter = mVideoMap.find(key);
  if( iter != mVideoMap.end() && !iter->second.expired() ){
    return iter->second.lock();
  }
  else{
    std::shared_ptr<sftheora::Video> video (new sftheora::Video);
    if (!video->load(mFilePath + key)){
      // NOTE(review): in release builds this assert compiles away and an
      // unloaded video is returned — consider throwing instead.
      assert(false);
    }
    // BUG FIX: map::insert is a no-op when the key already exists, so an
    // expired entry was never replaced and the video got reloaded on every
    // call. operator[] assignment overwrites the stale weak_ptr.
    mVideoMap[key] = video;
    return video;
  }
}
// Demo program: renders 200 animated frames of simple vector graphics
// (lines, rectangles, text) into an AVI file "P7.avi" using the AviWrite /
// GrafikKiste / Image project classes.
int main()
{
	try {
		// Declare a few colours (RGBA pixels; `transparent` is default-constructed).
		RGB_Pixel red(255,0,0);
		RGB_Pixel green(0,255,0);
		RGB_Pixel blue(0,0,255);
		RGB_Pixel yellow(255,255,0);
		RGB_Pixel transparent;

		// Declare the AVI output object.
#if 1
		const int height=240, width=320, frames=200; // (produces a video of roughly 45 MB)
#else
		const int height=180, width=120, frames=60; // (produces a video of roughly 3.8 MB)
#endif
		AviWrite video("P7.avi",width,height);

		// Declare and fill the first FIFO of graphic elements (a static
		// banner that is re-used in every frame).
		GrafikKiste f1;
		f1 << TextZeile(10,height/2,"OOP ist sch\x94n!",yellow,transparent);
		f1 << Rechteck(8,height/2-2,130,height/2+15,yellow);

		for (int k=0;k<frames;k++) {
			// Generate the frames; print a progress dot per frame.
			cout << '.' << flush;

			// Declare and fill the second, per-frame FIFO of graphic elements.
			GrafikKiste f;
			int i=int(100.0*sin(3.0*k*2.0*M_PI/frames))+height/2;
			int j=int(10.0*sin(10.0*k*2.0*M_PI/frames))+width/2;
			f << Linie(2*i/3,4*i/3,width-i,height-3*i,green);
			f << RechteckGefuellt(3,i-2, width-3, i/2+15,green, blue);
			f << TextZeile(+j-30,i,"Testbild",red,transparent);
			f << Rechteck(0,0,width-1,height-1,green);

			// Shift the reference point of all elements in the first FIFO and
			// copy its contents into the second FIFO.
			// NOTE(review): add_offset is applied every iteration — if it
			// accumulates rather than sets, the banner drifts; confirm intent.
			f1.add_offset(static_cast<int>(10*sin(10.0*k*2.0*M_PI/frames)),
			              static_cast<int>(10*cos(10.0*k*2.0*M_PI/frames)));
			f << f1;

			// Declare image I and draw the second FIFO into it.
			Image I(width,height);
			f.draw(I);

			// Write the image into the AVI video.
			video << I;
		}
	}
	catch(...) {
		cout << "Ausnahme gefangen" << endl;
	}
}
void create::draw_level_image() { boost::scoped_ptr<surface> image( engine_.current_level().create_image_surface(image_rect_)); if (image.get() != NULL) { SDL_Color back_color = {0,0,0,255}; draw_centered_on_background(*image, image_rect_, back_color, video().getSurface()); } else { surface display(disp_.get_screen_surface()); sdl_fill_rect(display, &image_rect_, SDL_MapRGB(display->format, 0, 0, 0)); update_rect(image_rect_); } }
// Plugin API entry point invoked when the emulated ROM is closed.
// Either forwards shutdown to the RSP worker thread and destroys it, or
// (single-threaded build) tears down texture/video/GBI state directly.
void PluginAPI::RomClosed()
{
	LOG(LOG_APIFUNC, "RomClosed\n");
#ifdef RSPTHREAD
	// The worker executes acRomClosed before we delete the thread object.
	_callAPICommand(acRomClosed);
	delete m_pRspThread;
	m_pRspThread = NULL;
#else
	TFH.shutdown();
	video().stop();
	GBI.destroy();
#endif
#ifdef DEBUG
	CloseDebugDlg();
#endif
}
// Re-send all cached display settings to the Minitel terminal so that the
// terminal's state matches the values stored in this object.
void Minitel::refreshSettings() {
  // Common parameters
  serialprint7(_currentMode);
  textColor(_currentTextColor);
  bgColor(_currentBgColor);
  // Only in graphic mode ?
  blink(_currentBlink);
  cursor(_currentShowCursor);
  // Graphic mode specific parameters
  if (_currentMode == GRAPHIC_MODE) {
    // NOTE(review): pixelate() is fed the *underline* flag here — confirm
    // these two settings intentionally share state in graphic mode.
    pixelate(_currentUnderline);
  }
  // Text mode specific parameters
  if (_currentMode == TEXT_MODE) {
    video(_currentVideo);
    charSize(_currentSize);
  }
}
void Graphic::Init(int keyboard, const SnakeGame::GameData& data) { int coeff = 20; sf::ContextSettings settings; settings.antialiasingLevel = 8; _keyboard = keyboard; if (getenv("TERM") == NULL) throw SnakeGame::GraphicException("Open window fail!"); sf::VideoMode video(data._map->GetLength() * coeff, data._map->GetWidth() * coeff); _win = new sf::RenderWindow(video, WINNAME, sf::Style::Default, settings); if (!_win) throw SnakeGame::GraphicException("Open window fail!"); _win->clear(); if (!(textures["yoshi_egg"].loadFromFile("./imgs/yoshi_egg.png")) || !(textures["yoshi_h"].loadFromFile("./imgs/yoshi_h.png")) || !(textures["yoshi_b"].loadFromFile("./imgs/yoshi_b.png")) || !(textures["yoshi_d"].loadFromFile("./imgs/yoshi_d.png")) || !(textures["yoshi_g"].loadFromFile("./imgs/yoshi_g.png")) || !(textures["wall"].loadFromFile("./imgs/wall.png")) || !(textures["map"].loadFromFile("./imgs/map.png")) || !(textures["apple"].loadFromFile("./imgs/apple.png"))) { throw new std::exception(); } textures["yoshi_egg"].setSmooth(true); textures["yoshi"].setSmooth(true); textures["wall"].setSmooth(true); textures["map"].setSmooth(true); sprites["yoshi_h"].setTexture(textures["yoshi_h"]); sprites["yoshi_h"].setScale(sf::Vector2f(1.2*coeff/34.0, 1.0*coeff/39.0)); sprites["yoshi_b"].setTexture(textures["yoshi_b"]); sprites["yoshi_b"].setScale(sf::Vector2f(1.2*coeff/30.0, 1.0*coeff/41.0)); sprites["yoshi_d"].setTexture(textures["yoshi_d"]); sprites["yoshi_d"].setScale(sf::Vector2f(1.2*coeff/40.0, 1.0*coeff/41.0)); sprites["yoshi_g"].setTexture(textures["yoshi_g"]); sprites["yoshi_g"].setScale(sf::Vector2f(1.2*coeff/40.0, 1.0*coeff/41.0)); sprites["yoshi_egg"].setTexture(textures["yoshi_egg"]); sprites["yoshi_egg"].setScale(sf::Vector2f(1.*coeff/200.0, 1.0*coeff/240.0)); sprites["wall"].setTexture(textures["wall"]); sprites["wall"].setScale(sf::Vector2f(1.0*coeff/1754.0, 1.0*coeff/1753.0)); sprites["apple"].setTexture(textures["apple"]); 
sprites["apple"].setScale(sf::Vector2f(1.0*coeff/483.0, 1.0*coeff/480.0)); sprites["map"].setTexture(textures["map"]); sprites["map"].setScale(sf::Vector2f((coeff * data._map->GetLength())/2654.0, (coeff * data._map->GetWidth())/1254.0)); sprites["map"].setColor(sf::Color(255, 255, 255, 40)); }
void create::hide_children(bool hide) { DBG_MP << (hide ? "hiding" : "showing" ) << " children widgets" << std::endl; ui::hide_children(hide); eras_menu_.hide(hide), levels_menu_.hide(hide); mods_menu_.hide(hide); filter_name_.hide(hide); filter_num_players_label_.hide(hide); map_generator_label_.hide(hide); map_size_label_.hide(hide); era_label_.hide(hide); mod_label_.hide(hide); num_players_label_.hide(hide); level_type_label_.hide(hide); level_type_combo_.hide(hide); cancel_game_.hide(hide); launch_game_.hide(hide); load_game_.hide(hide); choose_mods_.hide(hide); regenerate_map_.hide(hide); generator_settings_.hide(hide); filter_num_players_slider_.hide(hide); description_.hide(hide); filter_name_.hide(hide); if (hide) { image_restorer_.assign(NULL); } else { image_restorer_.assign(new surface_restorer(&video(), image_rect_)); engine_.current_level().set_metadata(); draw_level_image(); } }
// Run the map editor on `filename`, re-launching it whenever it exits with
// EXIT_RELOAD_DATA (after refreshing the game config and image cache).
// Returns the editor's final exit status.
editor::EXIT_STATUS game_launcher::start_editor(const std::string& filename)
{
	for (;;) {
		game_config_manager::get()->load_game_config_for_editor();
		::init_textdomains(game_config_manager::get()->game_config());

		const editor::EXIT_STATUS res = editor::start(
			game_config_manager::get()->game_config(), video(), filename);

		if (res != editor::EXIT_RELOAD_DATA)
			return res;

		// The editor requested a data reload: refresh the config and drop
		// cached images before starting it again.
		game_config_manager::get()->reload_changed_game_config();
		image::flush_cache();
	}
	return editor::EXIT_ERROR; // not supposed to happen
}
// Entry point of the dedicated RSP worker thread.
// Performs one-time RSP/GBI/video initialization, then loops forever:
// signal the plugin thread that we are idle, wait for the next APICommand,
// and execute it. A command whose run() returns false terminates the thread.
void RSP_ThreadProc(std::mutex * _pRspThreadMtx, std::mutex * _pPluginThreadMtx, std::condition_variable_any * _pRspThreadCv, std::condition_variable_any * _pPluginThreadCv, APICommand ** _pCommand)
{
	// Held for the whole lifetime of the thread; released only inside wait().
	_pRspThreadMtx->lock();
	RSP_Init();
	GBI.init();
	Config_LoadConfig();
	video().start();
	assert(!isGLError());
	while (true) {
		// Wake the plugin thread, which blocks until we are ready for work.
		_pPluginThreadMtx->lock();
		_pPluginThreadCv->notify_one();
		_pPluginThreadMtx->unlock();
		// Sleep until the plugin thread posts the next command.
		_pRspThreadCv->wait(*_pRspThreadMtx);
		// run() returning false requests shutdown.
		// NOTE(review): _pRspThreadMtx remains locked on return — verify the
		// owner expects the mutex to stay held for the thread's lifetime.
		if (*_pCommand != nullptr && !(*_pCommand)->run())
			return;
		assert(!isGLError());
	}
}
// Lazily create the image-load/store depth texture used for the N64 depth
// compare emulation path. No-op unless image textures are supported, the
// depth-compare option is enabled, and the texture does not already exist.
void DepthBuffer::initDepthImageTexture(FrameBuffer * _pBuffer)
{
#ifdef GL_IMAGE_TEXTURES_SUPPORT
	if (!video().getRender().isImageTexturesSupported() || config.frameBufferEmulation.N64DepthCompare == 0 || m_pDepthImageTexture != NULL)
		return;

	// Mirror the geometry of the colour buffer's texture; clamp, no mask/mirror.
	m_pDepthImageTexture = textureCache().addFrameBufferTexture();
	m_pDepthImageTexture->width = (uint32_t)(_pBuffer->m_pTexture->width);
	m_pDepthImageTexture->height = (uint32_t)(_pBuffer->m_pTexture->height);
	m_pDepthImageTexture->format = 0;
	m_pDepthImageTexture->size = 2;
	m_pDepthImageTexture->clampS = 1;
	m_pDepthImageTexture->clampT = 1;
	m_pDepthImageTexture->address = _pBuffer->m_startAddress;
	m_pDepthImageTexture->clampWidth = _pBuffer->m_width;
	m_pDepthImageTexture->clampHeight = _pBuffer->m_height;
	m_pDepthImageTexture->frameBufferTexture = CachedTexture::fbOneSample;
	m_pDepthImageTexture->maskS = 0;
	m_pDepthImageTexture->maskT = 0;
	m_pDepthImageTexture->mirrorS = 0;
	m_pDepthImageTexture->mirrorT = 0;
	m_pDepthImageTexture->realWidth = m_pDepthImageTexture->width;
	m_pDepthImageTexture->realHeight = m_pDepthImageTexture->height;
	m_pDepthImageTexture->textureBytes = m_pDepthImageTexture->realWidth * m_pDepthImageTexture->realHeight * fboFormats.depthImageFormatBytes;
	textureCache().addFrameBufferTextureSize(m_pDepthImageTexture->textureBytes);

	// Allocate the GL storage with the configured depth-image format.
	glBindTexture(GL_TEXTURE_2D, m_pDepthImageTexture->glName);
	glTexImage2D(GL_TEXTURE_2D, 0, fboFormats.depthImageInternalFormat, m_pDepthImageTexture->realWidth, m_pDepthImageTexture->realHeight, 0, fboFormats.depthImageFormat, fboFormats.depthImageType, NULL);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glBindTexture(GL_TEXTURE_2D, 0);

	// Attach the new texture to this object's FBO and clear it.
	// NOTE(review): the first bind-to-0 is immediately overwritten by the
	// bind to m_FBO and looks redundant.
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, m_FBO);
	glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_pDepthImageTexture->glName, 0);
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
	// Restore the caller's framebuffer and clear the depth image contents.
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _pBuffer->m_FBO);
	depthBufferList().clearBuffer(0, VI.height);
#endif // GL_IMAGE_TEXTURES_SUPPORT
}
// Redraw the menu. When the widget as a whole is not dirty, only the rows
// recorded in invalid_ are repainted (index -1 means the heading row);
// otherwise the entire menu is redrawn inside the optional clip rectangle.
void menu::draw()
{
	if(hidden()) {
		return;
	}

	if(!dirty()) {
		// Partial repaint: only the invalidated rows.
		for(std::set<int>::const_iterator i = invalid_.begin(); i != invalid_.end(); ++i) {
			if(*i == -1) {
				// -1 is the sentinel for the heading row.
				SDL_Rect heading_rect = inner_location();
				heading_rect.h = heading_height();
				bg_restore(heading_rect);
				style_->draw_row(*this,0,heading_rect,HEADING_ROW);
				update_rect(heading_rect);
			} else if(*i >= 0 && *i < int(item_pos_.size())) {
				const unsigned int pos = item_pos_[*i];
				const SDL_Rect& rect = get_item_rect(*i);
				bg_restore(rect);
				// Highlight the selected row unless focus has left the menu.
				style_->draw_row(*this,pos,rect,
					(!out_ && pos == selected_) ? SELECTED_ROW : NORMAL_ROW);
				update_rect(rect);
			}
		}
		invalid_.clear();
		return;
	}

	// Full repaint.
	invalid_.clear();
	bg_restore();

	util::scoped_ptr<clip_rect_setter> clipper(NULL);
	if(clip_rect())
		clipper.assign(new clip_rect_setter(video().getSurface(), *clip_rect()));

	draw_contents();
	update_rect(location());
	set_dirty(false);
}
void test_profile_update(){ HTTPClientSession s(HOST, PORT); HTTPRequest request(HTTPRequest::HTTP_PUT, std::string("/api/v1/profile/").append(created_profile_uuid)); JSONNode base(JSON_NODE); base.push_back(JSONNode("name","test profile")); base.push_back(JSONNode("description","test description")); JSONNode format(JSON_NODE); format.set_name("format"); format.push_back(JSONNode("id","matroska")); base.push_back(format); JSONNode video(JSON_NODE); video.set_name("video"); video.push_back(JSONNode("id","mpeg4")); base.push_back(video); JSONNode audio(JSON_NODE); audio.set_name("audio"); audio.push_back(JSONNode("id","mp2")); audio.push_back(JSONNode("ar","44100")); audio.push_back(JSONNode("ac","2")); base.push_back(audio); //request.write(os); std::ostream & os=s.sendRequest(request); os << base.write_formatted(); HTTPResponse response; std::istream& rs = s.receiveResponse(response); std::string data; StreamCopier::copyToString(rs, data); LOGDEBUG("response:"<<data); JSONNode node = getJson(data); assert_response(node); assert(node.contains("uuid")); assert(node["uuid"]==created_profile_uuid); }
// Compute the rendered size of _pText in normalized device coordinates
// (screen spans 2.0 in each axis). Advances accumulate per glyph; the last
// glyph's bitmap extent is added so the final character is fully covered.
// Outputs 0 when the font atlas is not loaded or the string is empty.
void TextDrawer::getTextSize(const char *_pText, float & _w, float & _h) const
{
	_w = _h = 0;
	if (m_pAtlas == nullptr)
		return;

	OGLVideo & ogl = video();
	const float sx = 2.0f / ogl.getWidth();
	const float sy = 2.0f / ogl.getHeight();

	// BUG FIX: bw/bh were uninitialized and read after the loop, which is
	// undefined behavior when _pText is the empty string.
	float bw = 0.0f, bh = 0.0f;
	for (const u8 *p = (const u8 *)_pText; *p; ++p) {
		bw = m_pAtlas->c[*p].bw * sx;
		bh = m_pAtlas->c[*p].bh * sy;
		_w += m_pAtlas->c[*p].ax * sx;
		_h += m_pAtlas->c[*p].ay * sy;
	}
	_w += bw;
	_h += bh;
}
// In-game display constructor: forwards to the generic display base class
// and value-initializes all game-specific state (attack indicators, route,
// chat manager, etc.), then blanks the screen for the first frame.
game_display::game_display(game_board& board, std::weak_ptr<wb::manager> wb,
		reports & reports_object,
		const config& theme_cfg,
		const config& level, bool) :
	display(&board, wb, reports_object, theme_cfg, level, false),
	overlay_map_(),
	attack_indicator_src_(),
	attack_indicator_dst_(),
	route_(),
	displayedUnitHex_(),
	sidebarScaling_(1.0),
	first_turn_(true),
	in_game_(false),
	chat_man_(new display_chat_manager(*this)),
	mode_(RUNNING),
	needs_rebuild_(false)
{
	video().clear_screen();
}
// Plugin API entry point invoked when a ROM is opened.
// Either spawns the detached RSP worker thread (which performs the video
// initialization itself) and waits until it signals readiness, or performs
// the initialization inline in single-threaded builds.
void PluginAPI::RomOpen()
{
	LOG(LOG_APIFUNC, "RomOpen\n");
#ifdef RSPTHREAD
	m_pluginThreadMtx.lock();
	m_pRspThread = new std::thread(RSP_ThreadProc, &m_rspThreadMtx, &m_pluginThreadMtx, &m_rspThreadCv, &m_pluginThreadCv, &m_command);
	m_pRspThread->detach();
	// Block until RSP_ThreadProc has finished its startup and notified us.
	m_pluginThreadCv.wait(m_pluginThreadMtx);
	m_pluginThreadMtx.unlock();
#else
	RSP_Init();
	GBI.init();
	Config_LoadConfig();
	video().start();
#endif
#ifdef DEBUG
	OpenDebugDlg();
#endif
}
void SiftExtractor::extract() { VideoCapture video(videoPath); if(!video.isOpened()) { cout.clear(); cout << "SiftExtractor: Error opening the video file" << endl; exit(-1); } int fNum = 0; int frameIndex = 0; vector<thread> pool; Mat frame; while(true) { bool status = video.read(frame); if(!status || frameIndex >= desiredFrames.size()) { break; } if(fNum == desiredFrames[frameIndex]) { //There is enough running threads already? if(pool.size() >= numThreads) { //There is! Wait then to complete... for(auto &t : pool) { t.join(); } pool.clear(); } Mat t = Mat(); frame.copyTo(t); pool.push_back(thread(&SiftExtractor::extractSift,this,std::ref(this->siftDescriptors[frameIndex]),t)); frameIndex++; } fNum++; } for(auto &t : pool) { t.join(); } pool.clear(); frame.release(); video.release(); }
// Prepare post-processing of _pBuffer: set up common GL state, copy the
// buffer's dimensions/scale into the result buffer, and select the source
// texture (resolving the multisampled attachment first when necessary).
void PostProcessor::_preDraw(FrameBuffer * _pBuffer)
{
	_setGLState();
	OGLVideo & ogl = video();

	m_pResultBuffer->m_width = _pBuffer->m_width;
	m_pResultBuffer->m_height = _pBuffer->m_height;
	m_pResultBuffer->m_scaleX = ogl.getScaleX();
	m_pResultBuffer->m_scaleY = ogl.getScaleY();

#ifdef GLES2
	// GLES2 has no multisampled FBOs to resolve.
	m_pTextureOriginal = _pBuffer->m_pTexture;
#else
	if (_pBuffer->m_pTexture->frameBufferTexture == CachedTexture::fbMultiSample) {
		// Blit the MSAA attachment into the single-sample resolve texture.
		_pBuffer->resolveMultisampledTexture(true);
		m_pTextureOriginal = _pBuffer->m_pResolveTexture;
	} else
		m_pTextureOriginal = _pBuffer->m_pTexture;
#endif

	glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
}
void Settings::SetWindowed() { sf::VideoMode video(1280, 720); std::vector<sf::VideoMode> supportedModes; supportedModes = video.getFullscreenModes(); auto it = supportedModes.begin(); while (it!=supportedModes.end()) { if(it->width/16 == it->height/9) { video = *it; //std::cout << "found: " << it->width << "*" << it->height << std::endl; break; } it++; } sf::ContextSettings setting; setting.antialiasingLevel = 8; ms_window->close(); ms_window->create(video, "SFML shapes", sf::Style::Default, setting); }
// Open the video at input_uri, record every frame to output_uri, and print
// stream details plus playback progress until the stream ends.
void VideoViewer(const std::string& input_uri, const std::string& output_uri)
{
	// Pangolin-managed options controlling Grab() blocking behaviour.
	pangolin::Var<bool> video_wait("video.wait", true);
	pangolin::Var<bool> video_newest("video.newest", false);

	// Open Video by URI
	pangolin::VideoInput video(input_uri, output_uri);
	const size_t num_streams = video.Streams().size();

	// Non-null only when the source supports playback queries (frame ids).
	pangolin::VideoPlaybackInterface* playback = pangolin::FindFirstMatchingVideoInterface<pangolin::VideoPlaybackInterface>(video);

	// Output details of video stream
	for(size_t s = 0; s < num_streams; ++s) {
		const pangolin::StreamInfo& si = video.Streams()[s];
		std::cout << "Stream " << s << ": " << si.Width() << " x " << si.Height()
				  << " " << si.PixFormat().format << " (pitch: " << si.Pitch() << " bytes)" << std::endl;
	}

	// Image buffers
	std::vector<pangolin::Image<unsigned char> > images;
	std::vector<unsigned char> buffer;
	buffer.resize(video.SizeBytes()+1);

	// Record all frames
	video.Record();

	// Stream and display video
	while(true) {
		if( !video.Grab(&buffer[0], images, video_wait, video_newest) ) {
			break;
		}
		if( playback ) {
			// '\r' keeps the progress line updating in place.
			std::cout << "Frames complete: " << playback->GetCurrentFrameId() << " / " << playback->GetTotalFrames() << '\r';
			std::cout.flush();
		}
	}
}
// Prepare post-processing of _pBuffer by copying its contents into the
// post-processor's own framebuffer (m_FBO_original): via a textured quad
// draw on GLES2, or a framebuffer blit elsewhere.
void PostProcessor::_preDraw(FrameBuffer * _pBuffer)
{
	_setGLState();
	OGLVideo & ogl = video();

#ifdef HAVE_OPENGLES2
	// GLES2 lacks glBlitFramebuffer: draw the source texture with a
	// pass-through copy shader instead (quad set up by _setGLState()).
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, m_FBO_original);
	textureCache().activateTexture(0, _pBuffer->m_pTexture);
	glUseProgram(m_copyProgram);
	glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
#else
	glBindFramebuffer(GL_READ_FRAMEBUFFER, _pBuffer->m_FBO);
	glBindFramebuffer(GL_DRAW_FRAMEBUFFER, m_FBO_original);
	glBlitFramebuffer(
		0, 0, ogl.getWidth(), ogl.getHeight(),
		0, 0, ogl.getWidth(), ogl.getHeight(),
		GL_COLOR_BUFFER_BIT, GL_LINEAR
	);
#endif
	glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
}
// Extract every frame of the video `file` and save it as <path>/img_<n>.bmp.
// Returns the list of written file paths, or an empty list on any failure.
QStringList extractFrames(QString file, QString path){
	// extract the frames
	// save each frame as picture into m_originalDir and add the filename (e.g. img_1) to the combobox
	cv::Mat frame;
	cv::VideoCapture video(file.toStdString()); // open video file
	if(!video.isOpened()){ // check if we succeeded
		std::cout << "Video wurde nicht initialsiert!" << std::endl;
		return QStringList();
	}
	int frameCounter = 0;
	QString frameName = path + "/img_";
	QString frameType = ".bmp";
	QString framePath;
	QStringList frameList;

	// BUG FIX: the old loop called video.grab() and then `video >> frame`.
	// operator>> itself performs grab+retrieve, so each iteration consumed
	// TWO frames and every other frame was silently skipped. read() decodes
	// exactly one frame per iteration.
	while(video.read(frame)){
		if(frame.empty()){
			break;
		}
		frameCounter++;
		framePath = frameName + QString::number(frameCounter) + frameType;
		// Save the frame into a file
		if(!imwrite(framePath.toStdString(), frame)){
			std::cout << "Fehler beim Speichern des Frames: "<< frameCounter << std::endl;
			return QStringList();
		}
		frameList.append(framePath);
	}
	return frameList;
}
// Called for each video sample decoded while an accurate seek is running.
// For fast (non-precise) seeks the first sample ends video seeking; for
// precise seeks, samples are dropped until the frame containing the seek
// target is reached, then the seek is finished when both tracks are done.
void AccurateSeekTask::OnVideoDecoded(MediaData* aVideoSample)
{
	AssertOwnerThread();
	MOZ_ASSERT(!mSeekTaskPromise.IsEmpty(), "Seek shouldn't be finished");

	RefPtr<MediaData> video(aVideoSample);
	MOZ_ASSERT(video);

	// The MDSM::mDecodedVideoEndTime will be updated once the whole SeekTask is
	// resolved.

	SAMPLE_LOG("OnVideoDecoded [%lld,%lld] disc=%d", video->mTime, video->GetEndTime(), video->mDiscontinuity);

	if (mFirstVideoSample) {
		mFirstVideoSample = false;
		// The first sample after a seek must be flagged as a discontinuity.
		MOZ_ASSERT(video->mDiscontinuity);
	}

	AdjustFastSeekIfNeeded(video);

	if (mTarget.IsFast()) {
		// Non-precise seek. We can stop the seek at the first sample.
		mSeekedVideoData = video;
		mDoneVideoSeeking = true;
	} else if (NS_FAILED(DropVideoUpToSeekTarget(video.get()))) {
		CancelCallbacks();
		RejectIfExist(__func__);
		return;
	}

	if (!mDoneVideoSeeking) {
		// Still before the target: ask the decoder for the next sample.
		RequestVideoData();
		return;
	}
	MaybeFinishSeek();
}