QString VdpauWidget::benchH264720p()
{
    if ( !vc->isProfileSupported( VDPAUContext::ProfileH264High ) )
        return "Profile unsupported.\n";

    QString directoryName( dataDirectory );
    directoryName.append( "h264720p.dat" );
    H264Decoder *d = new H264Decoder( vc, directoryName );
    if ( !d->init() ) {
        delete d;
        return "Can't initialize H264 decoder!\n";
    }

    if ( mixerWidth!=d->width || mixerHeight!=d->height )
        createMixer( d->width, d->height );

    int i;
    for ( i=0; i<FRAMESINSAMPLE; ++i ) {
        displayFrame( d->getNextFrame(), d->width, d->height, d->ratio );
        usleep( SHOWTIME );
    }

    QTime t;
    t.start();
    for ( i=0; i<FRAMESINLOOP; ++i )
        d->getNextFrame();
    int e = t.elapsed();

    benchH264720pResult = QString("H264 DECODING (%1x%2): %3 frames/s\n")
        .arg(d->width).arg(d->height).arg(FRAMESINLOOP*1000/e);
    delete d;
    return benchH264720pResult;
}
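The timed loop above is the core of the measurement: frames are pulled without display, and throughput is FRAMESINLOOP * 1000 / elapsed_ms (note the integer division truncates). In current Qt, QTime::start()/elapsed() are deprecated in favour of the monotonic QElapsedTimer. A minimal sketch of the same pattern with that clock; the templated Decoder stands in for H264Decoder and is hypothetical:

#include <QElapsedTimer>

// Hypothetical decode-throughput helper mirroring the benchmark loop above.
template <typename Decoder>
double decodeFps( Decoder *d, int frames )
{
    QElapsedTimer t;
    t.start();                        // monotonic, immune to wall-clock changes
    for ( int i = 0; i < frames; ++i )
        d->getNextFrame();            // decode only, no display in the hot loop
    qint64 ms = t.elapsed();
    return ms > 0 ? frames * 1000.0 / ms : 0.0;  // guard against sub-ms runs
}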
QuickVideoPreview::QuickVideoPreview(QQuickItem *parent)
    : BaseQuickRenderer(parent)
{
    connect(&m_extractor, SIGNAL(positionChanged()), this, SIGNAL(timestampChanged()));
    connect(&m_extractor, SIGNAL(frameExtracted(QtAV::VideoFrame)), SLOT(displayFrame(QtAV::VideoFrame)));
    connect(&m_extractor, SIGNAL(error()), SLOT(displayNoFrame()));
    connect(this, SIGNAL(fileChanged()), SLOT(displayNoFrame()));
}
boolean Adafruit_IS31FL3731::begin(uint8_t addr) {
  Wire.begin();

  _i2caddr = addr;
  _frame = 0;

  // shutdown
  writeRegister8(ISSI_BANK_FUNCTIONREG, ISSI_REG_SHUTDOWN, 0x00);
  delay(10);
  // out of shutdown
  writeRegister8(ISSI_BANK_FUNCTIONREG, ISSI_REG_SHUTDOWN, 0x01);

  // picture mode
  writeRegister8(ISSI_BANK_FUNCTIONREG, ISSI_REG_CONFIG, ISSI_REG_CONFIG_PICTUREMODE);

  displayFrame(_frame);

  // all LEDs on & 0 PWM
  clear(); // set each led to 0 PWM
  for (uint8_t f = 0; f < 8; f++) {
    for (uint8_t i = 0; i <= 0x11; i++)
      writeRegister8(f, i, 0xff); // each 8 LEDs on
  }

  audioSync(false);

  return true;
}
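begin() leans entirely on writeRegister8(bank, reg, data). On the IS31FL3731 every register access must first select the target bank (frame page or function page) through the chip's command register, 0xFD per the datasheet. A sketch of what such a helper typically looks like; the register name and exact structure are assumptions, not necessarily the library's code:

#define ISSI_COMMANDREGISTER 0xFD  // bank-select register (IS31FL3731 datasheet)

void Adafruit_IS31FL3731::writeRegister8(uint8_t bank, uint8_t reg, uint8_t data) {
  // select the bank first...
  Wire.beginTransmission(_i2caddr);
  Wire.write(ISSI_COMMANDREGISTER);
  Wire.write(bank);
  Wire.endTransmission();
  // ...then write the byte into the chosen register of that bank
  Wire.beginTransmission(_i2caddr);
  Wire.write(reg);
  Wire.write(data);
  Wire.endTransmission();
}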
void MainWindow::on_pushButtonSeekMillisecond_clicked()
{
    // Check we've loaded a video successfully
    if(!checkVideoLoadOk())
        return;

    bool ok;
    int ms = ui->lineEditMillisecond->text().toInt(&ok);
    if(!ok || ms < 0)
    {
        QMessageBox::critical(this, "Error", "Invalid time");
        return;
    }

    // Seek to the desired frame
    if(!decoder.seekMs(ms))
    {
        QMessageBox::critical(this, "Error", "Seek failed");
        return;
    }

    // Display the frame
    displayFrame();
}
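seekMs() is the decoder's own helper; over FFmpeg it typically reduces to av_seek_frame() on the default stream followed by a codec flush. A minimal sketch under that assumption; the member names pFormatCtx and pCodecCtx are hypothetical:

bool QVideoDecoder::seekMs(int tsms)
{
    // Convert milliseconds to AV_TIME_BASE units, the scale used when
    // seeking on the "default" stream (stream index -1).
    int64_t ts = (int64_t)tsms * AV_TIME_BASE / 1000;
    if (av_seek_frame(pFormatCtx, -1, ts, AVSEEK_FLAG_BACKWARD) < 0)
        return false;
    avcodec_flush_buffers(pCodecCtx);  // drop frames buffered before the seek
    return true;
}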
QString VdpauWidget::benchMPEG4()
{
    if ( !vc->isProfileSupported( VDPAUContext::ProfileMPEG4ASP ) )
        return "Profile unsupported.\n";

#ifdef VDP_DECODER_PROFILE_MPEG4_PART2_ASP
    MPEG4Decoder *d = new MPEG4Decoder( vc, dataDirectory );
    if ( !d->init() ) {
        delete d;
        return "Can't initialize MPEG4 decoder!\n";
    }

    if ( mixerWidth!=d->width || mixerHeight!=d->height )
        createMixer( d->width, d->height );

    int i;
    for ( i=0; i<FRAMESINSAMPLE; ++i ) {
        displayFrame( d->getNextFrame(), d->width, d->height, d->ratio );
        usleep( SHOWTIME );
    }

    QTime t;
    t.start();
    for ( i=0; i<FRAMESINLOOP; ++i )
        d->getNextFrame();
    int e = t.elapsed();

    benchMPEG4Result = QString("MPEG4 DECODING (%1x%2): %3 frames/s\n")
        .arg(d->width).arg(d->height).arg(FRAMESINLOOP*1000/e);
    delete d;
#endif // VDP_DECODER_PROFILE_MPEG4_PART2_ASP
    return benchMPEG4Result;
}
// **************************************************************************
// **************************************************************************
// dfm <frame>
int P4_dumpFrame(int argc, char* argv[])
{
    int frame;

    printf("\nValidate arguments...");
    // ?? validate arguments
    frame = INTEGER(argv[1]);

    displayFrame(frame % LC3_FRAMES);

    return 0;
} // end P4_dumpFrame
void GLUTDisplay2::display()
{
    cout << "frame" << endl;
    try {
        ptr->trace();
        displayFrame();
    } catch (optix::Exception& e) {
        sutilReportError( e.getErrorString().c_str() );
        exit(2);
    }

    // Swap buffers
    glutSwapBuffers();
}
/** Loads the given video, and displays the first frame **/
void MainWindow::loadVideo(QString fileName)
{
    decoder.openFile(fileName);
    if(!decoder.isOk())
    {
        QMessageBox::critical(this, "Error", "Error loading the video");
        return;
    }

    // Get a new frame
    nextFrame();
    // Display a frame
    displayFrame();
}
QuickVideoPreview::QuickVideoPreview(QQuickItem *parent)
    :
#if CONFIG_FBO_ITEM
      QuickFBORenderer(parent)
#else
      QQuickItemRenderer(parent)
#endif
{
    connect(&m_extractor, SIGNAL(positionChanged()), this, SIGNAL(timestampChanged()));
    connect(&m_extractor, SIGNAL(frameExtracted(QtAV::VideoFrame)), SLOT(displayFrame(QtAV::VideoFrame)));
    connect(&m_extractor, SIGNAL(error()), SLOT(displayNoFrame()));
    connect(this, SIGNAL(fileChanged()), SLOT(displayNoFrame()));
}

void QuickVideoPreview::setTimestamp(int value)
{
    m_extractor.setPosition((qint64)value);
}
cv::Mat FrameManager::convertTemperatureValuesToRGB(sensor_msgs::Image* frame, unsigned int* frameCount)
{
    unsigned char* buffer = new unsigned char[frame->width * frame->height * 3];
    iBuilder.setSize(frame->width, frame->height, false);
    const unsigned char* data = &frame->data[0];
    iBuilder.convertTemperatureToPaletteImage((unsigned short*)data, buffer);

    // create RGB sensor_msgs::Image
    sensor_msgs::Image rgb_img;
    rgb_img.header.frame_id = "thermal_image_view";
    rgb_img.height = frame->height;
    rgb_img.width = frame->width;
    rgb_img.encoding = "rgb8";
    rgb_img.step = frame->width * 3;
    rgb_img.data.resize(rgb_img.height * rgb_img.step);

    *frameCount = *frameCount + 1;
    rgb_img.header.seq = *frameCount;
    rgb_img.header.stamp = ros::Time::now();

    for(unsigned int i = 0; i < frame->width * frame->height * 3; i++)
        rgb_img.data[i] = buffer[i];
    delete[] buffer; // buffer was leaked in the original; free it once copied

    cv_bridge::CvImageConstPtr cvptrS;
    try
    {
        // convert the sensor_msgs::Image to cv_bridge::CvImageConstPtr (cv::Mat)
        cvptrS = cv_bridge::toCvShare(rgb_img, cvptrS, sensor_msgs::image_encodings::BGR8);
        cv::Mat mat = cvptrS->image;

        // show frame, if configured in the launch file
        if(showFrame)
            displayFrame(&mat);
    }
    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return cv::Mat(); // don't dereference a null cvptrS after a failed conversion
    }
    return cvptrS->image;
}
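When the requested encoding matches the source, toCvShare() aliases the message buffer, so the returned cv::Mat must not outlive rgb_img; toCvCopy() avoids that lifetime coupling at the cost of a copy. A sketch of the safer variant, assuming the same encodings as above:

cv_bridge::CvImagePtr cvptr = cv_bridge::toCvCopy(rgb_img, sensor_msgs::image_encodings::BGR8);
cv::Mat mat = cvptr->image; // owns its own copy of the pixels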
/** Loads the given video, and displays the first frame **/
void MainWindow::loadVideo(QString fileName)
{
    decoder.openFile(fileName);
    if(!decoder.isOk())
    {
        QMessageBox::critical(this, "Error", "Error loading the video");
        return;
    }

    // Seek to the first frame
    if(!decoder.seekFrame(0))
    {
        QMessageBox::critical(this, "Error", "Seeking to the first frame failed");
        nextFrame(); // try to get the next frame instead of showing nothing
    }

    // Display a frame
    displayFrame();
}
void Display::run()
{
    Uint32 newTime = SDL_GetTicks();
    Uint32 oldTime, deltaTime;
    int lastFrameCount = 0;
    Uint32 fpsTime = 0;

    oldTime = newTime;
    while(!quit)
    {
        // Compute the delta.
        newTime = SDL_GetTicks();
        deltaTime = newTime - oldTime;
        oldTime = newTime;

        // FPS display.
        fpsTime += deltaTime;
        if(fpsTime >= 1000)
        {
            char title[128];
            float scale = 1000.0f / fpsTime;
            sprintf(title, "T3 Terrain Raytracer - %03.2f FPS",
                    (lastDisplayedFrame - lastFrameCount) * scale);
            SDL_WM_SetCaption(title, NULL);
            lastFrameCount = lastDisplayedFrame;
            fpsTime = 0;
        }

        // Process the events and display.
        app->update(deltaTime * 0.001f);
        receiveEvents();
        displayFrame();

        // Don't eat the CPU.
        SDL_Delay(5);
    }

    // Notify the application.
    app->quit();
}
QVideo::QVideo(QObject *parent)
    : QObject(parent),
      m_frame_rate(0.0f),
      m_looped(true),
      m_max_buffer_size(25000000),
      m_ready_to_play(false),
      m_advance_mode(QVideo::RealTime),
      m_play_timer(0),
      m_total_runtime(0),
      m_frameTimer(0),
      m_video_loaded(false),
      m_status(Paused),
      m_videoSpeedMultiplier(1)
{
    QMutexLocker locker(&qvideo_mutex);
    av_register_all();

    //m_video_encoder = new QVideoEncoder(this, this);
    m_video_decoder = new QVideoDecoder(this, this);
    m_video_decoder->start();

    //connect(this, SIGNAL(startDecode()), m_video_decoder, SLOT(decode()));
    connect(m_video_decoder, SIGNAL(reachedEnd()), this, SLOT(determineVideoEndAction()));
    connect(m_video_decoder, SIGNAL(ready(bool)), this, SLOT(setReady(bool)));
    connect(&m_nextImageTimer, SIGNAL(timeout()), this, SLOT(displayFrame()));
    m_nextImageTimer.setSingleShot(true);

    setAdvanceMode(QVideo::RealTime);

    // m_screen = new QLabel();

    // just a random default value
    m_expectedDelay = 999;
    m_last_frame_shown = 999;
    m_frame_counter = 0;
}
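The single-shot m_nextImageTimer is what paces playback: each displayFrame() slot invocation shows one frame and re-arms the timer with the delay until the next one is due. A minimal sketch of that pattern; the delay computation and the slot body are hypothetical, not this class's actual code:

void QVideo::displayFrame()
{
    // show the frame that is due now (painting/emitting elided)...
    // ...then schedule the next tick; re-arming a single-shot timer
    // per frame lets each interval be adjusted, unlike a fixed-period timer.
    int delayMs = qMax(0, m_expectedDelay);  // hypothetical per-frame delay
    m_nextImageTimer.start(delayMs);
}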
/** Display next frame **/
void MainWindow::on_pushButtonNextFrame_clicked()
{
    nextFrame();
    displayFrame();
}
void SampleViewer::display()
{
    int changedIndex;
    openni::Status rc = openni::OpenNI::waitForAnyStream(m_streams, 2, &changedIndex);
    if (rc != openni::STATUS_OK)
    {
        printf("Wait failed\n");
        return;
    }

    switch (changedIndex)
    {
    case 0:
        m_depth1.readFrame(&m_depth1Frame); break;
    case 1:
        m_depth2.readFrame(&m_depth2Frame); break;
    default:
        printf("Error in wait\n");
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

    if (m_depth1Frame.isValid() && m_eViewState != DISPLAY_MODE_DEPTH2)
        calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depth1Frame);
    else
        calculateHistogram(m_pDepthHist, MAX_DEPTH, m_depth2Frame);

    memset(m_pTexMap, 0, m_nTexMapX * m_nTexMapY * sizeof(openni::RGB888Pixel));

    // check if we need to draw image frame to texture
    switch (m_eViewState)
    {
    case DISPLAY_MODE_DEPTH1:
        displayFrame(m_depth1Frame);
        break;
    case DISPLAY_MODE_DEPTH2:
        displayFrame(m_depth2Frame);
        break;
    default:
        displayBothFrames();
    }

    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

    // Display the OpenGL texture map
    glColor4f(1, 1, 1, 1);
    glBegin(GL_QUADS);

    int nXRes = m_width;
    int nYRes = m_height;

    // upper left
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    // upper right
    glTexCoord2f((float)nXRes/(float)m_nTexMapX, 0);
    glVertex2f(GL_WIN_SIZE_X, 0);
    // bottom right
    glTexCoord2f((float)nXRes/(float)m_nTexMapX, (float)nYRes/(float)m_nTexMapY);
    glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
    // bottom left
    glTexCoord2f(0, (float)nYRes/(float)m_nTexMapY);
    glVertex2f(0, GL_WIN_SIZE_Y);

    glEnd();

    // Swap the OpenGL display buffers
    glutSwapBuffers();
}
/** Display prev frame **/
void MainWindow::on_pushButtonPrevFrame_clicked()
{
    prevFrame();
    displayFrame();
}