void VideoPlayer::writeVideoInfo(const Common::String &file, int16 varX, int16 varY, int16 varFrames, int16 varWidth, int16 varHeight) {
	// Probe the video file and publish its geometry and frame count into
	// the script variables addressed by varX/varY/varFrames/varWidth/varHeight.
	Properties properties;

	int slot = openVideo(false, file, properties);
	if (slot < 0) {
		// The video could not be opened: flag every output variable with -1.
		WRITE_VAR_OFFSET(varX     , (uint32) -1);
		WRITE_VAR_OFFSET(varY     , (uint32) -1);
		WRITE_VAR_OFFSET(varFrames, (uint32) -1);
		WRITE_VAR_OFFSET(varWidth , (uint32) -1);
		WRITE_VAR_OFFSET(varHeight, (uint32) -1);
		return;
	}

	Video &video = _videoSlots[slot];

	int16 left   = video.decoder->getDefaultX();
	int16 top    = video.decoder->getDefaultY();
	int16 width  = video.decoder->getWidth();
	int16 height = video.decoder->getHeight();

	// A sentinel value in varX requests the coordinates of frame 1 instead
	// of the decoder defaults.
	if (VAR_OFFSET(varX) == 0xFFFFFFFF)
		video.decoder->getFrameCoords(1, left, top, width, height);

	WRITE_VAR_OFFSET(varX     , left);
	WRITE_VAR_OFFSET(varY     , top);
	WRITE_VAR_OFFSET(varFrames, video.decoder->getFrameCount());
	WRITE_VAR_OFFSET(varWidth , width);
	WRITE_VAR_OFFSET(varHeight, height);

	closeVideo(slot);
}
void *videoThread(void *argc) { initVideo(); while(connected) video_decode(); closeVideo(); }
bool stop() { // 1. Tell all thread to exit keepWorking = false; // 2. Wait for threads to end if ( pongInstance.videocapThreadId!= 0 ) pthread_join( pongInstance.videocapThreadId, NULL ); if ( pongInstance.redFilterThreadId!= 0 ) pthread_join( pongInstance.redFilterThreadId, NULL ); // 3. Reset thread IDs pongInstance.videocapThreadId = 0; pongInstance.redFilterThreadId = 0; //pongInstance.centroidThreadId = 0; // Release queues, so the don't live on after the application ends mq_unlink( pongInstance.mqNameVideoFull ); mq_unlink( pongInstance.mqNameVideoEmpty ); mq_unlink( pongInstance.mqNameGreenFull ); mq_unlink( pongInstance.mqNameGreenEmpty ); mq_unlink( pongInstance.mqNameRedFull ); mq_unlink( pongInstance.mqNameRedEmpty ); mq_unlink( pongInstance.mqNameXYCoords ); if(closeVideo() == 0) logw("No video device opened"); else logw("Video device closed"); log("stop completed."); return true; }
void VideoPlayer::closeLiveSound() { for (int i = 1; i < kVideoSlotCount; i++) { Video *video = getVideoBySlot(i); if (!video) continue; if (video->live) closeVideo(i); } }
void VideoPlayer::stop() {
	// Nothing to stop when no event loop was ever created.
	if (!eventloop)
		return;

	// Shut down the playback machinery.
	eventloop->exit();
	closeVideo();
	closeSDL();

	// Record and announce the new state.
	curState = StoppedState;
	emit stateChanged(curState);
}
void VideoPlayer::playVideo() {
	// Decode one RLE-compressed frame of the current video into _vidSurface.
	// Frames are paced by timer 31: bail out until it expires, then re-arm it.
	if (_vm->_timers[31]._flag)
		return;
	++_vm->_timers[31]._flag;

	byte *pDest = _startCoord;  // current write position in the surface
	byte *pLine = _startCoord;  // start of the current destination line
	uint32 frameEnd = _videoData->_stream->pos() + _frameSize;

	// The frame data is a sequence of RLE packets:
	//   count with high bit set  -> skip (count & 0x7f) pixels (leave the
	//                               destination untouched)
	//   count with high bit clear-> copy count literal pixels from the stream
	// A packet may span multiple destination lines; lines are _xCount pixels
	// wide and _vidSurface->pitch bytes apart.
	while ((uint32)_videoData->_stream->pos() < frameEnd) {
		int count = _videoData->_stream->readByte();
		if (count & 0x80) {
			count &= 0x7f;
			// Skip count number of pixels
			// Loop across lines if necessary
			while (count >= (pLine + _xCount - pDest)) {
				count -= (pLine + _xCount - pDest);
				pLine += _vidSurface->pitch;
				pDest = pLine;
			}
			// Skip any remaining pixels in the new line
			pDest += count;
		} else {
			// Read count number of pixels
			// Load across lines if necessary
			while (count >= (pLine + _xCount - pDest)) {
				int lineCount = (pLine + _xCount - pDest);
				_videoData->_stream->read(pDest, lineCount);
				count -= lineCount;
				pLine += _vidSurface->pitch;
				pDest = pLine;
			}
			// Load remainder of pixels on line
			if (count > 0) {
				_videoData->_stream->read(pDest, count);
				pDest += count;
			}
		}
	}

	// If the video is playing on the screen surface, add a dirty rect
	if (_vidSurface == _vm->_screen)
		_vm->_screen->addDirtyRect(_videoBounds);

	// Advance to the next frame; when the last frame has been shown, close
	// the video and flag the end of playback.
	getFrame();
	if (++_videoFrame == _frameCount) {
		closeVideo();
		_videoEnd = true;
	}
}
AndroidVideoDecoder::~AndroidVideoDecoder() {
	//glDeleteTextures(1, &textureID);

	// OpenMAX AL destruction. The player is destroyed first, then the
	// output mix it feeds into, and finally the engine that created both.
	(*playerObj)->Destroy(playerObj);
	(*outputMixObject)->Destroy(outputMixObject);
	(*engineObject)->Destroy(engineObject);

	closeVideo();
	// NOTE(review): instanceVideo looks like a file-scope singleton pointer
	// to this decoder; cleared so no stale instance is reused — confirm
	// against the creation site.
	instanceVideo = NULL;
}
void FFMPEGInvoker::finish(EncodingContext* ctx, const SendRequest& req) {
	// Finalize the encoded movie file, read it back into memory, and hand it
	// to the interpreter as a "render.done" event.
	av_write_trailer(ctx->formatCtx);

	/* Close each codec. */
	if (ctx->videoStream)
		closeVideo(ctx, ctx->formatCtx, ctx->videoStream);

	if (!(ctx->formatCtx->oformat->flags & AVFMT_NOFILE))
		/* Close the output file. */
		avio_close(ctx->formatCtx->pb);

	/* free the stream */
	avformat_free_context(ctx->formatCtx);

	// read file
	// NOTE(review): no check that the file opened; if it did not, tellg()
	// yields -1 and the malloc/read below misbehave — worth guarding.
	std::ifstream movieFile(ctx->filename.c_str());
	movieFile.seekg(0, std::ios::end);
	size_t length = movieFile.tellg();
	movieFile.seekg(0, std::ios::beg);

	// NOTE(review): movieBuffer is never freed in this function; presumably
	// the Data ctor's trailing `true` argument adopts ownership — confirm,
	// otherwise this leaks one full movie per render.
	char* movieBuffer = (char*)malloc(length);
	movieFile.read(movieBuffer, length);

	// move to desktop for checking
	// int err = rename(ctx->filename.c_str(), "/Users/sradomski/Desktop/foo.mpg");
	// if (err) {
	// 	printf("%s", strerror(errno));
	// }

	// Echo the caller's context token back so the event can be correlated.
	std::string context;
	Event::getParam(req.params, "context", context);

	Event event;
	event.name = "render.done";
	event.data.compound["context"] = Data(context, Data::INTERPRETED);
	event.data.compound["movie"] = Data(movieBuffer, length, "video/mpeg", true);
	event.data.compound["filename"] = Data(std::string("movie.") + ctx->extension, Data::VERBATIM);
	returnEvent(event);
}
void VideoPlayer::closeAll() {
	// Shut down every video slot in order, starting at slot 0.
	int slot = 0;
	while (slot < kVideoSlotCount)
		closeVideo(slot++);
}
VideoPlayer::~VideoPlayer() {
	// Release any video still open when the player is destroyed.
	closeVideo();
}
bool VideoPlayer::openVideo(char *filename) { videoStream = -1; audioStream = -1; if(av_open_input_file(&pFormatCtx, filename, NULL, 0, NULL)!=0) { fprintf(stderr, "Couldn't open file\n"); return false; //Couldn't open file } if(av_find_stream_info(pFormatCtx)<0) { fprintf(stderr, "Couldn't find stream information\n"); return false ; // Couldn't find stream information } //dump_format(pFormatCtx, 0, filename, 0); //输出视频信息到终端 int i; for(i=0; i<pFormatCtx->nb_streams; i++) { if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO && videoStream < 0) { videoStream=i; } if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO && audioStream < 0) { audioStream=i; } } if(audioStream==-1 && videoStream==-1) { closeVideo(); fprintf(stderr, "Didn't find a audio stream\n"); return false; // Didn't find a audio stream } if (videoStream != -1) { // Get a pointer to the codec context for the video stream pCodecCtx=pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream AVCodec *pCodec=avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { fprintf(stderr, "Unsupported codec!\n"); return false; // Codec not found } // Open codec if(avcodec_open(pCodecCtx, pCodec)<0) { fprintf(stderr, "Could not open audio codec!\n"); return false; // Could not open audio codec } curType = VideoType; } else { curType = AudioType; } if (audioStream != -1) { aCodecCtx = pFormatCtx->streams[audioStream]->codec; AVCodec *aCodec = avcodec_find_decoder(aCodecCtx->codec_id); if(!aCodec) { fprintf(stderr, "Unsupported codec!\n"); return false; } if(avcodec_open(aCodecCtx, aCodec)<0) { fprintf(stderr, "Could not open video codec!\n"); return false; // Could not open video codec } } totaltime = pFormatCtx->duration; return true; }
void FFMpegManager::create(const QString &filePath, int formatId, const QStringList &paths, const QSize &size, int fps) { #ifdef HAVE_FFMPEG AVOutputFormat *fmt = guess_format(0, filePath.toLatin1().data(), 0); if ( !fmt ) { fmt = guess_format("mpeg", NULL, NULL); } // AVFormatParameters params, *ap = ¶ms; switch(formatId) { case ExportInterface::ASF: { } break; case ExportInterface::AVI: { fmt->video_codec = CODEC_ID_MSMPEG4V3; // video_st->codec.codec_tag = 0; } break; case ExportInterface::MOV: { } break; case ExportInterface::MPEG: { } break; case ExportInterface::RM: { } break; case ExportInterface::SWF: { } break; case ExportInterface::GIF: { // AVImageFormat *imageFormat = guess_image_format(filePath.toLatin1().data()); // // memset(ap, 0, sizeof(*ap)); // ap->image_format = imageFormat; } break; default: break; } AVFormatContext *oc = av_alloc_format_context(); if ( !oc ) { dError() << "Error while export"; return; } oc->oformat = fmt; snprintf(oc->filename, sizeof(oc->filename), "%s", filePath.toLatin1().data()); AVStream *video_st = addVideoStream(oc, fmt->video_codec, size.width(), size.height(), fps); if ( !video_st ) { dError() << "Can't add video stream"; return; } if (av_set_parameters(oc, 0) < 0) { dError() << "Invalid output format parameters"; return ; } dump_format(oc, 0, filePath.toLatin1().data(), 1); if (!openVideo(oc, video_st) ) { dError() << "Can't open video"; return; } if (!(fmt->flags & AVFMT_NOFILE)) { if (url_fopen(&oc->pb, filePath.toLatin1().data(), URL_WRONLY) < 0) { dError() << "Could not open " << filePath.toLatin1().data(); return; } } av_write_header(oc); double video_pts = 0.0; foreach(QString imagePath, paths) { if (video_st) { video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den; } else { video_pts = 0.0; } if (!video_st || video_pts >= m_streamDuration ) { break; } if (! 
writeVideoFrame(imagePath, oc, video_st, fps) ) { break; } } closeVideo(oc, video_st); av_write_trailer(oc); for(int i = 0; i < oc->nb_streams; i++) { av_freep(&oc->streams[i]); } if (!(fmt->flags & AVFMT_NOFILE)) { /* close the output file */ url_fclose(&oc->pb); } av_free(oc); #endif }
AV::~AV() {
	// Release the video resources associated with this instance's context.
	// NOTE(review): ct is presumably the capture/codec context owned by this
	// object — confirm against the constructor.
	closeVideo(ct);
}