Example #1
bool initVideoCaptureFromSettings()
{
    QString devid = ttSettings->value(SETTINGS_VIDCAP_DEVICEID).toString();

    // FPS is stored as "numerator/denominator" and resolution as "WIDTHxHEIGHT"
    QStringList fps = ttSettings->value(SETTINGS_VIDCAP_FPS, SETTINGS_VIDCAP_FPS_DEFAULT).toString().split("/");
    QStringList res = ttSettings->value(SETTINGS_VIDCAP_RESOLUTION, SETTINGS_VIDCAP_RESOLUTION_DEFAULT).toString().split("x");
    FourCC fourcc = (FourCC)ttSettings->value(SETTINGS_VIDCAP_FOURCC, SETTINGS_VIDCAP_FOURCC_DEFAULT).toInt();

    if(fps.size() == 2 && res.size() == 2)
    {
        VideoFormat format;

        format.nFPS_Numerator = fps[0].toInt();
        format.nFPS_Denominator = fps[1].toInt();
        format.nWidth = res[0].toInt();
        format.nHeight = res[1].toInt();
        format.picFourCC = fourcc;

        return initVideoCapture(devid, format);
    }
    return false;
}
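
For context, a minimal sketch of how the matching settings could be written so that initVideoCaptureFromSettings() can parse them back. This assumes ttSettings is a QSettings-style store and that the SETTINGS_VIDCAP_* keys are the same ones used above; storeVideoCaptureSettings is a hypothetical helper, not part of the original example.

// Hypothetical helper (not from the original example): writes the capture
// settings in the "num/denom" and "WIDTHxHEIGHT" string forms that
// initVideoCaptureFromSettings() expects to split back apart.
void storeVideoCaptureSettings(const QString& devid, int width, int height,
                               int fpsNumerator, int fpsDenominator, FourCC fourcc)
{
    ttSettings->setValue(SETTINGS_VIDCAP_DEVICEID, devid);
    ttSettings->setValue(SETTINGS_VIDCAP_FPS,
                         QString("%1/%2").arg(fpsNumerator).arg(fpsDenominator));
    ttSettings->setValue(SETTINGS_VIDCAP_RESOLUTION,
                         QString("%1x%2").arg(width).arg(height));
    ttSettings->setValue(SETTINGS_VIDCAP_FOURCC, (int)fourcc);
}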
Example #2
int openAVDumping(void* window, bool video_opengl, char* dumpfile, int sf) {

    if (tasflags.framerate <= 0) {
        debuglog(LCF_DUMP | LCF_ERROR, "Not supporting non deterministic timer");
        return 1;
    }

    start_frame = sf;
    accum_samples = 0;

    int width, height;
    AVPixelFormat pixfmt = initVideoCapture(window, video_opengl, &width, &height);
    if (pixfmt == AV_PIX_FMT_NONE) {
        debuglog(LCF_DUMP | LCF_ERROR, "Unable to initialize video capture");
        return 1;
    }

    /* Initialize AVCodec and AVFormat libraries */
    av_register_all();

    /* Initialize AVOutputFormat */
    outputFormat = av_guess_format(NULL, dumpfile, NULL);
    if (!outputFormat) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not find suitable output format for file ", dumpfile);
        return 1;
    }

    /* Initialize AVFormatContext */

    formatContext = avformat_alloc_context();
    if (!formatContext) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not initialize AVFormatContext");
        return 1;
    }
    formatContext->oformat = outputFormat;

    /*** Create video stream ***/

    /* Initialize video AVCodec */

    AVCodec *video_codec = NULL;
    AVCodecID codec_id = AV_CODEC_ID_MPEG4;
    //AVCodecID codec_id = AV_CODEC_ID_H264;
    video_codec = avcodec_find_encoder(codec_id);
    if (!video_codec) {
        debuglog(LCF_DUMP | LCF_ERROR, "Video codec not found");
        return 1;
    }
    outputFormat->video_codec = codec_id;

    /* Initialize video stream */

    video_st = avformat_new_stream(formatContext, video_codec);
    if (!video_st) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not initialize video AVStream");
        return 1;
    }

    /* Fill video stream parameters */
    video_st->id = formatContext->nb_streams - 1;
    video_st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    video_st->codec->codec_id = codec_id;

    video_st->codec->bit_rate = 400000;
    video_st->codec->width = width;
    video_st->codec->height = height;
    video_st->time_base = (AVRational){1,static_cast<int>(tasflags.framerate)};
    video_st->codec->time_base = (AVRational){1,static_cast<int>(tasflags.framerate)};
    video_st->codec->gop_size = 10; /* emit one intra frame every ten frames */
    video_st->codec->max_b_frames = 1;
    video_st->codec->pix_fmt = AV_PIX_FMT_YUV420P;

    /* Some formats want stream headers to be separate. */
    if (formatContext->oformat->flags & AVFMT_GLOBALHEADER)
        video_st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

    /* Use a preset for h264 */
    if (codec_id == AV_CODEC_ID_H264)
        av_opt_set(video_st->codec->priv_data, "preset", "slow", 0);

    /* Open the codec */
    if (avcodec_open2(video_st->codec, video_codec, NULL) < 0) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not open video codec");
        return 1;
    }
    
    /*** Create audio stream ***/

    /* Initialize audio AVCodec */

    AVCodec *audio_codec = NULL;
    AVCodecID audio_codec_id = AV_CODEC_ID_PCM_S16LE;
    //AVCodecID audio_codec_id = AV_CODEC_ID_VORBIS;
    audio_codec = avcodec_find_encoder(audio_codec_id);
    if (!audio_codec) {
        debuglog(LCF_DUMP | LCF_ERROR, "Audio codec not found");
        return 1;
    }

    /* Initialize audio stream */

    audio_st = avformat_new_stream(formatContext, audio_codec);
    if (!audio_st) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not initialize video AVStream");
        return 1;
    }

    /* Fill audio stream parameters */

    audio_st->id = formatContext->nb_streams - 1;
    audio_st->codec->codec_type = AVMEDIA_TYPE_AUDIO;
    if (audiocontext.outBitDepth == 8)
        audio_st->codec->sample_fmt = AV_SAMPLE_FMT_U8;
    else if (audiocontext.outBitDepth == 16)
        audio_st->codec->sample_fmt = AV_SAMPLE_FMT_S16;
    else {
        debuglog(LCF_DUMP | LCF_ERROR, "Unknown audio format");
        return 1;
    }
    audio_st->codec->bit_rate = 64000;
    audio_st->codec->sample_rate = audiocontext.outFrequency;
    audio_st->codec->channels = audiocontext.outNbChannels;

    /* Some formats want stream headers to be separate. */

    if (formatContext->oformat->flags & AVFMT_GLOBALHEADER)
        audio_st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

    /* Open the codec */
    if (avcodec_open2(audio_st->codec, audio_codec, NULL) < 0) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not open audio codec");
        return 1;
    }

    /* Initialize video AVFrame */

    video_frame = av_frame_alloc();
    if (!video_frame) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not allocate AVFrame");
        return 1;
    }
    video_frame->format = video_st->codec->pix_fmt;
    video_frame->width  = video_st->codec->width;
    video_frame->height = video_st->codec->height;

    /* Initialize audio AVFrame */
    audio_frame = av_frame_alloc();
    if (!audio_frame) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not allocate audio AVFrame");
        return 1;
    }

    /* Allocate the image buffer inside the AVFrame */

    int ret = av_image_alloc(video_frame->data, video_frame->linesize, video_st->codec->width, video_st->codec->height, video_st->codec->pix_fmt, 32);
    if (ret < 0) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not allocate raw picture buffer");
        return 1;
    }


    /* Initialize swscale context for pixel format conversion */

    toYUVctx = sws_getContext(video_frame->width, video_frame->height,  
                              pixfmt,
                              video_frame->width, video_frame->height, 
                              AV_PIX_FMT_YUV420P,
                              SWS_LANCZOS | SWS_ACCURATE_RND, NULL,NULL,NULL);

    if (toYUVctx == NULL) {
        debuglog(LCF_DUMP | LCF_ERROR, "Could not allocate swscale context");
        return 1;
    }

    /* Print information about input and output streams */
    threadState.setOwnCode(true); // We protect the following code because it performs IO that we hook
    av_dump_format(formatContext, 0, dumpfile, 1);
    
    /* Set up output file */
    if (avio_open(&formatContext->pb, dumpfile, AVIO_FLAG_WRITE) < 0) {
        threadState.setOwnCode(false);
        debuglog(LCF_DUMP | LCF_ERROR, "Could not open video file");
        return 1;
    }

    /* Write header */
    if (avformat_write_header(formatContext, NULL) < 0) {
        threadState.setOwnCode(false);
        debuglog(LCF_DUMP | LCF_ERROR, "Could not write header");
        return 1;
    }

    threadState.setOwnCode(false);
    return 0;
}
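
Once dumping is finished, the contexts allocated in openAVDumping() need a matching teardown. Below is a hedged sketch of what such a cleanup could look like, assuming the same globals used above (formatContext, video_st, audio_st, video_frame, audio_frame, toYUVctx, threadState) and the same legacy FFmpeg API generation; the real project's flush/close logic may differ.

/* Hypothetical sketch of a teardown matching openAVDumping(); not the
 * original project's cleanup. Uses only standard FFmpeg calls from the same
 * legacy API generation as the code above. */
int closeAVDumping(void) {
    threadState.setOwnCode(true); // protect hooked IO, as in openAVDumping()

    /* Finalize the container: write the trailer and close the output file */
    av_write_trailer(formatContext);
    avio_close(formatContext->pb);

    threadState.setOwnCode(false);

    /* Release codecs, frames and the swscale context */
    avcodec_close(video_st->codec);
    avcodec_close(audio_st->codec);
    av_freep(&video_frame->data[0]);   /* buffer from av_image_alloc() */
    av_frame_free(&video_frame);
    av_frame_free(&audio_frame);
    sws_freeContext(toYUVctx);

    avformat_free_context(formatContext);
    return 0;
}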