Example No. 1
bool video_recording_state_t::initialize(uint16 video_nr, int width, int height, int depth)
{
	enum AVPixelFormat raw_fmt;
	if (depth == VIDEO_DEPTH_8BIT) raw_fmt = AV_PIX_FMT_PAL8;
	else if (depth == VIDEO_DEPTH_32BIT) raw_fmt = AV_PIX_FMT_ARGB;
	else return false;
	char filename[32];
	snprintf(filename, sizeof filename, "rec%hu.avi", video_nr);
	AVOutputFormat *fmt = av_guess_format(NULL, filename, NULL);
	if (!fmt) return false;
	if (fmt->flags & AVFMT_NOFILE) return false;

	output_context = avformat_alloc_context();
	if (!output_context) return false;
	output_context->oformat = fmt;
	snprintf(output_context->filename, sizeof(output_context->filename), "%s", filename);
	if (fmt->video_codec == AV_CODEC_ID_NONE) return false;

	if (!add_audio_stream(AV_CODEC_ID_PCM_S16LE)) return false;
	if (!add_video_stream(fmt->video_codec, width, height)) return false;
	if (!open_audio()) return false;
	if (!open_video()) return false;
	if (!(video_frame_raw = alloc_picture(video_stream, raw_fmt))) return false;
	if (!(video_frame = alloc_picture(video_stream, AV_PIX_FMT_YUV420P))) return false;
	if (!init_sws_context()) return false;

	if (avio_open(&output_context->pb, filename, AVIO_FLAG_WRITE) < 0) return false;
	avformat_write_header(output_context, NULL);
	return true;
}
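The matching teardown is not shown in this example. A minimal sketch, assuming alloc_picture() returns an AVFrame* and that the same output_context, video_frame and video_frame_raw members are in play, could look like this:

bool video_recording_state_t::finalize()
{
	// Sketch only: av_write_trailer() is only valid after a successful
	// avformat_write_header(), which initialize() performed last.
	if (output_context) {
		av_write_trailer(output_context);
		if (!(output_context->oformat->flags & AVFMT_NOFILE))
			avio_closep(&output_context->pb);
		avformat_free_context(output_context);
		output_context = NULL;
	}
	av_frame_free(&video_frame);      // frees and NULLs the pointers
	av_frame_free(&video_frame_raw);
	return true;
}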
Example No. 2
 Data(const std::string &filename, const std::string &fourcc, double fps, const Size &frame_size):
   video_st(),filename(filename),fps(fps),frame_size(frame_size){
   
   DEBUG_LOG("fps:" << fps);
   if(File(filename).exists()){
     throw ICLException("file already exists");
   }
   av_register_all();
   if(fourcc.length()) fmt = av_guess_format(fourcc.c_str(), filename.c_str(), 0);
   else fmt = av_guess_format(0, filename.c_str(), 0);
   if(!fmt) throw ICLException("Unknown format");
   oc = avformat_alloc_context();
   if (!oc) throw ICLException("Memory error");
   oc->oformat = fmt;
   snprintf(oc->filename, sizeof(oc->filename), "%s", filename.c_str());
   add_video_stream(&video_st, oc, fmt->video_codec);
   open_video(oc, &video_st);
   av_dump_format(oc, 0, filename.c_str(), 1);
   if (!(fmt->flags & AVFMT_NOFILE)) {
       if (avio_open(&oc->pb, filename.c_str(), AVIO_FLAG_WRITE) < 0) {
           throw ICLException("Could not open file");
       }
   }
   avformat_write_header(oc, 0);
 }
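avformat_write_header() returns a negative AVERROR code on failure, which the constructor above silently ignores. A hedged variant of that last call (assuming <string> and libavutil/error.h are available) could be:

   int err = avformat_write_header(oc, 0);
   if (err < 0) {
     char msg[AV_ERROR_MAX_STRING_SIZE] = {0};
     av_strerror(err, msg, sizeof(msg));
     throw ICLException(std::string("avformat_write_header failed: ") + msg);
   }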
int32_t CEncoder::enc_open(void)
{
	avformat_alloc_output_context2(&oc, NULL, format_name, filename);
	if (oc == NULL)
	{
		av_log(NULL, AV_LOG_ERROR, "Could not find suitable output format:%s(%s)\n", format_name, filename);
		return -1;
	}

	fmt = oc->oformat;
	oc->max_delay = (int32_t)(mux_max_delay * AV_TIME_BASE);
	if (packet_size != NO_VALUE) oc->packet_size = packet_size;
	strcpy(oc->filename, filename);

	if (g_enc_opt.m_VideoDisable == 0)
	{
		if ((video_codec_id == CODEC_ID_NONE) && (video_codec_name == NULL))
		{
			video_codec_id = fmt->video_codec;
		}
		video_st = add_video_stream(video_codec_id, video_codec_name);
		if (video_st == NULL) return -1;
	}
	if (g_enc_opt.m_AudioDisable == 0)
	{
		if ((audio_codec_id == CODEC_ID_NONE) && (audio_codec_name == NULL))
		{
			audio_codec_id = fmt->audio_codec;
		}
		audio_st = add_audio_stream(audio_codec_id, audio_codec_name);
		if (audio_st == NULL) return -1;
	}

	if (!(oc->oformat->flags & AVFMT_NOSTREAMS))
	{
		av_dump_format(oc, 0, filename, 1);
	}

	/* open the output file, if needed */
	if (!(oc->oformat->flags & AVFMT_NOFILE))
	{
		if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0)
		{
			av_log(NULL, AV_LOG_ERROR, "Could not open '%s'\n", filename);
			return -8;
		}
	}

	/* write the stream header, if any */
	if (avformat_write_header(oc, NULL) != 0)
	{
		av_log(NULL, AV_LOG_ERROR, "avformat_write_header() failed\n");
		return -9;
	}

	return 0;
}
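Two notes on the strcpy() above: avformat_alloc_output_context2() already stores the output name in the context, and on newer libavformat (around 58.7, FFmpeg 4.0) the fixed-size filename field is deprecated in favour of the heap-allocated url field. A hedged, version-guarded replacement for that line:

#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(58, 7, 100)
	/* 'filename' is deprecated; 'url' must be an av_malloc'd string */
	av_freep(&oc->url);
	oc->url = av_strdup(filename);
#else
	av_strlcpy(oc->filename, filename, sizeof(oc->filename));
#endif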
Example No. 4
int DirectorConference::add_stream(Stream *s, KVS &params)
{
	// FIXME: now, just pin mixer ...
	if (s->payload_type() == 100)
		return add_video_stream(s, params);
	else if (s->payload_type() == 110 || s->payload_type() == 0 || s->payload_type() == 102)
		return add_audio_stream(s, params);
	else
		return -1;
}
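The numeric payload types are RTP payload numbers; only 0 (PCMU) is a static assignment, the others are dynamic and specific to this deployment. With hypothetical names for those values, the same dispatch could read:

	// Hypothetical names; 100/102/110 are dynamic payload types whose
	// meaning comes from this system's SDP configuration.
	enum {
		PT_PCMU       = 0,    // static RTP assignment (G.711 mu-law)
		PT_VIDEO_MAIN = 100,
		PT_AUDIO_ALT  = 102,
		PT_AUDIO_MAIN = 110
	};

	switch (s->payload_type()) {
	case PT_VIDEO_MAIN:
		return add_video_stream(s, params);
	case PT_PCMU:
	case PT_AUDIO_ALT:
	case PT_AUDIO_MAIN:
		return add_audio_stream(s, params);
	default:
		return -1;
	}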
Example No. 5
static int
tfile (GeglProperties *o)
{
  Priv *p = (Priv*)o->user_data;

  p->fmt = av_guess_format (NULL, o->path, NULL);
  if (!p->fmt)
    {
      fprintf (stderr,
               "ff_save couldn't deduce output format from file extension: using MPEG.\n%s",
               "");
      p->fmt = av_guess_format ("mpeg", NULL, NULL);
    }
  p->oc = avformat_alloc_context ();
  if (!p->oc)
    {
      fprintf (stderr, "memory error\n%s", "");
      return -1;
    }

  p->oc->oformat = p->fmt;

  snprintf (p->oc->filename, sizeof (p->oc->filename), "%s", o->path);

  p->video_st = NULL;
  p->audio_st = NULL;

  if (p->fmt->video_codec != AV_CODEC_ID_NONE)
    {
      p->video_st = add_video_stream (o, p->oc, p->fmt->video_codec);
    }
  if (p->fmt->audio_codec != AV_CODEC_ID_NONE)
    {
     p->audio_st = add_audio_stream (o, p->oc, p->fmt->audio_codec);
    }


  if (p->video_st)
    open_video (p, p->oc, p->video_st);

  if (p->audio_st)
    open_audio (o, p->oc, p->audio_st);

  av_dump_format (p->oc, 0, o->path, 1);

  if (avio_open (&p->oc->pb, o->path, AVIO_FLAG_WRITE) < 0)
    {
      fprintf (stderr, "couldn't open '%s'\n", o->path);
      return -1;
    }

  avformat_write_header (p->oc, NULL);
  return 0;
}
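Unlike the other examples, this one opens the AVIO context unconditionally; muxers that set AVFMT_NOFILE do their own I/O, so a guarded sketch of that step would be:

  if (!(p->fmt->flags & AVFMT_NOFILE))
    {
      if (avio_open (&p->oc->pb, o->path, AVIO_FLAG_WRITE) < 0)
        {
          fprintf (stderr, "couldn't open '%s'\n", o->path);
          return -1;
        }
    }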
Example No. 6
bool CFFMPEGLoader::CreateMovie(const char *filename, const AVOutputFormat *format, const AVCodecContext *VideoCon, const AVCodecContext *AudioCon) {
    if(!filename)
        return false;

    AVOutputFormat *fmt;
    //*fmt=*format;
    fmt = guess_format(NULL, filename, NULL);

    pFormatCon = av_alloc_format_context();
    if(!pFormatCon) {
        cout<<"Error while allocating format context\n";
        return false;
    }
    bOutput=true;
    strcpy(pFormatCon->filename,filename);

    pFormatCon->oformat=fmt;
    pAudioStream=pVideoStream=NULL;

    if (fmt->video_codec != CODEC_ID_NONE) {
        pVideoStream = add_video_stream(pFormatCon, fmt->video_codec,VideoCon);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        pAudioStream = add_audio_stream(pFormatCon, fmt->audio_codec,AudioCon);
    }

    if (av_set_parameters(pFormatCon, NULL) < 0) {
        cout<<"Invalid output format parameters\n";
        return false;
    }

    if (pVideoStream)
        open_stream(pFormatCon, pVideoStream);
    if (pAudioStream)
        open_stream(pFormatCon, pAudioStream);

    dump_format(pFormatCon, 0, filename, 1);

    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&pFormatCon->pb, filename, URL_WRONLY) < 0) {
            cout<<"Could not open '"<<filename<<"'"<<endl;
            return false;
        }
    }

    /* write the stream header, if any */
    av_write_header(pFormatCon);
    return true;
}
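This example targets the pre-0.7 libavformat API. A rough mapping to the current names (not a drop-in replacement, since several structs changed as well):

/* guess_format()                 -> av_guess_format()                  */
/* av_alloc_format_context()      -> avformat_alloc_context()           */
/* CODEC_ID_*                     -> AV_CODEC_ID_*                      */
/* av_set_parameters(oc, NULL)    -> removed; no longer needed          */
/* dump_format()                  -> av_dump_format()                   */
/* url_fopen(&pb, f, URL_WRONLY)  -> avio_open(&pb, f, AVIO_FLAG_WRITE) */
/* av_write_header(oc)            -> avformat_write_header(oc, NULL)    */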
/*
 * Initialize the AVFormatContext
 * Called on encoder initialize and when beginning
 * each new video chunk
 */
int initializeAVFormatContext(AVFormatContext **out_oc, jbyte *output_filename, AVStream **out_video_st, AVFrame **out_picture, int video_width, int video_height, float video_crf, int *out_last_pts, int *out_video_frame_count, AVStream **out_audio_st, int16_t **out_samples, int audio_bitrate){
	AVFormatContext *oc = NULL;
	AVStream *video_st;
	AVStream *audio_st;
	AVFrame *picture;
	int16_t *samples;

	// TODO: Can we do this only once?
	/* Initialize libavcodec, and register all codecs and formats. */
	av_register_all();
	//LOGI("initializeAVFC with filename: %s", output_filename);
	if(!oc)
		LOGI("initializeAVFC, oc is properly null");

	/* allocate the output media context */
	avformat_alloc_output_context2(&oc, NULL, NULL, ((const char*) output_filename));
	if (!oc) {
		LOGI("Could not deduce output format, using mpeg");
		//printf("Could not deduce output format from file extension: using MPEG.\n");
		avformat_alloc_output_context2(&oc, NULL, "mpeg", ((const char*) output_filename));
	}
	if (!oc) {
		LOGE("Could not allocate output context");
		exit(1);
	}
	//else
		//LOGI("initializeAVFC, oc appears properly allocated");

	//LOGI("avformat_alloc_output_context2");
	fmt = oc->oformat;

	// Set AVOutputFormat video/audio codec
	fmt->video_codec = VIDEO_CODEC_ID;
	fmt->audio_codec = AUDIO_CODEC_ID;

	/* Add the audio and video streams using the default format codecs
	 * and initialize the codecs. */
	video_st = NULL;
	audio_st = NULL;
	if (fmt->video_codec != CODEC_ID_NONE) {
		video_st = add_video_stream(oc, fmt->video_codec, video_width, video_height, video_crf);
		//(AVFormatContext *oc, enum CodecID codec_id, int width, int height, float crf)
	}
	if (fmt->audio_codec != CODEC_ID_NONE) {
		audio_st = add_audio_stream(oc, fmt->audio_codec, audio_bitrate);
		//static AVStream *add_audio_stream(AVFormatContext *oc, enum CodecID codec_id, int bit_rate)
	}
	//LOGI("add_audio_stream / add_video_stream");
	/* Now that all the parameters are set, we can open the audio and
	 * video codecs and allocate the necessary encode buffers. */
	if (video_st){
		open_video(oc, video_st, &picture);
		//open_video(AVFormatContext *oc, AVStream *st, AVFrame *picture
	}
	if (audio_st){
		open_audio(oc, audio_st ,&samples);
		//open_audio(AVFormatContext *oc, AVStream *st, int16_t *samples)
	}

	av_dump_format(oc, 0, output_filename, 1);

	//LOGI("open audio / video");
	/* open the output file, if needed */
	if (!(fmt->flags & AVFMT_NOFILE)) {
		char *error_buffer_ptr;
		char error_buffer[90];
		error_buffer_ptr = error_buffer;
		//LOGI("pre avio_open2");
		int error = avio_open2(&oc->pb, output_filename, AVIO_FLAG_WRITE, NULL, NULL);
		//LOGI("post avio_open2");
		if ( error < 0) {
			av_strerror (error, error_buffer_ptr, 90);
			LOGE("Could not open %s. Error: %s", output_filename, error_buffer_ptr);
			//fprintf(stderr, "Could not open '%s'\n", native_output_file_lq1);
			exit(-420);
		}
	}

	/* Write the stream header, if any. */
	//LOGI("pre avformat_write_header");
	avformat_write_header(oc, NULL);
	//LOGI("avformat_write_header");
	//LOGI("end initializeAVFC: audio_input_frame_size: %d fps: %d", audio_input_frame_size, video_st->codec->time_base.den);

	// Set results to output arguments
	*out_oc = oc;
	*out_video_st = video_st;
	*out_audio_st = audio_st;
	*out_picture = picture;
	*out_samples = samples;
	*out_last_pts = -1;
	*out_video_frame_count = 0;

	return audio_input_frame_size;
}
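On the TODO above: av_register_all() may be called any number of times, and since FFmpeg 4.0 (libavformat 58.9) it is deprecated and unnecessary. A hedged guard that builds on both old and new versions:

#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
	/* Only pre-4.0 FFmpeg needs explicit registration; calling it
	 * repeatedly is harmless either way. */
	av_register_all();
#endif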
Example No. 8
int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* Initialize libavcodec, and register all codecs and formats. */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        return 1;
    }

    filename = argv[1];

    /* Autodetect the output format from the name. default is MPEG. */
    fmt = av_guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = av_guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        return 1;
    }

    /* Allocate the output media context. */
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        return 1;
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* Add the audio and video streams using the default format codecs
     * and initialize the codecs. */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != AV_CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != AV_CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    /* Now that all the parameters are set, we can open the audio and
     * video codecs and allocate the necessary encode buffers. */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    av_dump_format(oc, 0, filename, 1);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            return 1;
        }
    }

    /* Write the stream header, if any. */
    avformat_write_header(oc, NULL);

    for (;;) {
        /* Compute current audio and video time. */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * video_st->time_base.num /
                        video_st->time_base.den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
        }
    }

    /* Write the trailer, if any. The trailer must be written before you
     * close the CodecContexts open when you wrote the header; otherwise
     * av_write_trailer() may try to use memory that was freed on
     * av_codec_close(). */
    av_write_trailer(oc);

    /* Close each codec. */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* Free the streams. */
    for (i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE))
        /* Close the output file. */
        avio_close(oc->pb);

    /* free the stream */
    av_free(oc);

    return 0;
}
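AVStream.pts, used for audio_pts/video_pts above, was later removed from the public API, so this revision of the example no longer builds against current FFmpeg; newer revisions (like the last listing on this page) keep their own counters. A rough sketch of that approach, assuming both streams exist:

/* sketch: track the next pts per stream instead of reading the removed
 * AVStream.pts field */
typedef struct OutputStream {
    AVStream *st;
    int64_t   next_pts;   /* pts of the next frame, in st->time_base units */
} OutputStream;

OutputStream video = { 0 }, audio = { 0 };
/* ... streams added, next_pts advanced by each write_*_frame() ... */

if (av_compare_ts(video.next_pts, video.st->time_base,
                  audio.next_pts, audio.st->time_base) <= 0)
    write_video_frame(oc, &video);
else
    write_audio_frame(oc, &audio);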
void create_video_file(const char*filename,int width,int height)
{
/* auto detect the output format from the name. default is
       mpeg. */
    //fmt = av_guess_format(NULL, filename, NULL);

#if (LIBAVFORMAT_VERSION_INT>=AV_VERSION_INT(52,81,0))
	#define libavformat_guess_format av_guess_format
#else
	#define libavformat_guess_format guess_format
#endif

	fmt = libavformat_guess_format(NULL, filename, NULL);

	if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        //fmt = av_guess_format("mpeg", NULL, NULL);
        fmt = libavformat_guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    /* allocate the output media context */
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;

    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec,width,height);
    }

    /* set the output parameters (must be done even if no
       parameters). */
    if (av_set_parameters(oc, NULL) < 0) {
        fprintf(stderr, "Invalid output format parameters\n");
        exit(1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);
 }
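On current libavformat the whole open sequence above shrinks considerably, since avformat_alloc_output_context2() replaces the guess/alloc/assign dance and av_set_parameters() no longer exists. A sketch using the same globals and helpers:

void create_video_file(const char *filename, int width, int height)
{
    /* one call guesses the format (falling back to MPEG) and
       allocates the context with oformat and the output name set */
    avformat_alloc_output_context2(&oc, NULL, NULL, filename);
    if (!oc)
        avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);
    if (!oc) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }
    fmt = oc->oformat;

    video_st = NULL;
    if (fmt->video_codec != AV_CODEC_ID_NONE)
        video_st = add_video_stream(oc, fmt->video_codec, width, height);

    av_dump_format(oc, 0, filename, 1);

    if (video_st)
        open_video(oc, video_st);

    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            exit(1);
        }
    }

    avformat_write_header(oc, NULL);
}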
Example No. 10
int ff_example(const char *filename, const char *format)
{
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *video_st;
    double video_pts;
    int i;

    fmt = av_guess_format(format, NULL, NULL);
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        exit(1);
    }

    fmt->video_codec = CODEC_ID_MJPEG;

    /* allocate the output media context */
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        exit(1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    video_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE)
        video_st = add_video_stream(oc, fmt->video_codec);

    av_dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);

    if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
        fprintf(stderr, "Could not open '%s'\n", filename);
        exit(1);
    }


    /* write the stream header, if any */
    avformat_write_header(oc, NULL);

    for(;;) {

        video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
        printf("pts: %f\n", video_pts);

        if (frame_count > STREAM_NB_FRAMES)
            break;

        /* write video frames */
        if (write_video_frame(oc, video_st) < 0)
            break;
    }

    printf("%d frames written\n", frame_count);

    av_write_trailer(oc);

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);

    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    avio_close(oc->pb);

    /* free the stream */
    av_free(oc);

    return 0;
}
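One caveat with the MJPEG forcing above: av_guess_format() hands back a pointer into the global list of registered muxers, so writing to fmt->video_codec mutates shared state, and FFmpeg 5.0 makes these structs const so the assignment no longer compiles. A hedged alternative keeps the forced codec in a local:

    /* force MJPEG locally instead of patching the shared AVOutputFormat */
    enum AVCodecID forced_video_codec = AV_CODEC_ID_MJPEG;

    video_st = NULL;
    if (forced_video_codec != AV_CODEC_ID_NONE)
        video_st = add_video_stream(oc, forced_video_codec);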
Example No. 11
int main(int argc, char *argv[])
{
    IDeckLinkIterator *deckLinkIterator = CreateDeckLinkIteratorInstance();
    DeckLinkCaptureDelegate *delegate;
    BMDDisplayMode selectedDisplayMode = bmdModeNTSC;
    int displayModeCount               = 0;
    int exitStatus                     = 1;
    int aconnection                    = 0, vconnection = 0, camera = 0, i = 0;
    int ch;
    AVDictionary *opts = NULL;
    BMDPixelFormat pix = bmdFormat8BitYUV;
    HRESULT result;
    pthread_t th;

    pthread_mutex_init(&sleepMutex, NULL);
    pthread_cond_init(&sleepCond, NULL);
    av_register_all();

    if (!deckLinkIterator) {
        fprintf(stderr,
                "This application requires the DeckLink drivers installed.\n");
        goto bail;
    }

    // Parse command line options
    while ((ch = getopt(argc, argv, "?hvc:s:f:a:m:n:p:M:F:C:A:V:S:o:")) != -1) {
        switch (ch) {
        case 'v':
            g_verbose = true;
            break;
        case 'm':
            g_videoModeIndex = atoi(optarg);
            break;
        case 'c':
            g_audioChannels = atoi(optarg);
            if (g_audioChannels != 2 &&
                g_audioChannels != 8 &&
                g_audioChannels != 16) {
                fprintf(
                    stderr,
                    "Invalid argument: Audio Channels must be either 2, 8 or 16\n");
                goto bail;
            }
            break;
        case 's':
            g_audioSampleDepth = atoi(optarg);
            switch (g_audioSampleDepth) {
            case 16:
                sample_fmt = AV_SAMPLE_FMT_S16;
                break;
            case 32:
                sample_fmt = AV_SAMPLE_FMT_S32;
                break;
            default:
                fprintf(stderr,
                        "Invalid argument:"
                        " Audio Sample Depth must be either 16 bits"
                        " or 32 bits\n");
                goto bail;
            }
            break;
        case 'p':
            switch (atoi(optarg)) {
            case  8:
                pix     = bmdFormat8BitYUV;
                pix_fmt = PIX_FMT_UYVY422;
                break;
            case 10:
                pix     = bmdFormat10BitYUV;
                pix_fmt = PIX_FMT_YUV422P10;
                break;
            default:
                fprintf(
                    stderr,
                    "Invalid argument: Pixel Format Depth must be either 8 bits or 10 bits\n");
                goto bail;
            }
            break;
        case 'f':
            g_videoOutputFile = optarg;
            break;
        case 'n':
            g_maxFrames = atoi(optarg);
            break;
        case 'M':
            g_memoryLimit = atoi(optarg) * 1024 * 1024 * 1024L;
            break;
        case 'F':
            fmt = av_guess_format(optarg, NULL, NULL);
            break;
        case 'A':
            aconnection = atoi(optarg);
            break;
        case 'V':
            vconnection = atoi(optarg);
            break;
        case 'C':
            camera = atoi(optarg);
            break;
        case 'S':
            serial_fd = open(optarg, O_RDWR | O_NONBLOCK);
            break;
        case 'o':
            if (av_dict_parse_string(&opts, optarg, "=", ":", 0) < 0) {
                fprintf(stderr, "Cannot parse option string %s\n",
                        optarg);
                goto bail;
            }
            break;
        case '?':
        case 'h':
            usage(0);
        }
    }

    /* Connect to the first DeckLink instance */
    do
        result = deckLinkIterator->Next(&deckLink);
    while (i++ < camera);

    if (result != S_OK) {
        fprintf(stderr, "No DeckLink PCI cards found.\n");
        goto bail;
    }

    if (deckLink->QueryInterface(IID_IDeckLinkInput,
                                 (void **)&deckLinkInput) != S_OK) {
        goto bail;
    }

    result = deckLink->QueryInterface(IID_IDeckLinkConfiguration,
                                      (void **)&deckLinkConfiguration);
    if (result != S_OK) {
        fprintf(
            stderr,
            "Could not obtain the IDeckLinkConfiguration interface - result = %08x\n",
            result);
        goto bail;
    }

    result = S_OK;
    switch (aconnection) {
    case 1:
        result = DECKLINK_SET_AUDIO_CONNECTION(bmdAudioConnectionAnalog);
        break;
    case 2:
        result = DECKLINK_SET_AUDIO_CONNECTION(bmdAudioConnectionEmbedded);
        break;
    case 3:
        result = DECKLINK_SET_AUDIO_CONNECTION(bmdAudioConnectionAESEBU);
        break;
    default:
        // do not change it
        break;
    }
    if (result != S_OK) {
        fprintf(stderr, "Failed to set audio input - result = %08x\n", result);
        goto bail;
    }

    result = S_OK;
    switch (vconnection) {
    case 1:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionComposite);
        break;
    case 2:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionComponent);
        break;
    case 3:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionHDMI);
        break;
    case 4:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionSDI);
        break;
    case 5:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionOpticalSDI);
        break;
    case 6:
        result = DECKLINK_SET_VIDEO_CONNECTION(bmdVideoConnectionSVideo);
        break;
    default:
        // do not change it
        break;
    }
    if (result != S_OK) {
        fprintf(stderr, "Failed to set video input - result %08x\n", result);
        goto bail;
    }

    delegate = new DeckLinkCaptureDelegate();
    deckLinkInput->SetCallback(delegate);

    // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
    result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
    if (result != S_OK) {
        fprintf(
            stderr,
            "Could not obtain the video output display mode iterator - result = %08x\n",
            result);
        goto bail;
    }

    if (!g_videoOutputFile) {
        fprintf(stderr,
                "Missing argument: Please specify output path using -f\n");
        goto bail;
    }

    if (!fmt) {
        fmt = av_guess_format(NULL, g_videoOutputFile, NULL);
        if (!fmt) {
            fprintf(
                stderr,
                "Unable to guess output format, please specify explicitly using -F\n");
            goto bail;
        }
    }

    if (g_videoModeIndex < 0) {
        fprintf(stderr, "No video mode specified\n");
        usage(0);
    }

    selectedDisplayMode = -1;
    while (displayModeIterator->Next(&displayMode) == S_OK) {
        if (g_videoModeIndex == displayModeCount) {
            selectedDisplayMode = displayMode->GetDisplayMode();
            break;
        }
        displayModeCount++;
        displayMode->Release();
    }

    if (selectedDisplayMode < 0) {
        fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
        goto bail;
    }

    result = deckLinkInput->EnableVideoInput(selectedDisplayMode, pix, 0);
    if (result != S_OK) {
        fprintf(stderr,
                "Failed to enable video input. Is another application using "
                "the card?\n");
        goto bail;
    }

    result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz,
                                             g_audioSampleDepth,
                                             g_audioChannels);
    if (result != S_OK) {
        fprintf(stderr,
                "Failed to enable audio input. Is another application using "
                "the card?\n");
        goto bail;
    }

    oc          = avformat_alloc_context();
    oc->oformat = fmt;

    snprintf(oc->filename, sizeof(oc->filename), "%s", g_videoOutputFile);

    fmt->video_codec = (pix == bmdFormat8BitYUV ? AV_CODEC_ID_RAWVIDEO : AV_CODEC_ID_V210);
    fmt->audio_codec = (sample_fmt == AV_SAMPLE_FMT_S16 ? AV_CODEC_ID_PCM_S16LE : AV_CODEC_ID_PCM_S32LE);

    video_st = add_video_stream(oc, fmt->video_codec);
    audio_st = add_audio_stream(oc, fmt->audio_codec);

    if (serial_fd > 0)
        data_st = add_data_stream(oc, AV_CODEC_ID_TEXT);

    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, oc->filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", oc->filename);
            exit(1);
        }
    }

    avformat_write_header(oc, NULL);
    avpacket_queue_init(&queue);

    result = deckLinkInput->StartStreams();
    if (result != S_OK) {
        goto bail;
    }
    // All Okay.
    exitStatus = 0;

    if (pthread_create(&th, NULL, push_packet, oc))
        goto bail;

    // Block main thread until signal occurs
    pthread_mutex_lock(&sleepMutex);
    pthread_cond_wait(&sleepCond, &sleepMutex);
    pthread_mutex_unlock(&sleepMutex);
    deckLinkInput->StopStreams();
    fprintf(stderr, "Stopping Capture\n");
    avpacket_queue_end(&queue);

bail:
    if (displayModeIterator != NULL) {
        displayModeIterator->Release();
        displayModeIterator = NULL;
    }

    if (deckLinkInput != NULL) {
        deckLinkInput->Release();
        deckLinkInput = NULL;
    }

    if (deckLink != NULL) {
        deckLink->Release();
        deckLink = NULL;
    }

    if (deckLinkIterator != NULL) {
        deckLinkIterator->Release();
    }

    if (oc != NULL) {
        av_write_trailer(oc);
        if (!(fmt->flags & AVFMT_NOFILE)) {
            /* close the output file */
            avio_close(oc->pb);
        }
    }

    return exitStatus;
}
Example No. 12
int main(int argc, char *argv[])
{
    int ret = 1;
    int ch, i;
    char *filename = NULL;
    pthread_mutex_t mux;

    DecklinkConf c  = { .video_cb = video_callback,
                        .audio_cb = audio_callback };
    DecklinkCapture *capture;
    pthread_t th;

    pthread_mutex_init(&mux, NULL);
    pthread_cond_init(&cond, NULL);
    av_register_all();

    // Parse command line options
    while ((ch = getopt(argc, argv, "?hvc:s:f:a:m:n:p:M:F:C:A:V:")) != -1) {
        switch (ch) {
        case 'v':
            verbose = 1;
            break;
        case 'm':
            c.video_mode = atoi(optarg);
            break;
        case 'c':
            c.audio_channels = atoi(optarg);
            break;
        case 's':
            c.audio_sample_depth = atoi(optarg);
            switch (c.audio_sample_depth) {
            case 16:
            case 32:
                break;
            default:
                goto bail;
            }
            break;
        case 'p':
            switch (atoi(optarg)) {
            case  8:
                c.pixel_format = 0;
                pix_fmt = AV_PIX_FMT_UYVY422;
                break;
            case 10:
                c.pixel_format = 1;
                pix_fmt = AV_PIX_FMT_YUV422P10;
                break;
            default:
                fprintf(
                    stderr,
                    "Invalid argument: Pixel Format Depth must be either 8 bits or 10 bits\n");
                goto bail;
            }
            break;
        case 'f':
            filename = optarg;
            break;
        case 'n':
            max_frames = atoi(optarg);
            break;
        case 'M':
            memory_limit = atoi(optarg) * 1024 * 1024 * 1024L;
            break;
        case 'F':
            fmt = av_guess_format(optarg, NULL, NULL);
            break;
        case 'A':
            c.audio_connection = atoi(optarg);
            break;
        case 'V':
            c.video_connection = atoi(optarg);
            break;
        case 'C':
            c.instance = atoi(optarg);
            break;
        case '?':
        case 'h':
            exit(0);
        }
    }

    c.priv = &c;

    capture = decklink_capture_alloc(&c);

    if (!filename) {
        fprintf(stderr,
                "Missing argument: Please specify output path using -f\n");
        goto bail;
    }

    if (!fmt) {
        fmt = av_guess_format(NULL, filename, NULL);
        if (!fmt) {
            fprintf(
                stderr,
                "Unable to guess output format, please specify explicitly using -F\n");
            goto bail;
        }
    }

    oc          = avformat_alloc_context();
    oc->oformat = fmt;

    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    fmt->video_codec = (c.pixel_format == 0 ? AV_CODEC_ID_RAWVIDEO : AV_CODEC_ID_V210);
    switch (c.audio_sample_depth) {
    case 16:
        fmt->audio_codec = AV_CODEC_ID_PCM_S16LE;
        break;
    case 32:
        fmt->audio_codec = AV_CODEC_ID_PCM_S32LE;
        break;
    default:
        exit(1);
    }

    video_st = add_video_stream(&c, oc, fmt->video_codec);
    audio_st = add_audio_stream(&c, oc, fmt->audio_codec);

    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, oc->filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", oc->filename);
            exit(1);
        }
    }

    avformat_write_header(oc, NULL);

    avpacket_queue_init(&queue);

    if (pthread_create(&th, NULL, push_packet, oc))
        goto bail;

    decklink_capture_start(capture);

    // Block main thread until signal occurs
    pthread_mutex_lock(&mux);
    pthread_cond_wait(&cond, &mux);
    pthread_mutex_unlock(&mux);
    fprintf(stderr, "Stopping Capture\n");

    decklink_capture_stop(capture);
    ret = 0;

bail:
    decklink_capture_free(capture);

    if (oc != NULL) {
        av_write_trailer(oc);
        if (!(fmt->flags & AVFMT_NOFILE)) {
            /* close the output file */
            avio_close(oc->pb);
        }
    }

    return ret;
}
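A caveat shared by both DeckLink captures: the bail path runs av_write_trailer() even when the jump happened before avformat_write_header(), and av_write_trailer() may only be called after a successful header write. A hedged fix is to remember whether the header was written, e.g.:

    int header_written = 0;

    /* ... setup as above ... */

    if (avformat_write_header(oc, NULL) >= 0)
        header_written = 1;

    /* ... capture loop ... */

bail:
    if (oc != NULL) {
        if (header_written)
            av_write_trailer(oc);
        if (!(fmt->flags & AVFMT_NOFILE))
            avio_close(oc->pb);
    }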
Example No. 13
int start_up(EncoderJob &jobSpec) {

	jobSpec.p = new Pests();

	jobSpec.oc = avformat_alloc_context();
	if (!jobSpec.oc) {
		fprintf(stderr, "Memory error\n");
		jobSpec.IsValid = false;
		return 3;
	}
	jobSpec.oc->oformat = jobSpec.fmt;
	sprintf(jobSpec.oc->filename, "%s-%05u.ts", jobSpec.BaseDirectory, jobSpec.SegmentNumber);


	// Set video codecs:
	jobSpec.fmt->video_codec = CODEC_ID_H264; // Video codec. Requires FFmpeg to be built with libx264.
	jobSpec.fmt->audio_codec = CODEC_ID_MP3; //CODEC_ID_AAC; // AAC is not working so well. Will use MP3 instead.

	jobSpec.video_st = NULL;
	jobSpec.audio_st = NULL;
	if (jobSpec.fmt->video_codec != CODEC_ID_NONE) {
		jobSpec.video_st = add_video_stream(jobSpec.oc, jobSpec.fmt->video_codec, jobSpec);
	}
	if (jobSpec.fmt->audio_codec != CODEC_ID_NONE) {
		jobSpec.audio_st = add_audio_stream(jobSpec, jobSpec.oc, jobSpec.fmt->audio_codec);
	}

	/*if (av_set_parameters(jobSpec.oc, NULL) < 0) {
		fprintf(stderr, "Invalid output format parameters\n");
			jobSpec.IsValid = false;
			return 4;
	}*/

	/* now that all the parameters are set, we can open the audio and
	video codecs and allocate the necessary encode buffers */
	if (jobSpec.video_st) {
		open_video(jobSpec, jobSpec.oc, jobSpec.video_st);
	}
	if (jobSpec.audio_st) {
		open_audio(jobSpec.oc, jobSpec.audio_st);
	}

#ifdef NEW_M2TS
	jobSpec.fmt->flags |= AVFMT_NOFILE; // we'll write our own, thanks!
	int track_ids[2] = {120, 121};
	uint8_t track_types[2] = {Pests::TT_H264, Pests::TT_MpegAudio};
	jobSpec.p->StartFile(jobSpec.oc->filename, track_ids, track_types, 2); // 120 = video, 121 = audio
#else
	// open the output file, if needed
	if (!(jobSpec.fmt->flags & AVFMT_NOFILE)) {
		if (url_fopen(&jobSpec.oc->pb, jobSpec.oc->filename, URL_WRONLY) < 0) {
			fprintf(stderr, "Could not open '%s'\n", jobSpec.oc->filename);
			jobSpec.IsValid = false;
			return 5;
		}
		av_write_header(jobSpec.oc);
	}
#endif


	// All done OK, validate and return.
	// From this point on, the developer MUST call CloseEncoderJob() before exiting.
	jobSpec.IsValid = true;
	return 0;
}
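On this API generation oc->filename is a fixed 1024-byte array, so the unbounded sprintf() above can overflow if BaseDirectory is long; a bounded sketch of the same line:

	snprintf(jobSpec.oc->filename, sizeof(jobSpec.oc->filename),
	         "%s-%05u.ts", jobSpec.BaseDirectory, jobSpec.SegmentNumber);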
Example No. 14
int main(int argc, char *argv[])
{
    IDeckLinkIterator            *deckLinkIterator = CreateDeckLinkIteratorInstance();
    DeckLinkCaptureDelegate     *delegate;
    BMDDisplayMode                selectedDisplayMode = bmdModeNTSC;
    int                            displayModeCount = 0;
    int                            exitStatus = 1;
    int                            connection = 0, camera = 0, i=0;
    int                            ch;
    HRESULT                        result;

    pthread_mutex_init(&sleepMutex, NULL);
    pthread_cond_init(&sleepCond, NULL);
    av_register_all();

    if (!deckLinkIterator)
    {
        fprintf(stderr, "This application requires the DeckLink drivers installed.\n");
        goto bail;
    }
    // Parse command line options
    while ((ch = getopt(argc, argv, "?hc:s:f:a:m:n:F:C:I:")) != -1)
    {
        switch (ch)
        {
            case 'm':
                g_videoModeIndex = atoi(optarg);
                break;
            case 'c':
                g_audioChannels = atoi(optarg);
                if (g_audioChannels != 2 &&
                    g_audioChannels != 8 &&
                    g_audioChannels != 16)
                {
                    fprintf(stderr, "Invalid argument: Audio Channels must be either 2, 8 or 16\n");
                    goto bail;
                }
                break;
            case 's':
                g_audioSampleDepth = atoi(optarg);
                if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32)
                {
                    fprintf(stderr, "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits\n");
                    goto bail;
                }
                break;
            case 'f':
                g_videoOutputFile = optarg;
                break;
            case 'n':
                g_maxFrames = atoi(optarg);
                break;
            case 'F':
                fmt = av_guess_format(optarg, NULL, NULL);
                break;
            case 'I':
                connection = atoi(optarg);
                break;
            case 'C':
                camera = atoi(optarg);
                break;
            case '?':
            case 'h':
                usage(0);
        }
    }

    /* Connect to the first DeckLink instance */
    do {
    	result = deckLinkIterator->Next(&deckLink);
    } while(i++<camera);

    if (result != S_OK)
    {
        fprintf(stderr, "No DeckLink PCI cards found.\n");
        goto bail;
    }

    if (deckLink->QueryInterface(IID_IDeckLinkInput, (void**)&deckLinkInput) != S_OK)
        goto bail;

    result = deckLink->QueryInterface(IID_IDeckLinkConfiguration, (void**)&deckLinkConfiguration);
    if (result != S_OK)
    {
        fprintf(stderr, "Could not obtain the IDeckLinkConfiguration interface - result = %08x\n", result);
        goto bail;
    }
    //XXX make it generic
    if (connection == 1) { // composite video + analog audio
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigVideoInputConnection,
                                      bmdVideoConnectionComposite);
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigAudioInputConnection,
                                      bmdAudioConnectionAnalog);
    } else if (connection == 2) { // component video + analog audio
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigVideoInputConnection,
                                      bmdVideoConnectionComponent);
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigAudioInputConnection,
                                      bmdAudioConnectionAnalog);
    } else if (connection == 3) { // HDMI video + audio
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigVideoInputConnection,
                                      bmdVideoConnectionHDMI);
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigAudioInputConnection,
                                      bmdAudioConnectionEmbedded);
    } else if (connection == 4) { // SDI video + audio
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigVideoInputConnection,
                                      bmdVideoConnectionSDI);
        deckLinkConfiguration->SetInt(bmdDeckLinkConfigAudioInputConnection,
                                      bmdAudioConnectionEmbedded);
    }
    delegate = new DeckLinkCaptureDelegate();
    deckLinkInput->SetCallback(delegate);

    // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output
    result = deckLinkInput->GetDisplayModeIterator(&displayModeIterator);
    if (result != S_OK)
    {
        fprintf(stderr, "Could not obtain the video output display mode iterator - result = %08x\n", result);
        goto bail;
    }

    if (!fmt)
        fmt = av_guess_format(NULL, g_videoOutputFile, NULL);


    if (g_videoModeIndex < 0)
    {
        fprintf(stderr, "No video mode specified\n");
        usage(0);
    }


    selectedDisplayMode = -1;
    while (displayModeIterator->Next(&displayMode) == S_OK)
    {
        if (g_videoModeIndex == displayModeCount)
        {
            selectedDisplayMode = displayMode->GetDisplayMode();
            break;
        }
        displayModeCount++;
        displayMode->Release();
    }

    oc = avformat_alloc_context();
    oc->oformat = fmt;

    snprintf(oc->filename, sizeof(oc->filename), "%s", g_videoOutputFile);

    fmt->video_codec = CODEC_ID_RAWVIDEO;
    fmt->audio_codec = CODEC_ID_PCM_S16LE;

    video_st = add_video_stream(oc, fmt->video_codec);
    audio_st = add_audio_stream(oc, fmt->audio_codec);

    av_set_parameters(oc, NULL);

    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, oc->filename, URL_WRONLY) < 0) {
            fprintf(stderr, "Could not open '%s'\n", oc->filename);
            exit(1);
        }
    }

    if (selectedDisplayMode < 0)
    {
        fprintf(stderr, "Invalid mode %d specified\n", g_videoModeIndex);
        goto bail;
    }

    result = deckLinkInput->EnableVideoInput(selectedDisplayMode, bmdFormat8BitYUV, 0);
    if(result != S_OK)
    {
        fprintf(stderr, "Failed to enable video input. Is another application using the card?\n");
        goto bail;
    }

    result = deckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, g_audioSampleDepth, g_audioChannels);
    if(result != S_OK)
    {
        goto bail;
    }
    av_write_header(oc);

    result = deckLinkInput->StartStreams();
    if(result != S_OK)
    {
        goto bail;
    }
    // All Okay.
    exitStatus = 0;

    // Block main thread until signal occurs
    pthread_mutex_lock(&sleepMutex);
    pthread_cond_wait(&sleepCond, &sleepMutex);
    pthread_mutex_unlock(&sleepMutex);
    fprintf(stderr, "Stopping Capture\n");

bail:
    if (displayModeIterator != NULL)
    {
        displayModeIterator->Release();
        displayModeIterator = NULL;
    }

    if (deckLinkInput != NULL)
    {
        deckLinkInput->Release();
        deckLinkInput = NULL;
    }

    if (deckLink != NULL)
    {
        deckLink->Release();
        deckLink = NULL;
    }

    if (deckLinkIterator != NULL)
        deckLinkIterator->Release();

    if (oc != NULL)
    {
        av_write_trailer(oc);
        if (!(fmt->flags & AVFMT_NOFILE)) {
            /* close the output file */
            url_fclose(oc->pb);
        }

    }

    return exitStatus;
}
Example No. 15
int main(int argc, char **argv)
{
    OutputStream video_st = { 0 }, audio_st = { 0 };
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    int have_video = 0, have_audio = 0;
    int encode_video = 0, encode_audio = 0;

    /* Initialize libavcodec, and register all codecs and formats. */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        return 1;
    }

    filename = argv[1];

    /* Autodetect the output format from the name. default is MPEG. */
    fmt = av_guess_format(NULL, filename, NULL);
    if (!fmt) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        fmt = av_guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        fprintf(stderr, "Could not find suitable output format\n");
        return 1;
    }

    /* Allocate the output media context. */
    oc = avformat_alloc_context();
    if (!oc) {
        fprintf(stderr, "Memory error\n");
        return 1;
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* Add the audio and video streams using the default format codecs
     * and initialize the codecs. */
    if (fmt->video_codec != AV_CODEC_ID_NONE) {
        add_video_stream(&video_st, oc, fmt->video_codec);
        have_video = 1;
        encode_video = 1;
    }
    if (fmt->audio_codec != AV_CODEC_ID_NONE) {
        add_audio_stream(&audio_st, oc, fmt->audio_codec);
        have_audio = 1;
        encode_audio = 1;
    }

    /* Now that all the parameters are set, we can open the audio and
     * video codecs and allocate the necessary encode buffers. */
    if (have_video)
        open_video(oc, &video_st);
    if (have_audio)
        open_audio(oc, &audio_st);

    av_dump_format(oc, 0, filename, 1);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            return 1;
        }
    }

    /* Write the stream header, if any. */
    avformat_write_header(oc, NULL);

    while (encode_video || encode_audio) {
        /* select the stream to encode */
        if (encode_video &&
            (!encode_audio || av_compare_ts(video_st.next_pts, video_st.enc->time_base,
                                            audio_st.next_pts, audio_st.enc->time_base) <= 0)) {
            encode_video = !write_video_frame(oc, &video_st);
        } else {
            encode_audio = !process_audio_stream(oc, &audio_st);
        }
    }

    /* Write the trailer, if any. The trailer must be written before you
     * close the CodecContexts open when you wrote the header; otherwise
     * av_write_trailer() may try to use memory that was freed on
     * av_codec_close(). */
    av_write_trailer(oc);

    /* Close each codec. */
    if (have_video)
        close_stream(oc, &video_st);
    if (have_audio)
        close_stream(oc, &audio_st);

    if (!(fmt->flags & AVFMT_NOFILE))
        /* Close the output file. */
        avio_close(oc->pb);

    /* free the stream */
    avformat_free_context(oc);

    return 0;
}
Example No. 16
int init_output(OUTPUT_CONTEXT *ptr_output_ctx, char* output_file ,INPUT_CONTEXT *ptr_input_ctx) {

    //set AVOutputFormat
    /* allocate the output media context */
    printf("output_file = %s \n" ,output_file);
    avformat_alloc_output_context2(&ptr_output_ctx->ptr_format_ctx, NULL, NULL, output_file);
    if (ptr_output_ctx->ptr_format_ctx == NULL) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        avformat_alloc_output_context2(&ptr_output_ctx->ptr_format_ctx, NULL, "mpeg", output_file);
        if(ptr_output_ctx->ptr_format_ctx == NULL) {
            printf("Could not find suitable output format\n");
            exit(NOT_GUESS_OUT_FORMAT);
        }
    }
    // If the AVOutputFormat was obtained successfully, its audio_codec and video_codec fields are already set to the format's defaults.
    ptr_output_ctx->fmt = ptr_output_ctx->ptr_format_ctx->oformat;


    /* add audio stream and video stream 	*/
    ptr_output_ctx->video_stream = NULL;
    ptr_output_ctx->audio_stream = NULL;

    ptr_output_ctx->audio_codec_id = CODEC_ID_AAC; //aac
    ptr_output_ctx->video_codec_id = CODEC_ID_H264; //h264

    if (ptr_output_ctx->fmt->video_codec != CODEC_ID_NONE) {

        ptr_output_ctx->video_stream = add_video_stream(ptr_output_ctx->ptr_format_ctx, ptr_output_ctx->video_codec_id);
        if(ptr_output_ctx->video_stream == NULL) {
            printf("in output, add video stream failed\n");
            exit(ADD_VIDEO_STREAM_FAIL);
        }
    }

    if (ptr_output_ctx->fmt->audio_codec != CODEC_ID_NONE) {

        ptr_output_ctx->audio_stream = add_audio_stream(ptr_output_ctx->ptr_format_ctx, ptr_output_ctx->audio_codec_id ,ptr_input_ctx);
        if(ptr_output_ctx->audio_stream == NULL) {
            printf("in output, add audio stream failed\n");
            exit(ADD_AUDIO_STREAM_FAIL);
        }
    }


    /*	malloc buffer	*/
    ptr_output_ctx->encoded_yuv_pict = avcodec_alloc_frame();
    if(ptr_output_ctx->encoded_yuv_pict == NULL) {
        printf("yuv_frame allocation failed at %s:%d\n", __FILE__, __LINE__);
        exit(MEMORY_MALLOC_FAIL);
    }
    int size = avpicture_get_size(ptr_output_ctx->video_stream->codec->pix_fmt ,
                                  ptr_output_ctx->video_stream->codec->width ,
                                  ptr_output_ctx->video_stream->codec->height);

    ptr_output_ctx->pict_buf = av_malloc(size);
    if(ptr_output_ctx->pict_buf == NULL) {
        printf("pict allocate failed ...\n");
        exit(MEMORY_MALLOC_FAIL);
    }
    //bind
    avpicture_fill((AVPicture *)ptr_output_ctx->encoded_yuv_pict ,ptr_output_ctx->pict_buf ,
                   ptr_output_ctx->video_stream->codec->pix_fmt ,
                   ptr_output_ctx->video_stream->codec->width ,
                   ptr_output_ctx->video_stream->codec->height);


    /*	init some member value */
    ptr_output_ctx->audio_resample = 0;
    ptr_output_ctx->swr = NULL;

    /*output the file information */
    av_dump_format(ptr_output_ctx->ptr_format_ctx, 0, output_file, 1);

    return 0;
}
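avcodec_alloc_frame(), avpicture_get_size() and avpicture_fill() used above were later removed; on current FFmpeg the same buffer setup goes through libavutil/imgutils.h, roughly like this (pix_fmt, width and height standing for whatever the video encoder was configured with, since the AVStream.codec field read above is itself gone):

#include <libavutil/imgutils.h>

    ptr_output_ctx->encoded_yuv_pict = av_frame_alloc();
    if (ptr_output_ctx->encoded_yuv_pict == NULL)
        exit(MEMORY_MALLOC_FAIL);

    int size = av_image_get_buffer_size(pix_fmt, width, height, 1);
    ptr_output_ctx->pict_buf = av_malloc(size);
    if (ptr_output_ctx->pict_buf == NULL)
        exit(MEMORY_MALLOC_FAIL);

    /* bind the flat buffer to the frame's data/linesize arrays */
    av_image_fill_arrays(ptr_output_ctx->encoded_yuv_pict->data,
                         ptr_output_ctx->encoded_yuv_pict->linesize,
                         ptr_output_ctx->pict_buf,
                         pix_fmt, width, height, 1);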