int main(int argc, char **argv) { const char *output_type; /* register all the codecs */ avcodec_register_all(); if (argc < 2) { printf("usage: %s output_type\n" "API example program to decode/encode a media stream with libavcodec.\n" "This program generates a synthetic stream and encodes it to a file\n" "named test.h264, test.mp2 or test.mpg depending on output_type.\n" "The encoded stream is then decoded and written to a raw data output.\n" "output_type must be choosen between 'h264', 'mp2', 'mpg'.\n", argv[0]); return 1; } output_type = argv[1]; if (!strcmp(output_type, "h264")) { video_encode_example("test.h264", AV_CODEC_ID_H264); } else if (!strcmp(output_type, "mp2")) { audio_encode_example("test.mp2"); audio_decode_example("test.sw", "test.mp2"); } else if (!strcmp(output_type, "mpg")) { video_encode_example("test.mpg", AV_CODEC_ID_MPEG1VIDEO); video_decode_example("test%02d.pgm", "test.mpg"); } else { fprintf(stderr, "Invalid output type '%s', choose between 'h264', 'mp2', or 'mpg'\n", output_type); return 1; } return 0; }
/*
 * Example driver for the (old) libavcodec API.
 *
 * With no argument: runs the audio and video encode examples into /tmp
 * and then decodes the generated /tmp/test.mpg.
 * With an argument: decodes the given file instead.
 */
int main(int argc, char **argv)
{
    const char *filename = "/tmp/test.mpg";

    /* must be called before using avcodec lib */
    avcodec_init();

    /* register all the codecs */
    avcodec_register_all();

    if (argc > 1) {
        filename = argv[1];
    } else {
        /* No input given: generate the test streams first. */
        audio_encode_example("/tmp/test.mp2");
        audio_decode_example("/tmp/test.sw", "/tmp/test.mp2");
        video_encode_example("/tmp/test.h264", CODEC_ID_H264);
        video_encode_example("/tmp/test.mpg", CODEC_ID_MPEG1VIDEO);
    }

    // audio_decode_example("/tmp/test.sw", filename);
    video_decode_example("/tmp/test%d.pgm", filename);

    return 0;
}
/*
 * Example driver for the (old) libavcodec API.
 *
 * Without arguments it encodes sample audio/video files under /tmp and
 * then decodes /tmp/test.mpg; with an argument it decodes that file.
 */
int main(int argc, char **argv)
{
    const char *filename = "/tmp/test.mpg";

    /* must be called before using avcodec lib */
    avcodec_init();

    /* register all the codecs (you could also register only the codecs
     * you need, for smaller code) */
    avcodec_register_all();

    if (argc > 1) {
        filename = argv[1];
    } else {
        /* No input given: generate the test streams first. */
        audio_encode_example("/tmp/test.mp2");
        audio_decode_example("/tmp/test.sw", "/tmp/test.mp2");
        video_encode_example("/tmp/test.mpg");
    }

    // audio_decode_example("/tmp/test.sw", filename);
    video_decode_example("/tmp/test%d.pgm", filename);

    return 0;
}
/*
 * Example driver: encode sample audio/video under /tmp when run without
 * arguments, then decode /tmp/test.mpg (or the file named in argv[1]).
 */
int main(int argc, char **argv)
{
    const char *filename = "/tmp/test.mpg";

    /* register all the codecs */
    avcodec_register_all();

    if (argc > 1) {
        filename = argv[1];
    } else {
        /* No input given: generate the test streams first. */
        audio_encode_example("/tmp/test.mp2");
        audio_decode_example("/tmp/test.sw", "/tmp/test.mp2");
        video_encode_example("/tmp/test.mpg");
    }

    // audio_decode_example("/tmp/test.sw", filename);
    video_decode_example("/tmp/test%d.pgm", filename);

    return 0;
}
/* Encode a synthetic test sequence to "test.mpg" as MPEG-1 video. */
int main(int argc, char **argv)
{
    (void)argc;
    (void)argv;

    video_encode_example("test.mpg", AV_CODEC_ID_MPEG1VIDEO);

    return 0;
}
int main(int argc, char *argv[]) { AVFormatContext *pFormatCtx = NULL; int i, videoStream; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVFrame *pFrame; AVFrame *pFrameCropped; AVFrame *pFrameRGB; struct SwsContext * pSwsCtx; AVPacket packet; int frameFinished; int numBytes; int numBytesCroped; uint8_t *buffer; AVDictionary * p_options = NULL; AVInputFormat * p_in_fmt = NULL; pFile = fopen("screencap.out", "wb"); if (pFile == NULL) return 0; // Register all formats and codecs av_register_all(); avcodec_register_all(); avdevice_register_all(); av_dict_set(&p_options, "framerate", "60", 0); av_dict_set(&p_options, "video_size", "1920x1080", 0); av_dict_set(&p_options, "qscale", "1", 0); p_in_fmt = av_find_input_format("x11grab"); // Open video file if (avformat_open_input(&pFormatCtx, ":0.0", p_in_fmt, &p_options) != 0) { printf("cannot open input file!\n"); return -1; // Couldn't open file } // Retrieve stream information if (avformat_find_stream_info(pFormatCtx, NULL) < 0) return -1; // Couldn't find stream information // Dump information about file onto standard error av_dump_format(pFormatCtx, 0, argv[1], 0); // Find the first video stream videoStream = -1; for (i = 0; i < pFormatCtx->nb_streams; i++) if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) { videoStream = i; break; } if (videoStream == -1) return -1; // Didn't find a video stream // Get a pointer to the codec context for the video stream pCodecCtx = pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if (pCodec == NULL) { fprintf(stderr, "Unsupported codec!\n"); return -1; // Codec not found } // Open codec if (avcodec_open2(pCodecCtx, pCodec, 0) < 0) return -1; // Could not open codec // Allocate video frame pFrame = avcodec_alloc_frame(); int crop_x = 0, crop_y = 0, crop_h = 1080, crop_w = 1920; pFrameCropped = avcodec_alloc_frame(); if (pFrameCropped == NULL) return -1; // Allocate an AVFrame 
structure pFrameRGB = avcodec_alloc_frame(); if (pFrameRGB == NULL) return -1; // Determine required buffer size and allocate buffer numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, crop_w, crop_h); buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); // Assign appropriate parts of buffer to image planes in pFrameRGB // Note that pFrameRGB is an AVFrame, but AVFrame is a superset // of AVPicture avpicture_fill((AVPicture *) pFrameRGB, buffer, AV_PIX_FMT_YUV420P, crop_w, crop_h); pSwsCtx = sws_getContext(crop_w, crop_h, pCodecCtx->pix_fmt, crop_w, crop_h, AV_PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL); if (pSwsCtx == NULL) { fprintf(stderr, "Cannot initialize the sws context\n"); return -1; } // Read frames and save first five frames to disk i = 0; FILE* fp = fopen("encodec.mpg", "wb"); while (av_read_frame(pFormatCtx, &packet) >= 0) { // Is this a packet from the video stream? if (packet.stream_index == videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); // Did we get a video frame? 
if (frameFinished) { sws_scale(pSwsCtx, (const uint8_t * const *) pFrame->data, pFrame->linesize, 0, crop_h, pFrameRGB->data, pFrameRGB->linesize); int y, x; /* Y */ for (y = 0; y < crop_h; y++) { for (x = 0; x < crop_w; x++) { //fwrite(pFrameRGB->data[0] + y * pFrameRGB->linesize[0] + x, sizeof(uint8_t), 1, fp); } } /* Cb and Cr */ for (y = 0; y < crop_h / 2; y++) { for (x = 0; x < crop_w / 2; x++) { //fwrite(pFrameRGB->data[1] + y * pFrameRGB->linesize[1] + x, sizeof(uint8_t), 1, fp); //fwrite(pFrameRGB->data[2] + y * pFrameRGB->linesize[2] + x, sizeof(uint8_t), 1, fp); } } video_encode_example(pFrameRGB, fp); // Save the frame to disk if (++i >= 100) break; } } av_free_packet(&packet); } fclose(fp); printf("Frames read %d\n", i); // Free the RGB image av_free(buffer); av_free(pFrameRGB); // Free the YUV frame av_free(pFrame); // Close the codec avcodec_close(pCodecCtx); // Close the video file avformat_close_input(&pFormatCtx); // Close file fclose(pFile); return 0; }