/*
 * Begin writing a yuv4mpeg stream for display output.
 *
 * Checks/locks the display framerate, opens the output file (default
 * "<tmpdir>/streamout.yuv"), fills in the stream info (only 24 fps FILM
 * is currently supported) and writes the YUV4MPEG2 header.
 *
 * Returns TRUE on success, FALSE on any failure (fps mismatch,
 * unsupported fps, open failure, or header write failure).
 */
boolean lives_yuv_stream_start_write(lives_yuv4m_t *yuv4mpeg, const char *filename,
                                     int hsize, int vsize, double fps) {
  int i;

  /* refuse to change an already-fixed display framerate */
  if (mainw->fixed_fpsd > -1. && mainw->fixed_fpsd != fps) {
    do_error_dialog(lives_strdup_printf(_("Unable to set display framerate to %.3f fps.\n\n"), fps));
    return FALSE;
  }
  mainw->fixed_fpsd = fps;

  if (filename == NULL)
    filename = lives_strdup_printf("%s/streamout.yuv", prefs->tmpdir);

  // TODO - do_threaded_dialog
  /* BUG FIX: creat()'s second argument is a permission mode, not an open
     flag.  The original passed O_CREAT here, which created the stream file
     with essentially random (and typically unreadable) permissions.
     0644 = rw-r--r--. */
  if ((yuvout = creat(filename, 0644)) < 0) {
    do_error_dialog(lives_strdup_printf(_("Unable to open yuv4mpeg out stream %s\n"), filename));
    return FALSE;
  }

  /* only 24 fps (FILM) output is handled; anything else fails */
  if (mainw->fixed_fpsd > 23.9999 && mainw->fixed_fpsd < 24.0001) {
    y4m_si_set_framerate(&(yuv4mpeg->streaminfo), y4m_fps_FILM);
  } else return FALSE;

  y4m_si_set_interlace(&(yuv4mpeg->streaminfo), Y4M_ILACE_NONE);
  y4m_si_set_width(&(yuv4mpeg->streaminfo), (hsize_out = hsize));
  y4m_si_set_height(&(yuv4mpeg->streaminfo), (vsize_out = vsize));
  y4m_si_set_sampleaspect(&(yuv4mpeg->streaminfo), yuv4mpeg->sar);

  i = y4m_write_stream_header(yuvout, &(yuv4mpeg->streaminfo));
  if (i != Y4M_OK) return FALSE;

  return TRUE;
}
/*
 * Fill in the y4m stream info from the command line and the PPM source
 * geometry, validate chroma alignment, then emit the YUV4MPEG2 header.
 * *field_height receives the height of a single field/picture as the
 * reader should process it.  Exits the program on any error.
 */
static void setup_output_stream(int fdout, cl_info_t *cl, y4m_stream_info_t *sinfo,
                                ppm_info_t *ppm, int *field_height) {
  int rc;
  int h_align = y4m_chroma_ss_x_ratio(cl->ss_mode).d;
  int v_align = y4m_chroma_ss_y_ratio(cl->ss_mode).d;

  /* interlaced output must align over a full frame, i.e. two fields */
  if (cl->interlace != Y4M_ILACE_NONE)
    v_align *= 2;

  if (ppm->width % h_align)
    mjpeg_error_exit1("PPM width (%d) is not a multiple of %d!",
                      ppm->width, h_align);
  if (ppm->height % v_align)
    mjpeg_error_exit1("PPM height (%d) is not a multiple of %d!",
                      ppm->height, v_align);

  y4m_si_set_width(sinfo, ppm->width);

  if (cl->interlace == Y4M_ILACE_NONE) {
    /* progressive: one PPM picture is one full frame */
    y4m_si_set_height(sinfo, ppm->height);
    *field_height = ppm->height;
  } else if (cl->interleave) {
    /* both fields interleaved in a single PPM picture */
    y4m_si_set_height(sinfo, ppm->height);
    *field_height = ppm->height / 2;
  } else {
    /* one PPM picture per field: the frame is twice as tall */
    y4m_si_set_height(sinfo, ppm->height * 2);
    *field_height = ppm->height;
  }

  y4m_si_set_sampleaspect(sinfo, cl->aspect);
  y4m_si_set_interlace(sinfo, cl->interlace);
  y4m_si_set_framerate(sinfo, cl->framerate);
  y4m_si_set_chroma(sinfo, cl->ss_mode);

  rc = y4m_write_stream_header(fdout, sinfo);
  if (rc != Y4M_OK)
    mjpeg_error_exit1("Write header failed: %s", y4m_strerr(rc));

  mjpeg_info("Output Stream parameters:");
  y4m_log_stream_info(mjpeg_loglev_t("info"), " ", sinfo);
}
/*
 * Open (creating/truncating if necessary) the output video stream file
 * and write the YUV4MPEG2 stream header derived from the transcode job
 * settings in *vob.  Returns TC_OK on success, TC_ERROR on failure.
 */
static int tc_y4m_open_video(Y4MPrivateData *pd, const char *filename, vob_t *vob)
{
    int asr, ret;
    y4m_ratio_t framerate;
    y4m_ratio_t asr_rate;

    /* avoid fd loss in case of failed configuration */
    if (pd->fd_vid == -1) {
        pd->fd_vid = open(filename,
                          O_RDWR|O_CREAT|O_TRUNC,
                          S_IRUSR|S_IWUSR|S_IRGRP|S_IROTH);
        if (pd->fd_vid == -1) {
            tc_log_error(MOD_NAME, "failed to open video stream file '%s'"
                         " (reason: %s)", filename, strerror(errno));
            return TC_ERROR;
        }
    }

    /* note: this is the real framerate of the raw stream */
    framerate = (vob->ex_frc == 0) ? mpeg_conform_framerate(vob->ex_fps)
                                   : mpeg_framerate(vob->ex_frc);
    if (framerate.n == 0 && framerate.d == 0) {
        /* no standard rate matched: express ex_fps as a rational directly */
        framerate.n = vob->ex_fps * 1000;
        framerate.d = 1000;
    }

    asr = (vob->ex_asr < 0) ? vob->im_asr : vob->ex_asr;
    tc_asr_code_to_ratio(asr, &asr_rate.n, &asr_rate.d);

    /* BUG FIX: y4m_init_stream_info() was called twice in a row here;
       a single initialization is sufficient. */
    y4m_init_stream_info(&(pd->streaminfo));
    y4m_si_set_framerate(&(pd->streaminfo), framerate);
    if (vob->encode_fields == TC_ENCODE_FIELDS_TOP_FIRST) {
        y4m_si_set_interlace(&(pd->streaminfo), Y4M_ILACE_TOP_FIRST);
    } else if (vob->encode_fields == TC_ENCODE_FIELDS_BOTTOM_FIRST) {
        y4m_si_set_interlace(&(pd->streaminfo), Y4M_ILACE_BOTTOM_FIRST);
    } else if (vob->encode_fields == TC_ENCODE_FIELDS_PROGRESSIVE) {
        y4m_si_set_interlace(&(pd->streaminfo), Y4M_ILACE_NONE);
    }
    /* XXX */
    y4m_si_set_sampleaspect(&(pd->streaminfo),
                            y4m_guess_sar(pd->width, pd->height, asr_rate));
    y4m_si_set_height(&(pd->streaminfo), pd->height);
    y4m_si_set_width(&(pd->streaminfo), pd->width);
    /* Y4M_CHROMA_420JPEG   4:2:0, H/V centered, for JPEG/MPEG-1 */
    /* Y4M_CHROMA_420MPEG2  4:2:0, H cosited, for MPEG-2 */
    /* Y4M_CHROMA_420PALDV  4:2:0, alternating Cb/Cr, for PAL-DV */
    y4m_si_set_chroma(&(pd->streaminfo), Y4M_CHROMA_420JPEG); // XXX

    ret = y4m_write_stream_header(pd->fd_vid, &(pd->streaminfo));
    if (ret != Y4M_OK) {
        tc_log_warn(MOD_NAME, "failed to write video YUV4MPEG2 header: %s",
                    y4m_strerr(ret));
        return TC_ERROR;
    }
    return TC_OK;
}
int main (int argc, char *argv[]) { extern char *optarg; int cpucap = cpu_accel (); char c; int fd_in = 0; int fd_out = 1; int errno = 0; int have_framerate = 0; int force_interlacing = 0; y4m_frame_info_t iframeinfo; y4m_stream_info_t istreaminfo; y4m_frame_info_t oframeinfo; y4m_stream_info_t ostreaminfo; int output_frame_number = 0; int input_frame_number = 0; y4m_ratio_t output_frame_rate, input_frame_rate, frame_rate_ratio; float ratio = 0; // input/output, output should be > input ) int scene_change; y4m_ratio_t ratio_percent_frame; float percent_threshold = 0.02; /* percent_threshold is there to avoid interpolating frames when the output frame * is very close to an input frame */ mjpeg_log (LOG_INFO, "-------------------------------------------------"); mjpeg_log (LOG_INFO, " Motion-Compensating-Frame-Rate-Converter "); mjpeg_log (LOG_INFO, "-------------------------------------------------"); while ((c = getopt (argc, argv, "hvb:p:r:t:s:f")) != -1) { switch (c) { case 'h': { mjpeg_log (LOG_INFO, "Usage "); mjpeg_log (LOG_INFO, "-------------------------"); mjpeg_log (LOG_INFO, " This program converts frame rates"); mjpeg_log (LOG_INFO, "with a smart algorithm that estimates the motion of the elements"); mjpeg_log (LOG_INFO, "to smooth the motion, rather than duplicating frames."); mjpeg_log (LOG_INFO, " It's way smoother, but introduces a bit of blocking and/or"); mjpeg_log (LOG_INFO, " maybe blurryness when things move too fast."); mjpeg_log (LOG_INFO, " "); mjpeg_log (LOG_INFO, " -r Frame rate for the resulting stream (in X:Y fractional form)"); mjpeg_log (LOG_INFO, " -b block size (default = 8, will be rounded to even number )"); mjpeg_log (LOG_INFO, " -p search path radius (default = 8, do not use high values ~ > 20)"); mjpeg_log (LOG_INFO, "-t frame approximation threshold (default=50, higher=better)"); mjpeg_log (LOG_INFO, "-s scene change threshold (default=8, 0=disable scene change detection)"); mjpeg_log (LOG_INFO, "-r Frame rate for the resulting 
stream (in X:Y fractional form)"); mjpeg_log (LOG_INFO, " -f force processing interlaced input (don't know what it does)"); mjpeg_log (LOG_INFO, " -v verbose/debug"); exit (0); break; } case 'v': { verbose = 1; break; } case 'f': { force_interlacing = 1; break; } case 'b': { block_size = strtol (optarg, (char **) NULL, 10); /* we only want even block sizes */ if (block_size % 1 != 0) { block_size = block_size + 1; mjpeg_log (LOG_WARN, "Block size changed to %d", block_size); } else mjpeg_log (LOG_INFO, "Block size: %d", block_size); break; } case 'p': { search_path_radius = strtol (optarg, (char **) NULL, 10); /* safer atoi */ mjpeg_log (LOG_INFO, "Search radius %d", search_path_radius); break; } case 'r': { if (Y4M_OK != y4m_parse_ratio (&output_frame_rate, optarg)) mjpeg_error_exit1 ("Syntax for frame rate should be Numerator:Denominator"); mjpeg_log (LOG_INFO, "New Frame rate %d:%d", output_frame_rate.n, output_frame_rate.d); have_framerate = 1; break; } case 't': { percent_threshold = strtol (optarg, (char **) NULL, 10); if ((percent_threshold > 1) && (percent_threshold <= 1024)) percent_threshold = 1.0 / percent_threshold; else mjpeg_error_exit1 ("Threshold should be between 2 and 1024"); mjpeg_log (LOG_INFO, "Approximation threshold %d", (int) ((float) 1.0 / percent_threshold)); break; } case 's': { scene_change_threshold = strtol (optarg, (char **) NULL, 10); if (scene_change_threshold == 0) mjpeg_log (LOG_INFO, "Scene change detection disabled"); else mjpeg_log (LOG_INFO, "Scene change threshold: %d00 percent", scene_change_threshold); break; } } } if (!have_framerate) { mjpeg_error_exit1 ("Please specify a frame rate; yuvmotionfps -h for more info"); } /* initialize motion_library */ init_motion_search (); /* initialize MMX transforms (fixme) */ if ((cpucap & ACCEL_X86_MMXEXT) != 0 || (cpucap & ACCEL_X86_SSE) != 0) { #if 0 mjpeg_log (LOG_INFO, "FIXME: could use MMX/SSE Block/Frame-Copy/Blend if I had one ;-)"); #endif } /* initialize stream-information */ 
y4m_accept_extensions (1); y4m_init_stream_info (&istreaminfo); y4m_init_frame_info (&iframeinfo); y4m_init_stream_info (&ostreaminfo); y4m_init_frame_info (&oframeinfo); /* open input stream */ if ((errno = y4m_read_stream_header (fd_in, &istreaminfo)) != Y4M_OK) { mjpeg_log (LOG_ERROR, "Couldn't read YUV4MPEG header: %s!", y4m_strerr (errno)); exit (1); } /* get format information */ width = y4m_si_get_width (&istreaminfo); height = y4m_si_get_height (&istreaminfo); input_chroma_subsampling = y4m_si_get_chroma (&istreaminfo); mjpeg_log (LOG_INFO, "Y4M-Stream is %ix%i(%s)", width, height, input_chroma_subsampling == Y4M_CHROMA_420JPEG ? "4:2:0 MPEG1" : input_chroma_subsampling == Y4M_CHROMA_420MPEG2 ? "4:2:0 MPEG2" : input_chroma_subsampling == Y4M_CHROMA_420PALDV ? "4:2:0 PAL-DV" : input_chroma_subsampling == Y4M_CHROMA_444 ? "4:4:4" : input_chroma_subsampling == Y4M_CHROMA_422 ? "4:2:2" : input_chroma_subsampling == Y4M_CHROMA_411 ? "4:1:1 NTSC-DV" : input_chroma_subsampling == Y4M_CHROMA_MONO ? "MONOCHROME" : input_chroma_subsampling == Y4M_CHROMA_444ALPHA ? "4:4:4:4 ALPHA" : "unknown"); /* if chroma-subsampling isn't supported bail out ... */ switch (input_chroma_subsampling) { case Y4M_CHROMA_420JPEG: break; case Y4M_CHROMA_420PALDV: case Y4M_CHROMA_420MPEG2: case Y4M_CHROMA_411: mjpeg_log (LOG_WARN, "This chroma subsampling mode has not been thoroughly tested"); break; default: mjpeg_error_exit1 ("Y4M-Stream is not 4:2:0. Other chroma-modes currently not allowed. 
Sorry."); } /* the output is progressive 4:2:0 MPEG 1 */ y4m_si_set_interlace (&ostreaminfo, Y4M_ILACE_NONE); y4m_si_set_chroma (&ostreaminfo, Y4M_CHROMA_420JPEG); y4m_si_set_width (&ostreaminfo, width); y4m_si_set_height (&ostreaminfo, height); y4m_si_set_sampleaspect (&ostreaminfo, y4m_si_get_sampleaspect (&istreaminfo)); input_frame_rate = y4m_si_get_framerate (&istreaminfo); y4m_si_set_framerate (&ostreaminfo, output_frame_rate); if (width % block_size != 0) { mjpeg_log (LOG_WARN, "Warning, stream width(%d) is not a multiple of block_size (%d)", width, block_size); mjpeg_log (LOG_WARN, "The right side of the image might not be what you want"); } if (height % block_size != 0) { mjpeg_log (LOG_WARN, "Warning, stream height(%d) is not a multiple of block_size (%d)", height, block_size); mjpeg_log (LOG_WARN, "The lower side of the image might not be what you want"); } /* Calculate the different ratios: * ratio is (input framerate / output framerate) * ratio_percent_frame is the fractional representation of percent frame */ frame_rate_ratio.n = input_frame_rate.n * output_frame_rate.d; frame_rate_ratio.d = input_frame_rate.d * output_frame_rate.n; y4m_ratio_reduce (&frame_rate_ratio); ratio = (float) frame_rate_ratio.n / frame_rate_ratio.d; ratio_percent_frame.d = 1; ratio_percent_frame.n = 0; if (ratio == 0) mjpeg_error_exit1 ("Cannot have ratio =0 "); else if (ratio > 128) mjpeg_error_exit1 ("Cannot have ratio >128 "); if ((y4m_si_get_interlace (&istreaminfo) != Y4M_ILACE_NONE) && (!force_interlacing)) { mjpeg_error_exit1 ("Sorry, can only convert progressive streams"); } /* write the outstream header */ y4m_write_stream_header (fd_out, &ostreaminfo); /* now allocate the needed buffers */ { /* calculate the memory offset needed to allow the processing * functions to overshot. The biggest overshot is needed for the * MC-functions, so we'll use 8*width... 
*/ buff_offset = width * 8; buff_size = buff_offset * 2 + width * height; inframe[0] = buff_offset + (uint8_t *) malloc (buff_size); inframe[1] = buff_offset + (uint8_t *) malloc (buff_size); inframe[2] = buff_offset + (uint8_t *) malloc (buff_size); reconstructed[0] = buff_offset + (uint8_t *) malloc (buff_size); reconstructed[1] = buff_offset + (uint8_t *) malloc (buff_size); reconstructed[2] = buff_offset + (uint8_t *) malloc (buff_size); frame1[0] = buff_offset + (uint8_t *) malloc (buff_size); frame1[1] = buff_offset + (uint8_t *) malloc (buff_size); frame1[2] = buff_offset + (uint8_t *) malloc (buff_size); mjpeg_log (LOG_INFO, "Buffers allocated."); } /* initialize motion-search-pattern */ init_search_pattern (); errno = y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, frame1); if (errno != Y4M_OK) goto The_end; /* read every frame until the end of the input stream and process it */ while (Y4M_OK == (errno = y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, inframe))) { /* frame1 contains the previous input frame * inframe contains the current input frame * reconstructed contains the current output frame * percent_frame is the amount of time after which the output frame is sent * in percent of the time between input frames * * Input: * frame1 . . . . . . . . . . . . . . . . . . inframe * Output: * . . . . . . . . . . .reconstructed. . . . . . . 
* |<- - percent_frame - - - ->| * |< - - - - - - - - - -100% - - - - - - - - - >| * * The variable ratio_percent_frame is the fractional representation of * percent_frame; it is there to avoid rounding errors */ input_frame_number++; if (verbose) { mjpeg_log (LOG_INFO, "Input frame number %d", input_frame_number); } while (percent_frame < (1.0 - percent_threshold)) { output_frame_number++; if (verbose) { mjpeg_log (LOG_INFO, "Output frame number %d", output_frame_number); } #define ABS(value) ((value)<0)?-(value):(value) if (ABS (percent_frame) <= percent_threshold) { /* I put a threshold here to avoid wasting time */ /* The output frame coincides with the input frame * so there is no need to do any processing * just copy the input frame as is */ y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1); if (verbose) mjpeg_log (LOG_INFO, "Percent %f rounded to next frame", percent_frame); } else { /* We have to interpolate the frame (between the current inframe * and the previous frame1 * if there is a scene change, motion_compensate_field will * return 1 and we use the previous frame */ if (verbose) mjpeg_log (LOG_INFO, "Percent %f", percent_frame); scene_change = motion_compensate_field (); if (scene_change) { mjpeg_log (LOG_INFO, "Scene change at frame %d", input_frame_number); y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1); } else { y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, reconstructed); } } ratio_percent_frame = add_ratio (ratio_percent_frame, frame_rate_ratio); percent_frame = Y4M_RATIO_DBL (ratio_percent_frame); } /* Skip input frames if downsampling (ratio > 1) * when upsampling, ratio < 1 * so we have ( 1< percent_frame < 2) at this point * hence we don't go in in the loop */ while (percent_frame >= 2) { percent_frame = percent_frame - 1; ratio_percent_frame = ratio_minus_1 (ratio_percent_frame); if (Y4M_OK != (errno = y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, inframe))) goto The_end; } ratio_percent_frame = ratio_minus_1 
(ratio_percent_frame); percent_frame = percent_frame - 1; /* store the previous frame */ memcpy (frame1[0], inframe[0], width * height); memcpy (frame1[1], inframe[1], width * height / 4); memcpy (frame1[2], inframe[2], width * height / 4); } The_end: /* free allocated buffers */ { free (inframe[0] - buff_offset); free (inframe[1] - buff_offset); free (inframe[2] - buff_offset); free (reconstructed[0] - buff_offset); free (reconstructed[1] - buff_offset); free (reconstructed[2] - buff_offset); free (frame1[0] - buff_offset); free (frame1[1] - buff_offset); free (frame1[2] - buff_offset); mjpeg_log (LOG_INFO, "Buffers freed."); } /* did stream end unexpectedly ? */ if (errno != Y4M_ERR_EOF) mjpeg_error_exit1 ("%s", y4m_strerr (errno)); /* Exit gently */ return (0); }
int main(int argc, char *argv[]) { AVFormatContext *pFormatCtx; AVInputFormat *avif = NULL; int i, videoStream; AVCodecContext *pCodecCtx; AVCodec *pCodec; AVFrame *pFrame; AVFrame *pFrame444; AVPacket packet; int frameFinished; int numBytes; uint8_t *buffer; int fdOut = 1 ; int yuv_interlacing = Y4M_UNKNOWN; int yuv_ss_mode = Y4M_UNKNOWN; y4m_ratio_t yuv_frame_rate; y4m_ratio_t yuv_aspect; // need something for chroma subsampling type. int write_error_code; int header_written = 0; int convert = 0; int stream = 0; enum PixelFormat convert_mode; const static char *legal_flags = "chI:F:A:S:o:s:f:"; int y; int frame_data_size ; uint8_t *yuv_data[3] ; y4m_stream_info_t streaminfo; y4m_frame_info_t frameinfo; y4m_init_stream_info(&streaminfo); y4m_init_frame_info(&frameinfo); yuv_frame_rate.d = 0; yuv_aspect.d = 0; // Register all formats and codecs av_register_all(); while ((i = getopt (argc, argv, legal_flags)) != -1) { switch (i) { case 'I': switch (optarg[0]) { case 'p': yuv_interlacing = Y4M_ILACE_NONE; break; case 't': yuv_interlacing = Y4M_ILACE_TOP_FIRST; break; case 'b': yuv_interlacing = Y4M_ILACE_BOTTOM_FIRST; break; default: mjpeg_error("Unknown value for interlace: '%c'", optarg[0]); return -1; break; } break; case 'F': if( Y4M_OK != y4m_parse_ratio(&yuv_frame_rate, optarg) ) mjpeg_error_exit1 ("Syntax for frame rate should be Numerator:Denominator"); break; case 'A': if( Y4M_OK != y4m_parse_ratio(&yuv_aspect, optarg) ) { if (!strcmp(optarg,PAL)) { y4m_parse_ratio(&yuv_aspect, "128:117"); } else if (!strcmp(optarg,PAL_WIDE)) { y4m_parse_ratio(&yuv_aspect, "640:351"); } else if (!strcmp(optarg,NTSC)) { y4m_parse_ratio(&yuv_aspect, "4320:4739"); } else if (!strcmp(optarg,NTSC_WIDE)) { y4m_parse_ratio(&yuv_aspect, "5760:4739"); } else { mjpeg_error_exit1 ("Syntax for aspect ratio should be Numerator:Denominator"); } } break; case 'S': yuv_ss_mode = y4m_chroma_parse_keyword(optarg); if (yuv_ss_mode == Y4M_UNKNOWN) { mjpeg_error("Unknown subsampling mode option: 
%s", optarg); mjpeg_error("Try: 420mpeg2 444 422 411"); return -1; } break; case 'o': fdOut = open (optarg,O_CREAT|O_WRONLY,0644); if (fdOut == -1) { mjpeg_error_exit1 ("Cannot open file for writing"); } break; case 'c': convert = 1; break; case 's': stream = atoi(optarg); break; case 'f': avif = av_find_input_format (optarg); break; case 'h': case '?': print_usage (argv); return 0 ; break; } } //fprintf (stderr,"optind: %d\n",optind); optind--; argc -= optind; argv += optind; if (argc == 1) { print_usage (argv); return 0 ; } // Open video file if(av_open_input_file(&pFormatCtx, argv[1], avif, 0, NULL)!=0) return -1; // Couldn't open file // Retrieve stream information if(av_find_stream_info(pFormatCtx)<0) return -1; // Couldn't find stream information // Dump information about file onto standard error dump_format(pFormatCtx, 0, argv[1], 0); // Find the first video stream videoStream=-1; for(i=0; i<pFormatCtx->nb_streams; i++) if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) { // mark debug //fprintf (stderr,"Video Codec ID: %d (%s)\n",pFormatCtx->streams[i]->codec->codec_id ,pFormatCtx->streams[i]->codec->codec_name); if (videoStream == -1 && stream == 0) { // May still be overridden by the -s option videoStream=i; } if (stream == i) { videoStream=i; break; } } if(videoStream==-1) return -1; // Didn't find a video stream // Get a pointer to the codec context for the video stream pCodecCtx=pFormatCtx->streams[videoStream]->codec; // Find the decoder for the video stream pCodec=avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) return -1; // Codec not found // Open codec if(avcodec_open(pCodecCtx, pCodec)<0) return -1; // Could not open codec // Read framerate, aspect ratio and chroma subsampling from Codec if (yuv_frame_rate.d == 0) { yuv_frame_rate.n = pFormatCtx->streams[videoStream]->r_frame_rate.num; yuv_frame_rate.d = pFormatCtx->streams[videoStream]->r_frame_rate.den; } if (yuv_aspect.d == 0) { yuv_aspect.n = pCodecCtx-> 
sample_aspect_ratio.num; yuv_aspect.d = pCodecCtx-> sample_aspect_ratio.den; } // 0:0 is an invalid aspect ratio default to 1:1 if (yuv_aspect.d == 0 || yuv_aspect.n == 0 ) { yuv_aspect.n=1; yuv_aspect.d=1; } if (convert) { if (yuv_ss_mode == Y4M_UNKNOWN) { print_usage(); return 0; } else { y4m_accept_extensions(1); switch (yuv_ss_mode) { case Y4M_CHROMA_420MPEG2: convert_mode = PIX_FMT_YUV420P; break; case Y4M_CHROMA_422: convert_mode = PIX_FMT_YUV422P; break; case Y4M_CHROMA_444: convert_mode = PIX_FMT_YUV444P; break; case Y4M_CHROMA_411: convert_mode = PIX_FMT_YUV411P; break; case Y4M_CHROMA_420JPEG: convert_mode = PIX_FMT_YUVJ420P; break; default: mjpeg_error_exit1("Cannot convert to this chroma mode"); break; } } } else if (yuv_ss_mode == Y4M_UNKNOWN) { switch (pCodecCtx->pix_fmt) { case PIX_FMT_YUV420P: yuv_ss_mode=Y4M_CHROMA_420MPEG2; break; case PIX_FMT_YUV422P: yuv_ss_mode=Y4M_CHROMA_422; break; case PIX_FMT_YUV444P: yuv_ss_mode=Y4M_CHROMA_444; break; case PIX_FMT_YUV411P: yuv_ss_mode=Y4M_CHROMA_411; break; case PIX_FMT_YUVJ420P: yuv_ss_mode=Y4M_CHROMA_420JPEG; break; default: yuv_ss_mode=Y4M_CHROMA_444; convert_mode = PIX_FMT_YUV444P; // is there a warning function mjpeg_error("Unsupported Chroma mode. Upsampling to YUV444\n"); // enable advanced yuv stream y4m_accept_extensions(1); convert = 1; break; } } // Allocate video frame pFrame=avcodec_alloc_frame(); // Output YUV format details // is there some mjpeg_info functions? fprintf (stderr,"YUV Aspect Ratio: %d:%d\n",yuv_aspect.n,yuv_aspect.d); fprintf (stderr,"YUV frame rate: %d:%d\n",yuv_frame_rate.n,yuv_frame_rate.d); fprintf (stderr,"YUV Chroma Subsampling: %d\n",yuv_ss_mode); // Set the YUV stream details // Interlace is handled when the first frame is read. 
y4m_si_set_sampleaspect(&streaminfo, yuv_aspect); y4m_si_set_framerate(&streaminfo, yuv_frame_rate); y4m_si_set_chroma(&streaminfo, yuv_ss_mode); // Loop until nothing read while(av_read_frame(pFormatCtx, &packet)>=0) { // Is this a packet from the video stream? if(packet.stream_index==videoStream) { // Decode video frame avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, packet.data, packet.size); // Did we get a video frame? if(frameFinished) { // Save the frame to disk // As we don't know interlacing until the first frame // we wait until the first frame is read before setting the interlace flag // and outputting the YUV header // It also appears that some codecs don't set width or height until the first frame either if (!header_written) { if (yuv_interlacing == Y4M_UNKNOWN) { if (pFrame->interlaced_frame) { if (pFrame->top_field_first) { yuv_interlacing = Y4M_ILACE_TOP_FIRST; } else { yuv_interlacing = Y4M_ILACE_BOTTOM_FIRST; } } else { yuv_interlacing = Y4M_ILACE_NONE; } } if (convert) { // initialise conversion to different chroma subsampling pFrame444=avcodec_alloc_frame(); numBytes=avpicture_get_size(convert_mode, pCodecCtx->width, pCodecCtx->height); buffer=(uint8_t *)malloc(numBytes); avpicture_fill((AVPicture *)pFrame444, buffer, convert_mode, pCodecCtx->width, pCodecCtx->height); } y4m_si_set_interlace(&streaminfo, yuv_interlacing); y4m_si_set_width(&streaminfo, pCodecCtx->width); y4m_si_set_height(&streaminfo, pCodecCtx->height); chromalloc(yuv_data,&streaminfo); fprintf (stderr,"YUV interlace: %d\n",yuv_interlacing); fprintf (stderr,"YUV Output Resolution: %dx%d\n",pCodecCtx->width, pCodecCtx->height); if ((write_error_code = y4m_write_stream_header(fdOut, &streaminfo)) != Y4M_OK) { mjpeg_error("Write header failed: %s", y4m_strerr(write_error_code)); } header_written = 1; } if (convert) { // convert to 444 /* +#ifdef HAVE_LIBSWSCALE + struct SwsContext* img_convert_ctx = + sws_getContext(context->width, context->height, PIX_FMT_RGB24, + 
context->width, context->height, context->pix_fmt, + SWS_BICUBIC, NULL, NULL, NULL); + + sws_scale(img_convert_ctx, pict->data, pict->linesize, + 0, context->height, encodable->data, + encodable->linesize); + + sws_freeContext (img_convert_ctx); +#else img_convert((AVPicture *)encodable, context->pix_fmt, (AVPicture *)pict, PIX_FMT_RGB24, context->width, context->height); - + (AVPicture *)pict, PIX_FMT_RGB24, + context->width, context->height); +#endif */ struct SwsContext* img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, SWS_BICUBIC, NULL, NULL, NULL); sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrame444->data, pFrame444->linesize); sws_freeContext (img_convert_ctx); //img_convert((AVPicture *)pFrame444, convert_mode, (AVPicture*)pFrame, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); chromacpy(yuv_data,pFrame444,&streaminfo); } else { chromacpy(yuv_data,pFrame,&streaminfo); } write_error_code = y4m_write_frame( fdOut, &streaminfo, &frameinfo, yuv_data); } } // Free the packet that was allocated by av_read_frame av_free_packet(&packet); } y4m_fini_stream_info(&streaminfo); y4m_fini_frame_info(&frameinfo); free(yuv_data[0]); free(yuv_data[1]); free(yuv_data[2]); // Free the YUV frame av_free(pFrame); // Close the codec avcodec_close(pCodecCtx); // Close the video file av_close_input_file(pFormatCtx); return 0; }
int main(int argc, char **argv) { int c, err, ilace; int fd_in = fileno(stdin), fd_out = fileno(stdout); y4m_ratio_t rate; y4m_stream_info_t si, so; y4m_frame_info_t fi; uint8_t *top1[3], *bot1[3], *top2[3], *bot2[3]; opterr = 0; while ((c = getopt(argc, argv, "h")) != EOF) { switch (c) { case 'h': case '?': default: usage(); } } y4m_accept_extensions(1); y4m_init_stream_info(&si); y4m_init_stream_info(&so); y4m_init_frame_info(&fi); err = y4m_read_stream_header(fd_in, &si); if (err != Y4M_OK) mjpeg_error_exit1("Input stream error: %s\n", y4m_strerr(err)); if (y4m_si_get_plane_count(&si) != 3) mjpeg_error_exit1("only 3 plane formats supported"); rate = y4m_si_get_framerate(&si); if (!Y4M_RATIO_EQL(rate, y4m_fps_NTSC)) mjpeg_error_exit1("input stream not NTSC 30000:1001"); ilace = y4m_si_get_interlace(&si); if (ilace != Y4M_ILACE_BOTTOM_FIRST && ilace != Y4M_ILACE_TOP_FIRST) mjpeg_error_exit1("input stream not interlaced"); top1[0] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,0) / 2); top1[1] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,1) / 2); top1[2] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,2) / 2); bot1[0] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,0) / 2); bot1[1] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,1) / 2); bot1[2] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,2) / 2); top2[0] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,0) / 2); top2[1] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,1) / 2); top2[2] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,2) / 2); bot2[0] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,0) / 2); bot2[1] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,1) / 2); bot2[2] = (uint8_t *) malloc(y4m_si_get_plane_length(&si,2) / 2); y4m_copy_stream_info(&so, &si); y4m_si_set_framerate(&so, y4m_fps_NTSC_FILM); y4m_si_set_interlace(&so, Y4M_ILACE_NONE); /* * At this point the input stream has been verified to be interlaced NTSC, * the output stream rate set to NTSC_FILM, interlacing tag changed to * 
progressive, and the field buffers allocated. * * Time to write the output stream header and commence processing input. */ y4m_write_stream_header(fd_out, &so); while (1) { err = y4m_read_fields(fd_in, &si, &fi, top1, bot1); if (err != Y4M_OK) goto done; y4m_write_fields(fd_out, &so, &fi, top1, bot1); /* A */ err = y4m_read_fields(fd_in, &si, &fi, top1, bot1); if (err != Y4M_OK) goto done; y4m_write_fields(fd_out, &so, &fi, top1, bot1); /* B */ err = y4m_read_fields(fd_in, &si, &fi, top1, bot1); if (err != Y4M_OK) goto done; err = y4m_read_fields(fd_in, &si, &fi, top2, bot2); if (err != Y4M_OK) { /* * End of input when reading the 2nd "mixed field" frame (C+D). The previous * frame was the first "mixed field" frame (B+C). Rather than emit a mixed * interlaced frame duplicate a field and output the previous frame. */ if (ilace == Y4M_ILACE_BOTTOM_FIRST) y4m_write_fields(fd_out, &so, &fi, bot1,bot1); else y4m_write_fields(fd_out, &so, &fi, top1,top1); goto done; } /* * Now the key part of the processing - effectively discarding the first mixed * frame with fields from frames B + C and creating the C frame from the two * mixed frames. For a BOTTOM FIELD FIRST stream use the 'top' field from * frame 3 and the 'bottom' fields from frame 4. With a TOP FIELD FIRST stream * it's the other way around - use the 'bottom' field from frame 3 and the * 'top' field from frame 4. */ if (ilace == Y4M_ILACE_BOTTOM_FIRST) y4m_write_fields(fd_out, &so, &fi, top1, bot2); /* C */ else y4m_write_fields(fd_out, &so, &fi, top2, bot1); /* C */ err = y4m_read_fields(fd_in, &si, &fi, top1, bot1); y4m_write_fields(fd_out, &so, &fi, top1, bot1); /* D */ } done: y4m_fini_frame_info(&fi); y4m_fini_stream_info(&si); y4m_fini_stream_info(&so); exit(0); }
int main(int argc, char **argv) { int sts, c, width = 640, height = 480, noheader = 0; int Y = 16, U = 128, V = 128, chroma_mode = Y4M_CHROMA_420MPEG2; int numframes = 1, force = 0; y4m_ratio_t rate_ratio = y4m_fps_NTSC; y4m_ratio_t aspect_ratio = y4m_sar_SQUARE; int plane_length[3]; u_char *yuv[3]; y4m_stream_info_t ostream; y4m_frame_info_t oframe; char interlace = Y4M_ILACE_NONE; opterr = 0; y4m_accept_extensions(1); while ((c = getopt(argc, argv, "Hfx:w:h:r:i:a:Y:U:V:n:")) != EOF) { switch (c) { case 'H': noheader = 1; break; case 'a': sts = y4m_parse_ratio(&aspect_ratio, optarg); if (sts != Y4M_OK) mjpeg_error_exit1("Invalid aspect: %s", optarg); break; case 'w': width = atoi(optarg); break; case 'h': height = atoi(optarg); break; case 'r': sts = y4m_parse_ratio(&rate_ratio, optarg); if (sts != Y4M_OK) mjpeg_error_exit1("Invalid rate: %s", optarg); break; case 'Y': Y = atoi(optarg); break; case 'U': U = atoi(optarg); break; case 'V': V = atoi(optarg); break; case 'i': switch (optarg[0]) { case 'p': interlace = Y4M_ILACE_NONE; break; case 't': interlace = Y4M_ILACE_TOP_FIRST; break; case 'b': interlace = Y4M_ILACE_BOTTOM_FIRST; break; default: usage(); } break; case 'x': chroma_mode = y4m_chroma_parse_keyword(optarg); if (chroma_mode == Y4M_UNKNOWN) { if (strcmp(optarg, "help") != 0) mjpeg_error("Invalid -x arg '%s'", optarg); chroma_usage(); } break; case 'f': force = 1; break; case 'n': numframes = atoi(optarg); break; case '?': default: usage(); } } if (width <= 0) mjpeg_error_exit1("Invalid Width: %d", width); if (height <= 0) mjpeg_error_exit1("Invalid Height: %d", height); if (!force && (Y < 16 || Y > 235)) mjpeg_error_exit1("16 < Y < 235"); if (!force && (U < 16 || U > 240)) mjpeg_error_exit1("16 < U < 240"); if (!force && (V < 16 || V > 240)) mjpeg_error_exit1("16 < V < 240"); y4m_init_stream_info(&ostream); y4m_init_frame_info(&oframe); y4m_si_set_width(&ostream, width); y4m_si_set_height(&ostream, height); y4m_si_set_interlace(&ostream, interlace); 
y4m_si_set_framerate(&ostream, rate_ratio); y4m_si_set_sampleaspect(&ostream, aspect_ratio); y4m_si_set_chroma(&ostream, chroma_mode); if (y4m_si_get_plane_count(&ostream) != 3) mjpeg_error_exit1("Only the 3 plane formats supported"); plane_length[0] = y4m_si_get_plane_length(&ostream, 0); plane_length[1] = y4m_si_get_plane_length(&ostream, 1); plane_length[2] = y4m_si_get_plane_length(&ostream, 2); yuv[0] = malloc(plane_length[0]); yuv[1] = malloc(plane_length[1]); yuv[2] = malloc(plane_length[2]); /* * Now fill the array once with black but use the provided Y, U and V values */ memset(yuv[0], Y, plane_length[0]); memset(yuv[1], U, plane_length[1]); memset(yuv[2], V, plane_length[2]); if (noheader == 0) y4m_write_stream_header(fileno(stdout), &ostream); while (numframes--) y4m_write_frame(fileno(stdout), &ostream, &oframe, yuv); free(yuv[0]); free(yuv[1]); free(yuv[2]); y4m_fini_stream_info(&ostream); y4m_fini_frame_info(&oframe); exit(0); }
/*
 * Write the YUV4MPEG2 stream header for an EditList-based source.
 *
 * out_fd     - file descriptor the header is written to
 * param      - output geometry, interlacing, SAR/DAR and chroma selection;
 *              param->chroma may be updated in place (DV input can set it,
 *              and an unset value defaults to '420jpeg' at the end)
 * el         - edit list describing the input; only frame 0 is inspected
 *              to determine the data format (see FIXMEs below)
 * streaminfo - stream-info structure filled in and written out
 *
 * On an unusable chroma/input-format combination this exits via
 * mjpeg_error_exit1; a failed header write is reported but not fatal.
 */
void writeoutYUV4MPEGheader(int out_fd,
                            LavParam *param,
                            EditList el,
                            y4m_stream_info_t *streaminfo)
{
  int n;

  y4m_si_set_width(streaminfo, param->output_width);
  y4m_si_set_height(streaminfo, param->output_height);
  y4m_si_set_interlace(streaminfo, param->interlace);
  y4m_si_set_framerate(streaminfo, mpeg_conform_framerate(el.video_fps));

  /* Sample aspect: explicit param wins, then the edit list's value,
     then fall back to guessing from the frame geometry and DAR. */
  if (!Y4M_RATIO_EQL(param->sar, y4m_sar_UNKNOWN)) {
    y4m_si_set_sampleaspect(streaminfo, param->sar);
  } else if ((el.video_sar_width != 0) || (el.video_sar_height != 0)) {
    y4m_ratio_t sar;
    sar.n = el.video_sar_width;
    sar.d = el.video_sar_height;
    y4m_si_set_sampleaspect(streaminfo, sar);
  } else {
    /* no idea! ...eh, just guess. */
    mjpeg_warn("unspecified sample-aspect-ratio --- taking a guess...");
    y4m_si_set_sampleaspect(streaminfo,
                            y4m_guess_sar(param->output_width,
                                          param->output_height,
                                          param->dar));
  }

  /* Validate (or derive) the chroma mode against the input's data
     format.  Incompatible combinations are fatal; merely unusual ones
     only warn. */
  switch (el_video_frame_data_format(0, &el)) { /* FIXME: checking only 0-th frame. */
  case DATAFORMAT_YUV420:
    switch (param->chroma) {
    case Y4M_UNKNOWN:
    case Y4M_CHROMA_420JPEG:
      break;
    case Y4M_CHROMA_420MPEG2:
    case Y4M_CHROMA_420PALDV:
      mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
      break;
    default:
      mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420jpeg') with this input");
      break;
    }
    break;
  case DATAFORMAT_YUV422:
    switch (param->chroma) {
    case Y4M_CHROMA_422:
      break;
    default:
      mjpeg_error_exit1("must specify chroma '422' with this input");
      break;
    }
    break;
  case DATAFORMAT_DV2:
#ifndef HAVE_LIBDV
    mjpeg_error_exit1("DV input was not configured at compile time");
#else
    /* Probe the DV header of frame 0 to learn its chroma sampling. */
    el_get_video_frame(jpeg_data, 0, &el); /* FIXME: checking only 0-th frame. */
    dv_parse_header(decoder, jpeg_data);
    switch(decoder->sampling) {
    case e_dv_sample_420:
      switch (param->chroma) {
      case Y4M_UNKNOWN:
        mjpeg_info("set chroma '420paldv' from input");
        param->chroma = Y4M_CHROMA_420PALDV;
        break;
      case Y4M_CHROMA_420PALDV:
        break;
      case Y4M_CHROMA_420JPEG:
      case Y4M_CHROMA_420MPEG2:
        mjpeg_warn("4:2:0 chroma should be '420paldv' with this input");
        break;
      case Y4M_CHROMA_422:
        /* 4:2:2 output is only possible when libdv decodes PAL DV
           to YV12 is disabled (libdv_pal_yv12 == 1 forbids it). */
        if(libdv_pal_yv12 == 1 )
          mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
        break;
      default:
        mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
        break;
      }
      break;
    case e_dv_sample_411:
      if (param->chroma != Y4M_CHROMA_411)
        mjpeg_info("chroma '411' recommended with this input");
      switch (param->chroma) {
      case Y4M_CHROMA_420MPEG2:
      case Y4M_CHROMA_420PALDV:
        mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
        break;
      }
      break;
    case e_dv_sample_422:
      if (param->chroma != Y4M_CHROMA_422)
        mjpeg_info("chroma '422' recommended with this input");
      switch (param->chroma) {
      case Y4M_CHROMA_420MPEG2:
      case Y4M_CHROMA_420PALDV:
        mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
        break;
      }
      break;
    default:
      break;
    }
#endif
    break;
  case DATAFORMAT_MJPG:
    if (param->chroma != Y4M_CHROMA_422 && el.chroma == Y4M_CHROMA_422)
      mjpeg_info("chroma '422' recommended with this input");
    switch (param->chroma) {
    case Y4M_CHROMA_420MPEG2:
    case Y4M_CHROMA_420PALDV:
      mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
      break;
    }
    break;
  }

  /* Nothing chose a chroma mode above: use the YUV4MPEG2 default. */
  if (param->chroma == Y4M_UNKNOWN) {
    mjpeg_info("set default chroma '420jpeg'");
    param->chroma = Y4M_CHROMA_420JPEG;
  }
  y4m_si_set_chroma(streaminfo, param->chroma);

  n = y4m_write_stream_header(out_fd, streaminfo);
  if (n != Y4M_OK)
    mjpeg_error("Failed to write stream header: %s", y4m_strerr(n));
}
/*
 * Read a numbered sequence of PNG files, convert each frame to YCbCr,
 * subsample the chroma in place, and emit the result as a YUV4MPEG2
 * stream on STDOUT_FILENO.
 *
 * param - conversion settings: pngformatstr (printf pattern for the
 *         file names), begin/numframes (numframes == -1 means "until
 *         a PNG fails to open"), geometry, interlace, framerate and
 *         ss_mode (chroma subsampling).
 *
 * Returns 0 on success; exits via mjpeg_error_exit1 on allocation or
 * stream-write failure.  NOTE: because stdout carries the video
 * stream, all logging must go through mjpeg_* (stderr), never printf.
 */
static int generate_YUV4MPEG(parameters_t *param)
{
  uint32_t frame;
  char pngname[FILENAME_MAX];
  uint8_t *yuv[3];   /* Y/U/V planes of the decoded PNG */
  y4m_stream_info_t streaminfo;
  y4m_frame_info_t frameinfo;
  int err;

  /* Chroma subsampling needs an even luma width; round up if odd. */
  if ((param->width % 2) == 0)
    param->new_width = param->width;
  else {
    param->new_width = ((param->width >> 1) + 1) << 1;
    /* was printf(): a log line on stdout would corrupt the YUV stream */
    mjpeg_info("Setting new, even image width %d", param->new_width);
  }

  mjpeg_info("Now generating YUV4MPEG stream.");
  y4m_init_stream_info(&streaminfo);
  y4m_init_frame_info(&frameinfo);

  y4m_si_set_width(&streaminfo, param->new_width);
  y4m_si_set_height(&streaminfo, param->height);
  y4m_si_set_interlace(&streaminfo, param->interlace);
  y4m_si_set_framerate(&streaminfo, param->framerate);
  y4m_si_set_chroma(&streaminfo, param->ss_mode);

  /* Full-size planes; chroma_subsample() shrinks U/V in place later. */
  yuv[0] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[0][0]));
  yuv[1] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[1][0]));
  yuv[2] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[2][0]));
  /* was unchecked: decode into a NULL plane is undefined behavior */
  if (yuv[0] == NULL || yuv[1] == NULL || yuv[2] == NULL)
    mjpeg_error_exit1("Could not allocate YUV plane buffers");

  /* was unchecked: fail loudly rather than emit a headerless stream */
  if ((err = y4m_write_stream_header(STDOUT_FILENO, &streaminfo)) != Y4M_OK)
    mjpeg_error_exit1("Failed to write stream header: %s", y4m_strerr(err));

  for (frame = param->begin;
       (frame < param->numframes + param->begin) || (param->numframes == -1);
       frame++) {
    snprintf(pngname, sizeof(pngname), param->pngformatstr, frame);

    /* decode_png() delivers its output through these globals. */
    raw0 = yuv[0];
    raw1 = yuv[1];
    raw2 = yuv[2];

    if (decode_png(pngname, 1, param) == -1) {
      mjpeg_info("Read from '%s' failed: %s", pngname, strerror(errno));
      if (param->numframes == -1) {
        /* Open-ended run: a missing file simply ends the sequence. */
        mjpeg_info("No more frames. Stopping.");
        break;
      } else {
        /* Fixed frame count: keep the stream length by repeating the
           last successfully decoded frame. */
        mjpeg_info("Rewriting latest frame instead.");
      }
    } else {
      mjpeg_debug("Converting frame to YUV format.");
      /* Transform colorspace, then subsample (in place). */
      convert_RGB_to_YCbCr(yuv, param->height * param->new_width);
      chroma_subsample(param->ss_mode, yuv, param->new_width, param->height);
    }

    mjpeg_debug("Frame decoded, now writing to output stream.");
    /* was unchecked: a short write here silently truncated the stream */
    if ((err = y4m_write_frame(STDOUT_FILENO, &streaminfo, &frameinfo, yuv)) != Y4M_OK)
      mjpeg_error_exit1("Failed to write frame: %s", y4m_strerr(err));
  }

  y4m_fini_stream_info(&streaminfo);
  y4m_fini_frame_info(&frameinfo);
  free(yuv[0]);
  free(yuv[1]);
  free(yuv[2]);
  return 0;
}