コード例 #1
0
/* Write a YUV4MPEG2 stream header line (including extension tags) to 'fd'.
 * Returns Y4M_OK on success, Y4M_ERR_HEADER if the header cannot be
 * formatted or would be truncated, Y4M_ERR_SYSTEM if the write fails,
 * or the error code returned by y4m_snprint_xtags(). */
static int y4m_write_stream_header2(FILE *fd, y4m_stream_info_t *i)
{
  char s[Y4M_LINE_MAX+1];
  int n;
  int err;

  /* reduce both ratios so the header carries canonical fractions */
  y4m_ratio_t tmpframerate = y4m_si_get_framerate(i);
  y4m_ratio_t tmpsamplerate = y4m_si_get_sampleaspect(i);
  y4m_ratio_reduce(&tmpframerate);
  y4m_ratio_reduce(&tmpsamplerate);
  n = tc_snprintf(s, sizeof(s), "%s W%d H%d F%d:%d I%s A%d:%d",
	       Y4M_MAGIC,
	       y4m_si_get_width(i),
	       y4m_si_get_height(i),
	       /* use the reduced values; previously the un-reduced ratios
		* were re-fetched and the reduction above was dead code */
	       tmpframerate.n, tmpframerate.d,
	       (y4m_si_get_interlace(i) == Y4M_ILACE_NONE) ? "p" :
	       (y4m_si_get_interlace(i) == Y4M_ILACE_TOP_FIRST) ? "t" :
	       (y4m_si_get_interlace(i) == Y4M_ILACE_BOTTOM_FIRST) ? "b" : "?",
	       tmpsamplerate.n, tmpsamplerate.d);
  /* n < 0: formatting error; n >= sizeof(s): output was truncated and
   * "sizeof(s) - n - 1" below would wrap around (size_t underflow). */
  if (n < 0 || (size_t)n >= sizeof(s)) return Y4M_ERR_HEADER;
  if ((err = y4m_snprint_xtags(s + n, sizeof(s) - n - 1, y4m_si_xtags(i)))
      != Y4M_OK)
    return err;
  /* fwrite returns 0 on error (zero complete items written) */
  return (fwrite(s, strlen(s), 1, fd) ? Y4M_OK : Y4M_ERR_SYSTEM);
}
コード例 #2
0
ファイル: main.c プロジェクト: jlehtine/yuvmotionfps
int
main (int argc, char *argv[])
{
  extern char *optarg;
  int cpucap = cpu_accel ();
  char c;
  int fd_in = 0;
  int fd_out = 1;
  int errno = 0;
  int have_framerate = 0;
  int force_interlacing = 0;
  y4m_frame_info_t iframeinfo;
  y4m_stream_info_t istreaminfo;
  y4m_frame_info_t oframeinfo;
  y4m_stream_info_t ostreaminfo;
  int output_frame_number = 0;
  int input_frame_number = 0;
  y4m_ratio_t output_frame_rate, input_frame_rate, frame_rate_ratio;
  float ratio = 0;		// input/output, output should be > input )
  int scene_change;
  y4m_ratio_t ratio_percent_frame;
  float percent_threshold = 0.02;

/* percent_threshold is there to avoid interpolating frames when the output frame
 * is very close to an input frame
 */

  mjpeg_log (LOG_INFO, "-------------------------------------------------");
  mjpeg_log (LOG_INFO, "   Motion-Compensating-Frame-Rate-Converter     ");
  mjpeg_log (LOG_INFO, "-------------------------------------------------");

  while ((c = getopt (argc, argv, "hvb:p:r:t:s:f")) != -1)
    {
      switch (c)
	{
	case 'h':
	  {
	    mjpeg_log (LOG_INFO, "Usage ");
	    mjpeg_log (LOG_INFO, "-------------------------");
	    mjpeg_log (LOG_INFO, "  This program converts frame rates");
	    mjpeg_log (LOG_INFO,
		       "with a smart algorithm that estimates the motion of the elements");
	    mjpeg_log (LOG_INFO,
		       "to smooth the motion, rather than duplicating frames.");
	    mjpeg_log (LOG_INFO,
		       "  It's way smoother, but introduces a bit of blocking and/or");
	    mjpeg_log (LOG_INFO,
		       " maybe blurryness when things move too fast.");
	    mjpeg_log (LOG_INFO, " ");
	    mjpeg_log (LOG_INFO,
		       " -r Frame rate for the resulting stream (in X:Y fractional form)");
	    mjpeg_log (LOG_INFO,
		       " -b block size (default = 8, will be rounded to even number )");
	    mjpeg_log (LOG_INFO,
		       " -p search path radius (default = 8, do not use high values ~ > 20)");
	    mjpeg_log (LOG_INFO,
		       "-t frame approximation threshold (default=50, higher=better)");
	    mjpeg_log (LOG_INFO,
		       "-s scene change threshold (default=8, 0=disable scene change detection)");
	    mjpeg_log (LOG_INFO,
		       "-r Frame rate for the resulting stream (in X:Y fractional form)");
	    mjpeg_log (LOG_INFO,
		       " -f force processing interlaced input (don't know what it does)");

	    mjpeg_log (LOG_INFO, " -v verbose/debug");

	    exit (0);
	    break;
	  }
	case 'v':
	  {
	    verbose = 1;
	    break;
	  }
	case 'f':
	  {
	    force_interlacing = 1;
	    break;
	  }
	case 'b':
	  {
	    block_size = strtol (optarg, (char **) NULL, 10);
	    /* we only want even block sizes */
	    if (block_size % 1 != 0)
	      {
		block_size = block_size + 1;
		mjpeg_log (LOG_WARN, "Block size changed to %d", block_size);
	      }
	    else
	      mjpeg_log (LOG_INFO, "Block size: %d", block_size);
	    break;
	  }
	case 'p':
	  {
	    search_path_radius = strtol (optarg, (char **) NULL, 10);	/* safer atoi */
	    mjpeg_log (LOG_INFO, "Search radius %d", search_path_radius);

	    break;
	  }
	case 'r':
	  {
	    if (Y4M_OK != y4m_parse_ratio (&output_frame_rate, optarg))
	      mjpeg_error_exit1
		("Syntax for frame rate should be Numerator:Denominator");


	    mjpeg_log (LOG_INFO, "New Frame rate %d:%d",
		       output_frame_rate.n, output_frame_rate.d);
	    have_framerate = 1;
	    break;
	  }
	case 't':
	  {
	    percent_threshold = strtol (optarg, (char **) NULL, 10);
	    if ((percent_threshold > 1) && (percent_threshold <= 1024))
	      percent_threshold = 1.0 / percent_threshold;
	    else
	      mjpeg_error_exit1 ("Threshold should be between 2 and 1024");

	    mjpeg_log (LOG_INFO, "Approximation threshold %d",
		       (int) ((float) 1.0 / percent_threshold));
	    break;

	  }
	case 's':
	  {
	    scene_change_threshold = strtol (optarg, (char **) NULL, 10);
	    if (scene_change_threshold == 0)
	      mjpeg_log (LOG_INFO, "Scene change detection disabled");
	    else
	      mjpeg_log (LOG_INFO, "Scene change threshold: %d00 percent",
			 scene_change_threshold);
	    break;

	  }
	}
    }

  if (!have_framerate)
    {
      mjpeg_error_exit1
	("Please specify a frame rate; yuvmotionfps -h for more info");
    }

  /* initialize motion_library */
  init_motion_search ();

  /* initialize MMX transforms (fixme) */
  if ((cpucap & ACCEL_X86_MMXEXT) != 0 || (cpucap & ACCEL_X86_SSE) != 0)
    {
#if 0
      mjpeg_log (LOG_INFO,
		 "FIXME: could use MMX/SSE Block/Frame-Copy/Blend if I had one ;-)");
#endif
    }

  /* initialize stream-information */
  y4m_accept_extensions (1);
  y4m_init_stream_info (&istreaminfo);
  y4m_init_frame_info (&iframeinfo);
  y4m_init_stream_info (&ostreaminfo);
  y4m_init_frame_info (&oframeinfo);

  /* open input stream */
  if ((errno = y4m_read_stream_header (fd_in, &istreaminfo)) != Y4M_OK)
    {
      mjpeg_log (LOG_ERROR, "Couldn't read YUV4MPEG header: %s!",
		 y4m_strerr (errno));
      exit (1);
    }

  /* get format information */
  width = y4m_si_get_width (&istreaminfo);
  height = y4m_si_get_height (&istreaminfo);
  input_chroma_subsampling = y4m_si_get_chroma (&istreaminfo);
  mjpeg_log (LOG_INFO, "Y4M-Stream is %ix%i(%s)",
	     width,
	     height,
	     input_chroma_subsampling ==
	     Y4M_CHROMA_420JPEG ? "4:2:0 MPEG1" : input_chroma_subsampling
	     ==
	     Y4M_CHROMA_420MPEG2 ? "4:2:0 MPEG2" :
	     input_chroma_subsampling ==
	     Y4M_CHROMA_420PALDV ? "4:2:0 PAL-DV" :
	     input_chroma_subsampling ==
	     Y4M_CHROMA_444 ? "4:4:4" : input_chroma_subsampling ==
	     Y4M_CHROMA_422 ? "4:2:2" : input_chroma_subsampling ==
	     Y4M_CHROMA_411 ? "4:1:1 NTSC-DV" : input_chroma_subsampling
	     ==
	     Y4M_CHROMA_MONO ? "MONOCHROME" : input_chroma_subsampling ==
	     Y4M_CHROMA_444ALPHA ? "4:4:4:4 ALPHA" : "unknown");

  /* if chroma-subsampling isn't supported bail out ... */
  switch (input_chroma_subsampling)
    {
    case Y4M_CHROMA_420JPEG:
      break;
    case Y4M_CHROMA_420PALDV:
    case Y4M_CHROMA_420MPEG2:
    case Y4M_CHROMA_411:
      mjpeg_log (LOG_WARN,
		 "This chroma subsampling mode has not been thoroughly tested");
      break;
    default:

      mjpeg_error_exit1
	("Y4M-Stream is not 4:2:0. Other chroma-modes currently not allowed. Sorry.");
    }

  /* the output is progressive 4:2:0 MPEG 1 */
  y4m_si_set_interlace (&ostreaminfo, Y4M_ILACE_NONE);
  y4m_si_set_chroma (&ostreaminfo, Y4M_CHROMA_420JPEG);
  y4m_si_set_width (&ostreaminfo, width);
  y4m_si_set_height (&ostreaminfo, height);
  y4m_si_set_sampleaspect (&ostreaminfo,
			   y4m_si_get_sampleaspect (&istreaminfo));

  input_frame_rate = y4m_si_get_framerate (&istreaminfo);

  y4m_si_set_framerate (&ostreaminfo, output_frame_rate);

  if (width % block_size != 0)
    {
      mjpeg_log (LOG_WARN,
		 "Warning, stream width(%d) is not a multiple of block_size (%d)",
		 width, block_size);
      mjpeg_log (LOG_WARN,
		 "The right side of the image might not be what you want");
    }
  if (height % block_size != 0)
    {
      mjpeg_log (LOG_WARN,
		 "Warning, stream height(%d) is not a multiple of block_size (%d)",
		 height, block_size);
      mjpeg_log (LOG_WARN,
		 "The lower side of the image might not be what you want");
    }



  /* Calculate the different ratios:
   * ratio is (input framerate / output framerate)
   * ratio_percent_frame is the fractional representation of percent frame
   */
  frame_rate_ratio.n = input_frame_rate.n * output_frame_rate.d;
  frame_rate_ratio.d = input_frame_rate.d * output_frame_rate.n;
  y4m_ratio_reduce (&frame_rate_ratio);
  ratio = (float) frame_rate_ratio.n / frame_rate_ratio.d;

  ratio_percent_frame.d = 1;
  ratio_percent_frame.n = 0;

  if (ratio == 0)
    mjpeg_error_exit1 ("Cannot have ratio =0 ");
  else if (ratio > 128)
    mjpeg_error_exit1 ("Cannot have ratio >128  ");


  if ((y4m_si_get_interlace (&istreaminfo) != Y4M_ILACE_NONE)
      && (!force_interlacing))
    {
      mjpeg_error_exit1 ("Sorry, can only convert progressive streams");
    }

  /* write the outstream header */
  y4m_write_stream_header (fd_out, &ostreaminfo);

  /* now allocate the needed buffers */
  {
    /* calculate the memory offset needed to allow the processing
     * functions to overshot. The biggest overshot is needed for the
     * MC-functions, so we'll use 8*width...
     */
    buff_offset = width * 8;
    buff_size = buff_offset * 2 + width * height;

    inframe[0] = buff_offset + (uint8_t *) malloc (buff_size);
    inframe[1] = buff_offset + (uint8_t *) malloc (buff_size);
    inframe[2] = buff_offset + (uint8_t *) malloc (buff_size);

    reconstructed[0] = buff_offset + (uint8_t *) malloc (buff_size);
    reconstructed[1] = buff_offset + (uint8_t *) malloc (buff_size);
    reconstructed[2] = buff_offset + (uint8_t *) malloc (buff_size);

    frame1[0] = buff_offset + (uint8_t *) malloc (buff_size);
    frame1[1] = buff_offset + (uint8_t *) malloc (buff_size);
    frame1[2] = buff_offset + (uint8_t *) malloc (buff_size);

    mjpeg_log (LOG_INFO, "Buffers allocated.");
  }

  /* initialize motion-search-pattern */
  init_search_pattern ();

  errno = y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, frame1);
  if (errno != Y4M_OK)
    goto The_end;

  /* read every frame until the end of the input stream and process it */
  while (Y4M_OK == (errno = y4m_read_frame (fd_in,
					    &istreaminfo,
					    &iframeinfo, inframe)))
    {
/* frame1 contains the previous input frame
 * inframe contains the current input frame
 * reconstructed contains the current output frame
 * percent_frame is the amount of time after which the output frame is sent 
 * 	in percent of the time between input frames
 *
 * Input:
 * frame1 . . . . . . . . . . . . . . . . . . inframe
 * Output: 
 * . . . . . . . . . . .reconstructed. . . . . . . 
 * |<- - percent_frame - - - ->|
 * |< - - - - - - - - - -100% - - - - - - - - - >|
 *
 * The variable ratio_percent_frame is the fractional representation of
 * percent_frame; it is there to avoid rounding errors 
 */
      input_frame_number++;

      if (verbose)
	{
	  mjpeg_log (LOG_INFO, "Input frame number %d", input_frame_number);
	}

      while (percent_frame < (1.0 - percent_threshold))
	{
	  output_frame_number++;
	  if (verbose)
	    {
	      mjpeg_log (LOG_INFO, "Output frame number %d",
			 output_frame_number);
	    }

#define ABS(value) ((value)<0)?-(value):(value)

	  if (ABS (percent_frame) <= percent_threshold)
	    {
	      /* I put a threshold here to avoid wasting time */
	      /* The output frame coincides with the input frame
	       * so there is no need to do any processing 
	       * just copy the input frame as is */
	      y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1);
	      if (verbose)
		mjpeg_log (LOG_INFO, "Percent %f rounded to next frame",
			   percent_frame);
	    }
	  else
	    {
	      /* We have to interpolate the frame (between the current inframe
	       * and the previous frame1 
	       * if there is a scene change, motion_compensate_field will
	       * return 1 and we use the previous frame */

	      if (verbose)
		mjpeg_log (LOG_INFO, "Percent %f", percent_frame);

	      scene_change = motion_compensate_field ();
	      if (scene_change)
		{
		  mjpeg_log (LOG_INFO, "Scene change at frame %d",
			     input_frame_number);
		  y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1);
		}
	      else
		{
		  y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo,
				   reconstructed);
		}
	    }
	  ratio_percent_frame =
	    add_ratio (ratio_percent_frame, frame_rate_ratio);
	  percent_frame = Y4M_RATIO_DBL (ratio_percent_frame);

	}

      /* Skip input frames if downsampling  (ratio > 1)
       * when upsampling, ratio < 1
       *    so we have ( 1< percent_frame < 2) at this point 
       *    hence we don't go in in the loop */
      while (percent_frame >= 2)
	{
	  percent_frame = percent_frame - 1;
	  ratio_percent_frame = ratio_minus_1 (ratio_percent_frame);
	  if (Y4M_OK !=
	      (errno =
	       y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, inframe)))
	    goto The_end;
	}
      ratio_percent_frame = ratio_minus_1 (ratio_percent_frame);
      percent_frame = percent_frame - 1;

      /* store the previous frame */
      memcpy (frame1[0], inframe[0], width * height);
      memcpy (frame1[1], inframe[1], width * height / 4);
      memcpy (frame1[2], inframe[2], width * height / 4);

    }

The_end:

  /* free allocated buffers */
  {
    free (inframe[0] - buff_offset);
    free (inframe[1] - buff_offset);
    free (inframe[2] - buff_offset);

    free (reconstructed[0] - buff_offset);
    free (reconstructed[1] - buff_offset);
    free (reconstructed[2] - buff_offset);

    free (frame1[0] - buff_offset);
    free (frame1[1] - buff_offset);
    free (frame1[2] - buff_offset);


    mjpeg_log (LOG_INFO, "Buffers freed.");
  }

  /* did stream end unexpectedly ? */
  if (errno != Y4M_ERR_EOF)
    mjpeg_error_exit1 ("%s", y4m_strerr (errno));

  /* Exit gently */
  return (0);
}
コード例 #3
0
ファイル: demux_y4m.c プロジェクト: Gamer125/wiibrowser
/* Parse the header of a YUV4MPEG input stream and fill in the video
 * stream parameters (dimensions, fps, aspect, BITMAPINFOHEADER).
 * Handles both the old v1 "YUV4MPEG" fixed-field header and the tagged
 * YUV4MPEG2 header (via the y4m library). */
static void read_streaminfo(demuxer_t *demuxer)
{
    y4m_priv_t *priv = demuxer->priv;
    sh_video_t *sh = demuxer->video->sh;
    y4m_ratio_t ratio;
    int err;

    if (priv->is_older)
    {
	/* old header layout: "YUV4MPEG <www> <hhh> <r>\n" */
	char buf[4];
	int frame_rate_code;

	stream_skip(demuxer->stream, 8); /* the "YUV4MPEG" magic */
	stream_skip(demuxer->stream, 1); /* separator */
	stream_read(demuxer->stream, (char *)&buf[0], 3);
	buf[3] = 0;
	sh->disp_w = atoi(buf);
	stream_skip(demuxer->stream, 1); /* separator */
	stream_read(demuxer->stream, (char *)&buf[0], 3);
	buf[3] = 0;
	sh->disp_h = atoi(buf);
	stream_skip(demuxer->stream, 1); /* separator */
	stream_read(demuxer->stream, (char *)&buf[0], 1);
	buf[1] = 0;
	frame_rate_code = atoi(buf);
	stream_skip(demuxer->stream, 1); /* trailing new-line */

	if (!sh->fps)
	{
	    /* frame-rate code table (values from xawtv); slot 0 unused */
	    static const float v1_fps[9] = {
		0.0f, 23.976f, 24.0f, 25.0f, 29.97f,
		30.0f, 50.0f, 59.94f, 60.0f
	    };
	    if (frame_rate_code >= 1 && frame_rate_code <= 8)
		sh->fps = v1_fps[frame_rate_code];
	    else
		sh->fps = 25.0f; /* fallback for unknown codes */
	}
	sh->frametime = 1.0f/sh->fps;
    }
    else
    {
	/* YUV4MPEG2: let the y4m library do the parsing */
	y4m_init_stream_info(priv->si);
	err = y4m_read_stream_header(demuxer->stream, priv->si);
	if (err != Y4M_OK)
	    mp_msg(MSGT_DEMUXER, MSGL_FATAL, "error parsing YUV4MPEG header: %s\n", y4m_strerr(err));

	if (!sh->fps)
	{
	    ratio = y4m_si_get_framerate(priv->si);
	    /* guard against a zero denominator in the stream header */
	    sh->fps = (ratio.d != 0) ? (float)ratio.n/(float)ratio.d : 15.0f;
	}
	sh->frametime = 1.0f/sh->fps;

	sh->disp_w = y4m_si_get_width(priv->si);
	sh->disp_h = y4m_si_get_height(priv->si);

	/* derive the display aspect from the sample (pixel) aspect */
	ratio = y4m_si_get_sampleaspect(priv->si);
	if (ratio.d != 0 && ratio.n != 0)
	    sh->aspect = (float)(sh->disp_w*ratio.n)/(float)(sh->disp_h*ratio.d);

	demuxer->seekable = 0;
    }

    /* decoded frames are planar YV12 */
    sh->format = mmioFOURCC('Y', 'V', '1', '2');

    sh->bih->biSize = 40;
    sh->bih->biWidth = sh->disp_w;
    sh->bih->biHeight = sh->disp_h;
    sh->bih->biPlanes = 3;
    sh->bih->biBitCount = 12;
    sh->bih->biCompression = sh->format;
    sh->bih->biSizeImage = sh->bih->biWidth*sh->bih->biHeight*3/2; /* YV12 */

    mp_msg(MSGT_DEMUX, MSGL_INFO, "YUV4MPEG2 Video stream %d size: display: %dx%d, codec: %ux%u\n",
            demuxer->video->id, sh->disp_w, sh->disp_h, sh->bih->biWidth,
            sh->bih->biHeight);
}
コード例 #4
0
ファイル: lives-yuv4mpeg.c プロジェクト: tbe/LiVES
/* Start reading a yuv4mpeg stream for clip 'sfile'.
 * Opens the fifo and then parses the stream header, each in its own
 * watchdog thread so a stalled writer cannot hang the GUI.
 * Returns TRUE on success, FALSE on timeout or a bad header. */
static boolean lives_yuv_stream_start_read(lives_clip_t *sfile) {
  double ofps=sfile->fps;

  lives_yuv4m_t *yuv4mpeg=(lives_yuv4m_t *)sfile->ext_src;

  pthread_t y4thread;

  char *filename=yuv4mpeg->filename,*tmp;

  int alarm_handle=0;

  int ohsize=sfile->hsize;
  int ovsize=sfile->vsize;

  y4data thread_data;

  register int i;


  if (filename==NULL) return FALSE;

  if (yuv4mpeg->fd==-1) {
    // create a thread to open the fifo

    thread_data.filename=filename;

    pthread_create(&y4thread,NULL,y4open_thread,(void *)&thread_data);

    alarm_handle=lives_alarm_set(YUV4_O_TIME);

    d_print("");
    d_print(_("Waiting for yuv4mpeg frames..."));

    while (!lives_alarm_get(alarm_handle)&&!pthread_kill(y4thread,0)) {
      // wait for thread to complete or timeout
      lives_usleep(prefs->sleep_time);
      lives_widget_context_update();
    }

    if (lives_alarm_get(alarm_handle)) {
      // timeout - kill thread and wait for it to terminate
      pthread_cancel(y4thread);
      pthread_join(y4thread,NULL);
      lives_alarm_clear(alarm_handle);

      d_print_failed();
      d_print(_("Unable to open the incoming video stream\n"));

      yuv4mpeg->fd=thread_data.fd;

      if (yuv4mpeg->fd>=0) {
        close(yuv4mpeg->fd);
        yuv4mpeg->fd=-1;
      }

      return FALSE;
    }

    pthread_join(y4thread,NULL);
    lives_alarm_clear(alarm_handle);

    yuv4mpeg->fd=thread_data.fd;

    if (yuv4mpeg->fd<0) {
      return FALSE;
    }
  }

  // create a thread to read the stream header
  thread_data.yuv4mpeg=yuv4mpeg;
  pthread_create(&y4thread,NULL,y4header_thread,&thread_data);
  alarm_handle=lives_alarm_set(YUV4_H_TIME);

  while (!lives_alarm_get(alarm_handle)&&!pthread_kill(y4thread,0)) {
    // wait for thread to complete or timeout
    lives_usleep(prefs->sleep_time);
    lives_widget_context_update();
  }

  if (lives_alarm_get(alarm_handle)) {
    // timeout - kill thread and wait for it to terminate
    pthread_cancel(y4thread);
    pthread_join(y4thread,NULL);
    lives_alarm_clear(alarm_handle);
    d_print(_("Unable to read the stream header\n"));
    return FALSE;
  }

  pthread_join(y4thread,NULL);
  lives_alarm_clear(alarm_handle);

  i=thread_data.i;

  if (i != Y4M_OK) {
    // own variable: the old inner "tmp" shadowed the outer one
    char *emsg;
    d_print((emsg=lives_strdup_printf("yuv4mpeg: %s\n", y4m_strerr(i))));
    lives_free(emsg);
    return FALSE;
  }

  d_print(_("got header\n"));

  sfile->hsize = yuv4mpeg->hsize = y4m_si_get_width(&(yuv4mpeg->streaminfo));
  sfile->vsize = yuv4mpeg->vsize = y4m_si_get_height(&(yuv4mpeg->streaminfo));

  // round the rational frame rate through a fixed-precision string;
  // the strdup_printf result was previously leaked
  tmp=lives_strdup_printf("%.8f",Y4M_RATIO_DBL(y4m_si_get_framerate(&(yuv4mpeg->streaminfo))));
  sfile->fps=cfile->pb_fps=lives_strtod(tmp,NULL);
  lives_free(tmp);

  if (!(sfile->hsize*sfile->vsize)) {
    // free the message after the dialog; it was previously leaked
    tmp=lives_strdup_printf(_("Video dimensions: %d x %d are invalid. Stream cannot be opened"),
                            sfile->hsize,sfile->vsize);
    do_error_dialog(tmp);
    lives_free(tmp);
    return FALSE;
  }

  if (sfile->hsize!=ohsize||sfile->vsize!=ovsize||sfile->fps!=ofps) {
    set_main_title(sfile->file_name,0);
  }

  // fixed: the format has four conversions (%s %d %d %.3f) but five
  // arguments were passed, so %.3f consumed the int cfile->bpp (UB)
  d_print((tmp=lives_strdup_printf(_("Reset clip values for %s: size=%dx%d fps=%.3f\n"),yuv4mpeg->name,
                                   cfile->hsize,yuv4mpeg->vsize,cfile->fps)));
  lives_free(tmp);

  yuv4mpeg->ready=TRUE;

  return TRUE;
}
コード例 #5
0
ファイル: yuvplay.c プロジェクト: AquaSoftGmbH/mjpeg
/* yuvplay main: read a YUV4MPEG2 stream from stdin and display it with
 * SDL using a YV12 overlay, pacing playback to the stream's (or the
 * user-supplied) frame rate.  Returns 0 on success. */
int main(int argc, char *argv[])
{
   int verbosity = 1;
   double time_between_frames = 0.0;   /* microseconds per frame */
   double frame_rate = 0.0;
   struct timeval time_now;
   int n, frame;
   unsigned char *yuv[3];              /* Y, Cb, Cr planes (4:2:0) */
   int in_fd = 0;
   int screenwidth=0, screenheight=0;
   y4m_stream_info_t streaminfo;
   y4m_frame_info_t frameinfo;
   int frame_width;
   int frame_height;
   int wait_for_sync = 1;
   char *window_title = NULL;

   /* getopt() is specified to return -1 at the end of the options, not
    * necessarily EOF */
   while ((n = getopt(argc, argv, "hs:t:f:cv:")) != -1) {
      switch (n) {
         case 'c':
            wait_for_sync = 0;
            break;
         case 's':
            if (sscanf(optarg, "%dx%d", &screenwidth, &screenheight) != 2) {
               /* mjpeg_error_exit1 does not return; the old exit(1)
                * after it was dead code */
               mjpeg_error_exit1( "-s option needs two arguments: -s 10x10");
            }
            break;
	  case 't':
	    window_title = optarg;
	    break;
	  case 'f':
		  frame_rate = atof(optarg);
		  if( frame_rate <= 0.0 || frame_rate > 200.0 )
			  mjpeg_error_exit1( "-f option needs argument > 0.0 and < 200.0");
		  break;
          case 'v':
	    verbosity = atoi(optarg);
	    if ((verbosity < 0) || (verbosity > 2))
	      mjpeg_error_exit1("-v needs argument from {0, 1, 2} (not %d)",
				verbosity);
	    break;
	  case 'h':
	  case '?':
            usage();
            exit(1);
            break;
         default:
            usage();
            exit(1);
      }
   }

   mjpeg_default_handler_verbosity(verbosity);

   y4m_accept_extensions(1);
   y4m_init_stream_info(&streaminfo);
   y4m_init_frame_info(&frameinfo);
   if ((n = y4m_read_stream_header(in_fd, &streaminfo)) != Y4M_OK) {
      mjpeg_error("Couldn't read YUV4MPEG2 header: %s!",
         y4m_strerr(n));
      exit (1);
   }

   /* only 4:2:0 variants are supported by the display path below */
   switch (y4m_si_get_chroma(&streaminfo)) {
   case Y4M_CHROMA_420JPEG:
   case Y4M_CHROMA_420MPEG2:
   case Y4M_CHROMA_420PALDV:
     break;
   default:
     mjpeg_error_exit1("Cannot handle non-4:2:0 streams yet!");
   }

   frame_width = y4m_si_get_width(&streaminfo);
   frame_height = y4m_si_get_height(&streaminfo);

   if ((screenwidth <= 0) || (screenheight <= 0)) {
     /* no user supplied screen size, so let's use the stream info */
     y4m_ratio_t aspect = y4m_si_get_sampleaspect(&streaminfo);

     if (!(Y4M_RATIO_EQL(aspect, y4m_sar_UNKNOWN))) {
       /* if pixel aspect ratio present, use it */
#if 1
       /* scale width, but maintain height (line count) */
       screenheight = frame_height;
       screenwidth = frame_width * aspect.n / aspect.d;
#else
       if ((frame_width * aspect.d) < (frame_height * aspect.n)) {
	 screenwidth = frame_width;
	 screenheight = frame_width * aspect.d / aspect.n;
       } else {
	 screenheight = frame_height;
	 screenwidth = frame_height * aspect.n / aspect.d;
       }
#endif
     } else {
       /* unknown aspect ratio -- assume square pixels */
       screenwidth = frame_width;
       screenheight = frame_height;
     }
   }

   /* Initialize the SDL library */
   if( SDL_Init(SDL_INIT_VIDEO) < 0 ) {
      mjpeg_error("Couldn't initialize SDL: %s", SDL_GetError());
      exit(1);
   }

   /* set window title */
   SDL_WM_SetCaption(window_title, NULL);

   /* yuv params: one full-size luma plane, two quarter-size chroma planes */
   yuv[0] = malloc(frame_width * frame_height * sizeof(unsigned char));
   yuv[1] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));
   yuv[2] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));
   /* previously unchecked: a failed malloc would crash in y4m_read_frame */
   if (yuv[0] == NULL || yuv[1] == NULL || yuv[2] == NULL)
      mjpeg_error_exit1("Out of memory allocating YUV buffers");

   screen = SDL_SetVideoMode(screenwidth, screenheight, 0, SDL_SWSURFACE);
   if ( screen == NULL ) {
      mjpeg_error("SDL: Couldn't set %dx%d: %s",
		  screenwidth, screenheight, SDL_GetError());
      exit(1);
   }
   else {
      mjpeg_debug("SDL: Set %dx%d @ %d bpp",
		  screenwidth, screenheight, screen->format->BitsPerPixel);
   }

   /* since IYUV ordering is not supported by Xv accel on maddog's system
    *  (Matrox G400 --- although, the alias I420 is, but this is not
    *  recognized by SDL), we use YV12 instead, which is identical,
    *  except for ordering of Cb and Cr planes...
    * we swap those when we copy the data to the display buffer...
    */
   yuv_overlay = SDL_CreateYUVOverlay(frame_width, frame_height,
				      SDL_YV12_OVERLAY,
				      screen);
   if ( yuv_overlay == NULL ) {
      mjpeg_error("SDL: Couldn't create SDL_yuv_overlay: %s",
		      SDL_GetError());
      exit(1);
   }
   if ( yuv_overlay->hw_overlay )
     mjpeg_debug("SDL: Using hardware overlay.");

   rect.x = 0;
   rect.y = 0;
   rect.w = screenwidth;
   rect.h = screenheight;

   SDL_DisplayYUVOverlay(yuv_overlay, &rect);

   signal (SIGINT, sigint_handler);

   frame = 0;
   if ( frame_rate == 0.0 )
   {
	   /* frame rate has not been set from command-line... */
	   if (Y4M_RATIO_EQL(y4m_fps_UNKNOWN, y4m_si_get_framerate(&streaminfo))) {
	     mjpeg_info("Frame-rate undefined in stream... assuming 25Hz!" );
	     frame_rate = 25.0;
	   } else {
	     frame_rate = Y4M_RATIO_DBL(y4m_si_get_framerate(&streaminfo));
	   }
   }
   time_between_frames = 1.e6 / frame_rate;

   gettimeofday(&time_now,0);

   while ((n = y4m_read_frame(in_fd, &streaminfo, &frameinfo, yuv)) == Y4M_OK && (!got_sigint)) {

      /* Lock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         if ( SDL_LockSurface(screen) < 0 ) break;
      }
      if (SDL_LockYUVOverlay(yuv_overlay) < 0) break;

      /* let's draw the data (*yuv[3]) on a SDL screen (*screen) */
      /* note the Cb/Cr swap: the overlay is YV12 but the stream is I420 */
      memcpy(yuv_overlay->pixels[0], yuv[0], frame_width * frame_height);
      memcpy(yuv_overlay->pixels[1], yuv[2], frame_width * frame_height / 4);
      memcpy(yuv_overlay->pixels[2], yuv[1], frame_width * frame_height / 4);

      /* Unlock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         SDL_UnlockSurface(screen);
      }
      SDL_UnlockYUVOverlay(yuv_overlay);

      /* Show, baby, show! */
      SDL_DisplayYUVOverlay(yuv_overlay, &rect);
      mjpeg_info("Playing frame %4.4d - %s",
		 frame, print_status(frame, frame_rate));

      if (wait_for_sync)
         while(get_time_diff(time_now) < time_between_frames) {
            usleep(1000);
         }
      frame++;

      gettimeofday(&time_now,0);
   }

   if ((n != Y4M_OK) && (n != Y4M_ERR_EOF))
      mjpeg_error("Couldn't read frame: %s", y4m_strerr(n));

   for (n=0; n<3; n++) {
      free(yuv[n]);
   }

   mjpeg_info("Played %4.4d frames (%s)",
	      frame, print_status(frame, frame_rate));

   SDL_FreeYUVOverlay(yuv_overlay);
   SDL_Quit();

   y4m_fini_frame_info(&frameinfo);
   y4m_fini_stream_info(&streaminfo);
   return 0;
}
コード例 #6
0
ファイル: yuvhsync.c プロジェクト: zyh329/lavtools
// *************************************************************************************
// MAIN
// *************************************************************************************
// *************************************************************************************
// MAIN
// *************************************************************************************
/* yuvhsync main: read a YUV4MPEG stream on stdin, correct horizontal sync
 * shift (up to -m pixels, searching -s pixels) and write the result to
 * stdout; -n analyses without shifting.  Returns 0 on success. */
int main (int argc, char *argv[])
{

    int verbose = 1 ; // LOG_ERROR ?
    int fdIn = 0 ;
    int fdOut = 1 ;
    y4m_stream_info_t in_streaminfo,out_streaminfo;
    const static char *legal_flags = "v:m:s:n";
    int max_shift = 0, search = 0;
    int noshift=0;
    int c;

    while ((c = getopt (argc, argv, legal_flags)) != -1) {
        switch (c) {
        case 'v':
            verbose = atoi (optarg);
            if (verbose < 0 || verbose > 2)
                mjpeg_error_exit1 ("Verbose level must be [0..2]");
            break;
        case 'm':
            /* integer option: atof() was silently truncated to int */
            max_shift = atoi(optarg);
            break;
        case 's':
            search = atoi(optarg);
            break;
        case 'n':
            noshift=1;
            break;
        case '?':
            print_usage (argv);
            return 0 ;
            break;
        }
    }


    // mjpeg tools global initialisations
    mjpeg_default_handler_verbosity (verbose);

    // Initialize input streams
    y4m_init_stream_info (&in_streaminfo);
    y4m_init_stream_info (&out_streaminfo);

    // ***************************************************************
    // Get video stream informations (size, framerate, interlacing, aspect ratio).
    // The streaminfo structure is filled in
    // ***************************************************************
    // INPUT comes from stdin, we check for a correct file header
    if (y4m_read_stream_header (fdIn, &in_streaminfo) != Y4M_OK)
        mjpeg_error_exit1 ("Couldn't read YUV4MPEG header!");

    y4m_copy_stream_info( &out_streaminfo, &in_streaminfo );


    // Information output

    /* in that function we do all the important work */
    if (!noshift)
        y4m_write_stream_header(fdOut,&out_streaminfo);

    process( fdIn,&in_streaminfo,fdOut,&out_streaminfo,max_shift,search,noshift);

    y4m_fini_stream_info (&in_streaminfo);
    y4m_fini_stream_info (&out_streaminfo);

    return 0;
}
コード例 #7
0
ファイル: yuvconvolve.c プロジェクト: silicontrip/lavtools
// *************************************************************************************
// MAIN
// *************************************************************************************
// *************************************************************************************
// MAIN
// *************************************************************************************
/* yuvconvolve main: apply a convolution matrix (-m, required) with an
 * optional divisor (-d) to a YUV4MPEG stream read from stdin, writing the
 * filtered stream to stdout.  Returns 0 on success. */
int main (int argc, char *argv[])
{

	int verbose = 4; // LOG_ERROR ;
	int fdIn = 0 ;
	int fdOut = 1 ;
	y4m_stream_info_t in_streaminfo,out_streaminfo;
	const static char *legal_flags = "d:m:V:";
	int c;
	/* initialized: both were read uninitialized below when -m was absent */
	int *matrix = NULL;
	int matlen = 0;
	float divisor=0;

  while ((c = getopt (argc, argv, legal_flags)) != -1) {
    switch (c) {
      case 'V':
        verbose = atoi (optarg);
        if (verbose < 0 || verbose > 2)
          mjpeg_error_exit1 ("Verbose level must be [0..2]");
        break;
    case 'd':
	    divisor = atof(optarg);
		if (divisor == 0) {
			mjpeg_error_exit1 ("Divisor must not be 0");
		}

		break;
	case 'm':
		/* strlen(optarg) is an upper bound on the number of matrix
		 * entries (each entry takes at least one character) */
		matrix = (int *) malloc (sizeof(int) * strlen(optarg));
		if (matrix == NULL)
			mjpeg_error_exit1 ("Out of memory allocating matrix");
		matlen = parse_matrix(optarg,matrix);
		if (matlen == 0) {
			mjpeg_error_exit1 ("Invalid matrix");
		}
		break;

	case '?':
          print_usage (argv);
          return 0 ;
          break;
    }
  }

	/* -m is mandatory; sum_matrix()/convolve() on a NULL matrix was UB */
	if (matrix == NULL)
		mjpeg_error_exit1 ("No convolution matrix given (use -m)");

	if (divisor == 0) {
		divisor = sum_matrix(matrix,matlen);
	}

	if (divisor == 0) {
		mjpeg_warn("divisor defaulting to 1\n");
		divisor = 1;
	}

  // mjpeg tools global initialisations
  mjpeg_default_handler_verbosity (verbose);

  // Initialize input streams
  y4m_init_stream_info (&in_streaminfo);
  y4m_init_stream_info (&out_streaminfo);

  // ***************************************************************
  // Get video stream informations (size, framerate, interlacing, aspect ratio).
  // The streaminfo structure is filled in
  // ***************************************************************
  // INPUT comes from stdin, we check for a correct file header
	if (y4m_read_stream_header (fdIn, &in_streaminfo) != Y4M_OK)
		mjpeg_error_exit1 ("Couldn't read YUV4MPEG header!");

	y4m_copy_stream_info( &out_streaminfo, &in_streaminfo );

  // Information output
  mjpeg_info ("yuvconvolve (version " YUVRFPS_VERSION ") performs a convolution matrix on yuv streams");
  mjpeg_info ("yuvconvolve -? for help");

	y4m_write_stream_header(fdOut,&out_streaminfo);

  /* in that function we do all the important work */

  fprintf (stderr,"matrix square: %d\n",matlen);

	convolve( fdIn,&in_streaminfo,fdOut,&out_streaminfo,matrix,divisor,matlen);

  y4m_fini_stream_info (&in_streaminfo);
  y4m_fini_stream_info (&out_streaminfo);

  free (matrix);   /* was leaked before */

  return 0;
}
コード例 #8
0
/* Inverse-telecine main: read an interlaced NTSC (30000:1001) YUV4MPEG
 * stream on stdin and emit a progressive NTSC_FILM (24000:1001) stream on
 * stdout by dropping the repeated field pattern of 3:2 pulldown.  For every
 * five input frames A B (B+C) (C+D) D, the frames A B C D are written.
 * Exits 0 when input is exhausted. */
int main(int argc, char **argv)
	{
	int	c, err, ilace;
	int	fd_in = fileno(stdin), fd_out = fileno(stdout);
	y4m_ratio_t rate;
	y4m_stream_info_t si, so;
	y4m_frame_info_t fi;
	uint8_t *top1[3], *bot1[3], *top2[3], *bot2[3];

	opterr = 0;
	while	((c = getopt(argc, argv, "h")) != EOF)
		{
		switch	(c)
			{
			case	'h':
			case	'?':
			default:
				usage();
			}
		}

	y4m_accept_extensions(1);
	y4m_init_stream_info(&si);
	y4m_init_stream_info(&so);
	y4m_init_frame_info(&fi);

	err = y4m_read_stream_header(fd_in, &si);
	if	(err != Y4M_OK)
		mjpeg_error_exit1("Input stream error: %s\n", y4m_strerr(err));

	if	(y4m_si_get_plane_count(&si) != 3)
		mjpeg_error_exit1("only 3 plane formats supported");

	rate = y4m_si_get_framerate(&si);
	if	(!Y4M_RATIO_EQL(rate, y4m_fps_NTSC))
		mjpeg_error_exit1("input stream not NTSC 30000:1001");

	ilace = y4m_si_get_interlace(&si);
	if	(ilace != Y4M_ILACE_BOTTOM_FIRST && ilace != Y4M_ILACE_TOP_FIRST)
		mjpeg_error_exit1("input stream not interlaced");

/*
 * Allocate one half-frame (single field) buffer per plane for each of the
 * four field sets.  Allocations are now checked; they were not before.
*/
	for	(c = 0; c < 3; c++)
		{
		size_t	len = y4m_si_get_plane_length(&si, c) / 2;

		top1[c] = (uint8_t *) malloc(len);
		bot1[c] = (uint8_t *) malloc(len);
		top2[c] = (uint8_t *) malloc(len);
		bot2[c] = (uint8_t *) malloc(len);
		if	(!top1[c] || !bot1[c] || !top2[c] || !bot2[c])
			mjpeg_error_exit1("out of memory allocating field buffers");
		}

	y4m_copy_stream_info(&so, &si);
	y4m_si_set_framerate(&so, y4m_fps_NTSC_FILM);
	y4m_si_set_interlace(&so, Y4M_ILACE_NONE);

/*
 * At this point the input stream has been verified to be interlaced NTSC,
 * the output stream rate set to NTSC_FILM, interlacing tag changed to 
 * progressive, and the field buffers allocated.
 *
 * Time to write the output stream header and commence processing input.
*/
	y4m_write_stream_header(fd_out, &so);

	while	(1)
		{
		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* A */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* B */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		err = y4m_read_fields(fd_in, &si, &fi, top2, bot2);
		if	(err != Y4M_OK)
			{
/*
 * End of input when reading the 2nd "mixed field" frame (C+D).  The previous
 * frame was the first "mixed field" frame (B+C).  Rather than emit a mixed
 * interlaced frame duplicate a field and output the previous frame.
*/
			if	(ilace == Y4M_ILACE_BOTTOM_FIRST)
				y4m_write_fields(fd_out, &so, &fi, bot1,bot1);
			else
				y4m_write_fields(fd_out, &so, &fi, top1,top1);
			goto done;
			}
/*
 * Now the key part of the processing - effectively discarding the first mixed
 * frame with fields from frames B + C and creating the C frame from the two
 * mixed frames.  For a BOTTOM FIELD FIRST stream use the 'top' field from
 * frame 3 and the 'bottom' fields from frame 4.  With a TOP FIELD FIRST stream
 * it's the other way around - use the 'bottom' field from frame 3 and the
 * 'top' field from frame 4.
*/
		if	(ilace == Y4M_ILACE_BOTTOM_FIRST)
			y4m_write_fields(fd_out, &so, &fi, top1, bot2);	/* C */
		else
			y4m_write_fields(fd_out, &so, &fi, top2, bot1); /* C */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)		/* was unchecked: stale data was written at EOF */
			goto done;
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* D */
		}
done:	y4m_fini_frame_info(&fi);
	y4m_fini_stream_info(&si);
	y4m_fini_stream_info(&so);
	exit(0);
	}