static int y4m_write_stream_header2(FILE *fd, y4m_stream_info_t *i)
{
  char s[Y4M_LINE_MAX+1];
  int n;
  int err;

  y4m_ratio_t tmpframerate = y4m_si_get_framerate(i);
  y4m_ratio_t tmpsampleaspect = y4m_si_get_sampleaspect(i);
  y4m_ratio_reduce(&tmpframerate);
  y4m_ratio_reduce(&tmpsampleaspect);
  n = tc_snprintf(s, sizeof(s), "%s W%d H%d F%d:%d I%s A%d:%d",
	       Y4M_MAGIC,
	       y4m_si_get_width(i),
	       y4m_si_get_height(i),
	       tmpframerate.n, tmpframerate.d,
	       (y4m_si_get_interlace(i) == Y4M_ILACE_NONE) ? "p" :
	       (y4m_si_get_interlace(i) == Y4M_ILACE_TOP_FIRST) ? "t" :
	       (y4m_si_get_interlace(i) == Y4M_ILACE_BOTTOM_FIRST) ? "b" : "?",
	       tmpsampleaspect.n, tmpsampleaspect.d);
  if (n < 0) return Y4M_ERR_HEADER;
  if ((err = y4m_snprint_xtags(s + n, sizeof(s) - n - 1, y4m_si_xtags(i)))
      != Y4M_OK)
    return err;
  /* zero on error */
  return (fwrite(s, strlen(s), 1, fd) ? Y4M_OK : Y4M_ERR_SYSTEM);

}
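For context, here is a minimal caller sketch (not from any of the indexed projects) showing how a y4m_stream_info_t might be filled in, using the y4m_si_set_* setters that also appear in Example #11; the 720x480 NTSC numbers and the helper name write_header_example are illustrative only.

/* Hypothetical caller: build a minimal stream description and emit its
 * header to stdout with the writer shown above. */
static int write_header_example(void)
{
  y4m_stream_info_t info;
  y4m_ratio_t fps = { 30000, 1001 };   /* NTSC frame rate */
  y4m_ratio_t sar = { 1, 1 };          /* square pixels */
  int err;

  y4m_init_stream_info(&info);
  y4m_si_set_width(&info, 720);
  y4m_si_set_height(&info, 480);
  y4m_si_set_interlace(&info, Y4M_ILACE_NONE);
  y4m_si_set_framerate(&info, fps);
  y4m_si_set_sampleaspect(&info, sar);

  err = y4m_write_stream_header2(stdout, &info);  /* reduced ratios end up in the header line */
  y4m_fini_stream_info(&info);
  return err;
}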
Example #2
y4m_ratio_t
ratio_minus_1 (y4m_ratio_t a)
{
  y4m_ratio_t sum;
  sum.n = a.n - a.d;
  sum.d = a.d;
  y4m_ratio_reduce (&sum);
  return sum;
}
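Since the denominator is left untouched, subtracting one whole unit just subtracts d from n, and the final reduce keeps the result in lowest terms. A quick illustrative check (values made up):

/* 5/2 - 1 = 3/2; 6/4 - 1 = 2/4, which y4m_ratio_reduce() lowers to 1:2. */
y4m_ratio_t r = { 5, 2 };
r = ratio_minus_1(r);   /* r is now 3:2 */
y4m_ratio_t q = { 6, 4 };
q = ratio_minus_1(q);   /* q is now 1:2 */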
Example #3
/* Helper function that adds two numbers represented as fractions */
y4m_ratio_t
add_ratio (y4m_ratio_t a, y4m_ratio_t b)
{
  y4m_ratio_t sum;

  sum.n = a.n * b.d + b.n * a.d;
  sum.d = a.d * b.d;
  y4m_ratio_reduce (&sum);
  return sum;
}
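The sum uses the usual cross-multiplication a/b + c/d = (a*d + c*b)/(b*d), and y4m_ratio_reduce() then cancels the common factor. A quick illustrative check (values made up):

/* 1/3 + 1/6 = (1*6 + 1*3)/(3*6) = 9/18, reduced to 1:2. */
y4m_ratio_t a = { 1, 3 };
y4m_ratio_t b = { 1, 6 };
y4m_ratio_t s = add_ratio(a, b);   /* s.n == 1, s.d == 2 */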
Example #4
mpeg_aspect_code_t 
mpeg_frame_aspect_code( int mpeg_version, y4m_ratio_t aspect_ratio )
{
	mpeg_aspect_code_t i;
	y4m_ratio_t red_ratio = aspect_ratio;
	y4m_ratio_reduce( &red_ratio );
	if( mpeg_version < 1 || mpeg_version > 2 )
		return 0;
    /* (start at '1', because 0 is unknown/illegal) */
	for( i = 1; i < mpeg_num_aspect_ratios[mpeg_version-1]; ++i )
	{
		y4m_ratio_t red_entry =  mpeg_aspect_ratios[mpeg_version-1][i];
		y4m_ratio_reduce( &red_entry );
		if(  Y4M_RATIO_EQL( red_entry, red_ratio) )
			return i;
	}

	return 0;
			
}
Example #5
int y4m_parse_ratio(y4m_ratio_t *r, const char *s)
{
    char *t = strchr(s, ':');
    if (t == NULL) return Y4M_ERR_RANGE;
    r->n = atoi(s);
    r->d = atoi(t+1);
    if (r->d < 0) return Y4M_ERR_RANGE;
    /* 0:0 == unknown, so that is ok, otherwise zero denominator is bad */
    if ((r->d == 0) && (r->n != 0)) return Y4M_ERR_RANGE;
    y4m_ratio_reduce(r);
    return Y4M_OK;
}
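A brief illustrative use of this parser (the literal strings are examples only, not from the original code):

y4m_ratio_t r;
int err = y4m_parse_ratio(&r, "30000:1001");
/* err == Y4M_OK and r is 30000:1001, already in lowest terms.
 * "30" (no colon) or "1:0" would return Y4M_ERR_RANGE instead,
 * while "0:0" is accepted as the library's "unknown" ratio. */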
Example #6
File: yuv4mpeg.c  Project: c0ntrol/veejay
int y4m_write_stream_header_cb(y4m_cb_writer_t * fd, const y4m_stream_info_t *i)
{
  char s[Y4M_LINE_MAX+1];
  int n;
  int err;
  y4m_ratio_t rate = i->framerate;
  y4m_ratio_t aspect = i->sampleaspect;
  const char *chroma_keyword = y4m_chroma_keyword(i->chroma);

  if ((i->chroma == Y4M_UNKNOWN) || (chroma_keyword == NULL))
    return Y4M_ERR_HEADER;
  if (_y4mparam_feature_level < 1) {
    if ((i->chroma != Y4M_CHROMA_420JPEG) &&
	(i->chroma != Y4M_CHROMA_420MPEG2) &&
	(i->chroma != Y4M_CHROMA_420PALDV))
      return Y4M_ERR_FEATURE;
    if (i->interlace == Y4M_ILACE_MIXED)
      return Y4M_ERR_FEATURE;
  }
  y4m_ratio_reduce(&rate);
  y4m_ratio_reduce(&aspect);
  n = snprintf(s, sizeof(s), "%s W%d H%d F%d:%d I%s A%d:%d C%s",
	       Y4M_MAGIC,
	       i->width,
	       i->height,
	       rate.n, rate.d,
	       (i->interlace == Y4M_ILACE_NONE) ? "p" :
	       (i->interlace == Y4M_ILACE_TOP_FIRST) ? "t" :
	       (i->interlace == Y4M_ILACE_BOTTOM_FIRST) ? "b" :
	       (i->interlace == Y4M_ILACE_MIXED) ? "m" : "?",
	       aspect.n, aspect.d,
	       chroma_keyword
	       );
  if ((n < 0) || (n > Y4M_LINE_MAX)) return Y4M_ERR_HEADER;
  if ((err = y4m_snprint_xtags(s + n, sizeof(s) - n - 1, &(i->x_tags))) 
      != Y4M_OK) 
    return err;
  /* non-zero on error */
  return (y4m_write_cb(fd, s, strlen(s)) ? Y4M_ERR_SYSTEM : Y4M_OK);
}
Example #7
mpeg_framerate_code_t 
mpeg_framerate_code( y4m_ratio_t framerate )
{
	mpeg_framerate_code_t i;
  
	y4m_ratio_reduce(&framerate);
    /* start at '1', because 0 is unknown/illegal */
	for (i = 1; i < mpeg_num_framerates; ++i) {
		if (Y4M_RATIO_EQL(framerate, mpeg_framerates[i]))
			return i;
	}
	return 0;
}
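An illustrative lookup, assuming mpeg_framerates[] follows the standard MPEG-1/2 frame_rate_code table (so NTSC 30000:1001 would map to code 4):

y4m_ratio_t ntsc = { 30000, 1001 };
y4m_ratio_t odd  = { 17, 1 };
mpeg_framerate_code_t c1 = mpeg_framerate_code(ntsc);  /* a nonzero table code */
mpeg_framerate_code_t c2 = mpeg_framerate_code(odd);   /* 0: unknown/illegal */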
Example #8
y4m_ratio_t 
mpeg_aspect_ratio( int mpeg_version,  mpeg_aspect_code_t code )
{
	y4m_ratio_t ratio;
    if ((mpeg_version >= 1) && (mpeg_version <= 2) &&
        (code > 0) && (code < mpeg_num_aspect_ratios[mpeg_version-1]))
	{
		ratio = mpeg_aspect_ratios[mpeg_version-1][code];
		y4m_ratio_reduce(&ratio);
		return ratio;
	}
    else
		return y4m_sar_UNKNOWN;
}
Example #9
int y4m_write_stream_header(int fd, y4m_stream_info_t *i)
{
  char s[Y4M_LINE_MAX+1];
  int n;
  int err;

  y4m_ratio_reduce(&(i->framerate));
  y4m_ratio_reduce(&(i->sampleaspect));
  n = snprintf(s, sizeof(s), "%s W%d H%d F%d:%d I%s A%d:%d",
	       Y4M_MAGIC,
	       i->width,
	       i->height,
	       i->framerate.n, i->framerate.d,
	       (i->interlace == Y4M_ILACE_NONE) ? "p" :
	       (i->interlace == Y4M_ILACE_TOP_FIRST) ? "t" :
	       (i->interlace == Y4M_ILACE_BOTTOM_FIRST) ? "b" : "?",
	       i->sampleaspect.n, i->sampleaspect.d);
  if ((n < 0) || (n > Y4M_LINE_MAX)) return Y4M_ERR_HEADER;
  if ((err = y4m_snprint_xtags(s + n, sizeof(s) - n - 1, &(i->x_tags))) 
      != Y4M_OK) 
    return err;
  /* non-zero on error */
  return (y4m_write(fd, s, strlen(s)) ? Y4M_ERR_SYSTEM : Y4M_OK);
}
Example #10
y4m_ratio_t
mpeg_conform_framerate( double fps )
{
	mpeg_framerate_code_t i;
	y4m_ratio_t result;

	/* try to match it to a standard frame rate */
    /* (start at '1', because 0 is unknown/illegal) */
	for (i = 1; i < mpeg_num_framerates; i++) 
	{
		double deviation = 1.0 - (Y4M_RATIO_DBL(mpeg_framerates[i]) / fps);
		if ( (deviation > -MPEG_FPS_TOLERANCE) &&
			 (deviation < +MPEG_FPS_TOLERANCE) )
			return mpeg_framerates[i];
	}
	/* no luck?  just turn it into a ratio (6 decimal place accuracy) */
	result.n = (int)((fps * 1000000.0) + 0.5);
	result.d = 1000000;
	y4m_ratio_reduce(&result);
	return result;
}
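Two illustrative calls, one hitting the table and one falling through to the reduced-ratio fallback (assuming 29.97 lies within MPEG_FPS_TOLERANCE of 30000/1001, as it does for any sensible tolerance):

y4m_ratio_t a = mpeg_conform_framerate(29.97);  /* returns the 30000:1001 table entry */
y4m_ratio_t b = mpeg_conform_framerate(13.5);   /* no match: 13500000/1000000 reduces to 27:2 */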
Example #11
int
main (int argc, char *argv[])
{
  extern char *optarg;
  int cpucap = cpu_accel ();
  int c;
  int fd_in = 0;
  int fd_out = 1;
  int errno = 0;
  int have_framerate = 0;
  int force_interlacing = 0;
  y4m_frame_info_t iframeinfo;
  y4m_stream_info_t istreaminfo;
  y4m_frame_info_t oframeinfo;
  y4m_stream_info_t ostreaminfo;
  int output_frame_number = 0;
  int input_frame_number = 0;
  y4m_ratio_t output_frame_rate, input_frame_rate, frame_rate_ratio;
  float ratio = 0;		// input frame rate / output frame rate (< 1 when upsampling)
  int scene_change;
  y4m_ratio_t ratio_percent_frame;
  float percent_threshold = 0.02;

/* percent_threshold is there to avoid interpolating frames when the output frame
 * is very close to an input frame
 */

  mjpeg_log (LOG_INFO, "-------------------------------------------------");
  mjpeg_log (LOG_INFO, "   Motion-Compensating-Frame-Rate-Converter     ");
  mjpeg_log (LOG_INFO, "-------------------------------------------------");

  while ((c = getopt (argc, argv, "hvb:p:r:t:s:f")) != -1)
    {
      switch (c)
	{
	case 'h':
	  {
	    mjpeg_log (LOG_INFO, "Usage ");
	    mjpeg_log (LOG_INFO, "-------------------------");
	    mjpeg_log (LOG_INFO, "  This program converts frame rates");
	    mjpeg_log (LOG_INFO,
		       "with a smart algorithm that estimates the motion of the elements");
	    mjpeg_log (LOG_INFO,
		       "to smooth the motion, rather than duplicating frames.");
	    mjpeg_log (LOG_INFO,
		       "  It's way smoother, but introduces a bit of blocking and/or");
	    mjpeg_log (LOG_INFO,
		       " maybe blurryness when things move too fast.");
	    mjpeg_log (LOG_INFO, " ");
	    mjpeg_log (LOG_INFO,
		       " -r Frame rate for the resulting stream (in X:Y fractional form)");
	    mjpeg_log (LOG_INFO,
		       " -b block size (default = 8, will be rounded to even number )");
	    mjpeg_log (LOG_INFO,
		       " -p search path radius (default = 8, do not use high values ~ > 20)");
	    mjpeg_log (LOG_INFO,
		       "-t frame approximation threshold (default=50, higher=better)");
	    mjpeg_log (LOG_INFO,
		       "-s scene change threshold (default=8, 0=disable scene change detection)");
	    mjpeg_log (LOG_INFO,
		       " -f force processing interlaced input (don't know what it does)");

	    mjpeg_log (LOG_INFO, " -v verbose/debug");

	    exit (0);
	    break;
	  }
	case 'v':
	  {
	    verbose = 1;
	    break;
	  }
	case 'f':
	  {
	    force_interlacing = 1;
	    break;
	  }
	case 'b':
	  {
	    block_size = strtol (optarg, (char **) NULL, 10);
	    /* we only want even block sizes */
	    if (block_size % 2 != 0)
	      {
		block_size = block_size + 1;
		mjpeg_log (LOG_WARN, "Block size changed to %d", block_size);
	      }
	    else
	      mjpeg_log (LOG_INFO, "Block size: %d", block_size);
	    break;
	  }
	case 'p':
	  {
	    search_path_radius = strtol (optarg, (char **) NULL, 10);	/* safer atoi */
	    mjpeg_log (LOG_INFO, "Search radius %d", search_path_radius);

	    break;
	  }
	case 'r':
	  {
	    if (Y4M_OK != y4m_parse_ratio (&output_frame_rate, optarg))
	      mjpeg_error_exit1
		("Syntax for frame rate should be Numerator:Denominator");


	    mjpeg_log (LOG_INFO, "New Frame rate %d:%d",
		       output_frame_rate.n, output_frame_rate.d);
	    have_framerate = 1;
	    break;
	  }
	case 't':
	  {
	    percent_threshold = strtol (optarg, (char **) NULL, 10);
	    if ((percent_threshold > 1) && (percent_threshold <= 1024))
	      percent_threshold = 1.0 / percent_threshold;
	    else
	      mjpeg_error_exit1 ("Threshold should be between 2 and 1024");

	    mjpeg_log (LOG_INFO, "Approximation threshold %d",
		       (int) ((float) 1.0 / percent_threshold));
	    break;

	  }
	case 's':
	  {
	    scene_change_threshold = strtol (optarg, (char **) NULL, 10);
	    if (scene_change_threshold == 0)
	      mjpeg_log (LOG_INFO, "Scene change detection disabled");
	    else
	      mjpeg_log (LOG_INFO, "Scene change threshold: %d00 percent",
			 scene_change_threshold);
	    break;

	  }
	}
    }

  if (!have_framerate)
    {
      mjpeg_error_exit1
	("Please specify a frame rate; yuvmotionfps -h for more info");
    }

  /* initialize motion_library */
  init_motion_search ();

  /* initialize MMX transforms (fixme) */
  if ((cpucap & ACCEL_X86_MMXEXT) != 0 || (cpucap & ACCEL_X86_SSE) != 0)
    {
#if 0
      mjpeg_log (LOG_INFO,
		 "FIXME: could use MMX/SSE Block/Frame-Copy/Blend if I had one ;-)");
#endif
    }

  /* initialize stream-information */
  y4m_accept_extensions (1);
  y4m_init_stream_info (&istreaminfo);
  y4m_init_frame_info (&iframeinfo);
  y4m_init_stream_info (&ostreaminfo);
  y4m_init_frame_info (&oframeinfo);

  /* open input stream */
  if ((errno = y4m_read_stream_header (fd_in, &istreaminfo)) != Y4M_OK)
    {
      mjpeg_log (LOG_ERROR, "Couldn't read YUV4MPEG header: %s!",
		 y4m_strerr (errno));
      exit (1);
    }

  /* get format information */
  width = y4m_si_get_width (&istreaminfo);
  height = y4m_si_get_height (&istreaminfo);
  input_chroma_subsampling = y4m_si_get_chroma (&istreaminfo);
  mjpeg_log (LOG_INFO, "Y4M-Stream is %ix%i(%s)",
	     width,
	     height,
	     input_chroma_subsampling ==
	     Y4M_CHROMA_420JPEG ? "4:2:0 MPEG1" : input_chroma_subsampling
	     ==
	     Y4M_CHROMA_420MPEG2 ? "4:2:0 MPEG2" :
	     input_chroma_subsampling ==
	     Y4M_CHROMA_420PALDV ? "4:2:0 PAL-DV" :
	     input_chroma_subsampling ==
	     Y4M_CHROMA_444 ? "4:4:4" : input_chroma_subsampling ==
	     Y4M_CHROMA_422 ? "4:2:2" : input_chroma_subsampling ==
	     Y4M_CHROMA_411 ? "4:1:1 NTSC-DV" : input_chroma_subsampling
	     ==
	     Y4M_CHROMA_MONO ? "MONOCHROME" : input_chroma_subsampling ==
	     Y4M_CHROMA_444ALPHA ? "4:4:4:4 ALPHA" : "unknown");

  /* if chroma-subsampling isn't supported bail out ... */
  switch (input_chroma_subsampling)
    {
    case Y4M_CHROMA_420JPEG:
      break;
    case Y4M_CHROMA_420PALDV:
    case Y4M_CHROMA_420MPEG2:
    case Y4M_CHROMA_411:
      mjpeg_log (LOG_WARN,
		 "This chroma subsampling mode has not been thoroughly tested");
      break;
    default:

      mjpeg_error_exit1
	("Y4M-Stream is not 4:2:0. Other chroma-modes currently not allowed. Sorry.");
    }

  /* the output is progressive 4:2:0 MPEG 1 */
  y4m_si_set_interlace (&ostreaminfo, Y4M_ILACE_NONE);
  y4m_si_set_chroma (&ostreaminfo, Y4M_CHROMA_420JPEG);
  y4m_si_set_width (&ostreaminfo, width);
  y4m_si_set_height (&ostreaminfo, height);
  y4m_si_set_sampleaspect (&ostreaminfo,
			   y4m_si_get_sampleaspect (&istreaminfo));

  input_frame_rate = y4m_si_get_framerate (&istreaminfo);

  y4m_si_set_framerate (&ostreaminfo, output_frame_rate);

  if (width % block_size != 0)
    {
      mjpeg_log (LOG_WARN,
		 "Warning, stream width(%d) is not a multiple of block_size (%d)",
		 width, block_size);
      mjpeg_log (LOG_WARN,
		 "The right side of the image might not be what you want");
    }
  if (height % block_size != 0)
    {
      mjpeg_log (LOG_WARN,
		 "Warning, stream height(%d) is not a multiple of block_size (%d)",
		 height, block_size);
      mjpeg_log (LOG_WARN,
		 "The lower side of the image might not be what you want");
    }



  /* Calculate the different ratios:
   * ratio is (input framerate / output framerate)
   * ratio_percent_frame is the fractional representation of percent frame
   */
  frame_rate_ratio.n = input_frame_rate.n * output_frame_rate.d;
  frame_rate_ratio.d = input_frame_rate.d * output_frame_rate.n;
  y4m_ratio_reduce (&frame_rate_ratio);
  ratio = (float) frame_rate_ratio.n / frame_rate_ratio.d;
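  /* Worked example (illustrative numbers, not from the original source):
   * for 25:1 input and 30000:1001 output, frame_rate_ratio starts as
   * 25025:30000 and y4m_ratio_reduce() lowers it to 1001:1200, so
   * ratio ~= 0.834 (upsampling). */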

  ratio_percent_frame.d = 1;
  ratio_percent_frame.n = 0;

  if (ratio == 0)
    mjpeg_error_exit1 ("Cannot have ratio =0 ");
  else if (ratio > 128)
    mjpeg_error_exit1 ("Cannot have ratio >128  ");


  if ((y4m_si_get_interlace (&istreaminfo) != Y4M_ILACE_NONE)
      && (!force_interlacing))
    {
      mjpeg_error_exit1 ("Sorry, can only convert progressive streams");
    }

  /* write the outstream header */
  y4m_write_stream_header (fd_out, &ostreaminfo);

  /* now allocate the needed buffers */
  {
    /* calculate the memory offset needed to allow the processing
     * functions to overshoot. The biggest overshoot is needed by the
     * MC functions, so we'll use 8*width...
     */
    buff_offset = width * 8;
    buff_size = buff_offset * 2 + width * height;

    inframe[0] = buff_offset + (uint8_t *) malloc (buff_size);
    inframe[1] = buff_offset + (uint8_t *) malloc (buff_size);
    inframe[2] = buff_offset + (uint8_t *) malloc (buff_size);

    reconstructed[0] = buff_offset + (uint8_t *) malloc (buff_size);
    reconstructed[1] = buff_offset + (uint8_t *) malloc (buff_size);
    reconstructed[2] = buff_offset + (uint8_t *) malloc (buff_size);

    frame1[0] = buff_offset + (uint8_t *) malloc (buff_size);
    frame1[1] = buff_offset + (uint8_t *) malloc (buff_size);
    frame1[2] = buff_offset + (uint8_t *) malloc (buff_size);

    mjpeg_log (LOG_INFO, "Buffers allocated.");
  }

  /* initialize motion-search-pattern */
  init_search_pattern ();

  errno = y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, frame1);
  if (errno != Y4M_OK)
    goto The_end;

  /* read every frame until the end of the input stream and process it */
  while (Y4M_OK == (errno = y4m_read_frame (fd_in,
					    &istreaminfo,
					    &iframeinfo, inframe)))
    {
/* frame1 contains the previous input frame
 * inframe contains the current input frame
 * reconstructed contains the current output frame
 * percent_frame is the amount of time after which the output frame is sent 
 * 	in percent of the time between input frames
 *
 * Input:
 * frame1 . . . . . . . . . . . . . . . . . . inframe
 * Output: 
 * . . . . . . . . . . .reconstructed. . . . . . . 
 * |<- - percent_frame - - - ->|
 * |< - - - - - - - - - -100% - - - - - - - - - >|
 *
 * The variable ratio_percent_frame is the fractional representation of
 * percent_frame; it is there to avoid rounding errors 
 */
      input_frame_number++;

      if (verbose)
	{
	  mjpeg_log (LOG_INFO, "Input frame number %d", input_frame_number);
	}

      while (percent_frame < (1.0 - percent_threshold))
	{
	  output_frame_number++;
	  if (verbose)
	    {
	      mjpeg_log (LOG_INFO, "Output frame number %d",
			 output_frame_number);
	    }

#define ABS(value) (((value)<0)?-(value):(value))

	  if (ABS (percent_frame) <= percent_threshold)
	    {
	      /* I put a threshold here to avoid wasting time */
	      /* The output frame coincides with the input frame
	       * so there is no need to do any processing 
	       * just copy the input frame as is */
	      y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1);
	      if (verbose)
		mjpeg_log (LOG_INFO, "Percent %f rounded to next frame",
			   percent_frame);
	    }
	  else
	    {
	      /* We have to interpolate the frame (between the current inframe
	       * and the previous frame1 
	       * if there is a scene change, motion_compensate_field will
	       * return 1 and we use the previous frame */

	      if (verbose)
		mjpeg_log (LOG_INFO, "Percent %f", percent_frame);

	      scene_change = motion_compensate_field ();
	      if (scene_change)
		{
		  mjpeg_log (LOG_INFO, "Scene change at frame %d",
			     input_frame_number);
		  y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo, frame1);
		}
	      else
		{
		  y4m_write_frame (fd_out, &ostreaminfo, &oframeinfo,
				   reconstructed);
		}
	    }
	  ratio_percent_frame =
	    add_ratio (ratio_percent_frame, frame_rate_ratio);
	  percent_frame = Y4M_RATIO_DBL (ratio_percent_frame);

	}

      /* Skip input frames if downsampling (ratio > 1).
       * When upsampling, ratio < 1, so at this point 1 < percent_frame < 2
       * and the loop below is never entered. */
      while (percent_frame >= 2)
	{
	  percent_frame = percent_frame - 1;
	  ratio_percent_frame = ratio_minus_1 (ratio_percent_frame);
	  if (Y4M_OK !=
	      (errno =
	       y4m_read_frame (fd_in, &istreaminfo, &iframeinfo, inframe)))
	    goto The_end;
	}
      ratio_percent_frame = ratio_minus_1 (ratio_percent_frame);
      percent_frame = percent_frame - 1;

      /* store the previous frame */
      memcpy (frame1[0], inframe[0], width * height);
      memcpy (frame1[1], inframe[1], width * height / 4);
      memcpy (frame1[2], inframe[2], width * height / 4);

    }

The_end:

  /* free allocated buffers */
  {
    free (inframe[0] - buff_offset);
    free (inframe[1] - buff_offset);
    free (inframe[2] - buff_offset);

    free (reconstructed[0] - buff_offset);
    free (reconstructed[1] - buff_offset);
    free (reconstructed[2] - buff_offset);

    free (frame1[0] - buff_offset);
    free (frame1[1] - buff_offset);
    free (frame1[2] - buff_offset);


    mjpeg_log (LOG_INFO, "Buffers freed.");
  }

  /* did stream end unexpectedly ? */
  if (errno != Y4M_ERR_EOF)
    mjpeg_error_exit1 ("%s", y4m_strerr (errno));

  /* Exit gently */
  return (0);
}