Пример #1
0
/*
 * Guess the sample aspect ratio (SAR) that best matches a display aspect
 * ratio (DAR) for a frame of the given pixel dimensions.
 *
 * The SAR implied by the DAR and frame geometry is compared against a table
 * of well-known SARs; the first entry within GUESS_ASPECT_TOLERANCE
 * (relative) is returned.  Returns y4m_sar_UNKNOWN when nothing matches.
 */
y4m_ratio_t y4m_guess_sar(int width, int height, y4m_ratio_t dar)
{
    static const y4m_ratio_t *known_sars[] =
    {
        &y4m_sar_SQUARE,
        &y4m_sar_NTSC_CCIR601,
        &y4m_sar_NTSC_16_9,
        &y4m_sar_NTSC_SVCD_4_3,
        &y4m_sar_NTSC_SVCD_16_9,
        &y4m_sar_PAL_CCIR601,
        &y4m_sar_PAL_16_9,
        &y4m_sar_PAL_SVCD_4_3,
        &y4m_sar_PAL_SVCD_16_9,
        &y4m_sar_UNKNOWN          /* sentinel: terminates the scan */
    };
    double implicit_sar = (double)(dar.n * height) / (double)(dar.d * width);
    const y4m_ratio_t **entry;

    for (entry = known_sars;
         !Y4M_RATIO_EQL(**entry, y4m_sar_UNKNOWN);
         entry++) {
        double ratio = implicit_sar / Y4M_RATIO_DBL(**entry);
        if ((ratio > (1.0 - GUESS_ASPECT_TOLERANCE)) &&
            (ratio < (1.0 + GUESS_ASPECT_TOLERANCE)))
            return **entry;
    }
    return y4m_sar_UNKNOWN;
}
Пример #2
0
/*
 * Map a frame rate (as a reduced ratio) to its MPEG frame-rate code.
 * Returns 0 when the rate matches no standard MPEG frame rate.
 */
mpeg_framerate_code_t 
mpeg_framerate_code( y4m_ratio_t framerate )
{
	mpeg_framerate_code_t code;

	y4m_ratio_reduce(&framerate);
	/* Code 0 is unknown/illegal, so the scan begins at 1. */
	for (code = 1; code < mpeg_num_framerates; code++) {
		if (Y4M_RATIO_EQL(framerate, mpeg_framerates[code]))
			return code;
	}
	return 0;
}
Пример #3
0
/*
 * Map a frame aspect ratio to the MPEG aspect-ratio code for the given
 * MPEG version (1 or 2).  Both the argument and each table entry are
 * reduced before comparison.  Returns 0 for an unsupported version or
 * when no table entry matches.
 */
mpeg_aspect_code_t 
mpeg_frame_aspect_code( int mpeg_version, y4m_ratio_t aspect_ratio )
{
	mpeg_aspect_code_t code;
	y4m_ratio_t wanted = aspect_ratio;

	y4m_ratio_reduce(&wanted);

	/* Only MPEG-1 and MPEG-2 have aspect-code tables. */
	if (mpeg_version < 1 || mpeg_version > 2)
		return 0;

	/* Code 0 is unknown/illegal, so start the search at 1. */
	for (code = 1; code < mpeg_num_aspect_ratios[mpeg_version - 1]; code++) {
		y4m_ratio_t candidate = mpeg_aspect_ratios[mpeg_version - 1][code];

		y4m_ratio_reduce(&candidate);
		if (Y4M_RATIO_EQL(candidate, wanted))
			return code;
	}
	return 0;
}
Пример #4
0
/*
 * Guess the MPEG aspect-ratio code implied by a sample aspect ratio and
 * the frame geometry.  Returns 0 when the SAR is unknown, the MPEG
 * version is unsupported, or no code fits within GUESS_ASPECT_TOLERANCE.
 */
mpeg_aspect_code_t 
mpeg_guess_mpeg_aspect_code(int mpeg_version, y4m_ratio_t sampleaspect,
							int frame_width, int frame_height)
{
	if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_UNKNOWN))
		return 0;

	if (mpeg_version == 1) {
		/* MPEG-1 codes identify the *pixel* aspect ratio directly. */
		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_SQUARE))
			return 1;
		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_NTSC_CCIR601))
			return 12;
		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_NTSC_16_9))
			return 6;
		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_PAL_CCIR601))
			return 8;
		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_PAL_16_9))
			return 3;
		return 0;
	}

	if (mpeg_version == 2) {
		int code;
		double true_far;  /* true frame aspect ratio */

		if (Y4M_RATIO_EQL(sampleaspect, y4m_sar_SQUARE))
			return 1;  /* '1' means square *pixels* in MPEG-2; go figure. */

		/* Frame aspect ratio implied by the SAR and frame size. */
		true_far =
			(double)(sampleaspect.n * frame_width) /
			(double)(sampleaspect.d * frame_height);
		/* Code 1 was handled above; scan the table from code 2 onward. */
		for (code = 2; code < (int)(mpeg_num_aspect_ratios[mpeg_version-1]); code++) {
			double ratio =
				true_far / Y4M_RATIO_DBL(mpeg_aspect_ratios[mpeg_version-1][code]);
			if ((ratio > (1.0 - GUESS_ASPECT_TOLERANCE)) &&
			    (ratio < (1.0 + GUESS_ASPECT_TOLERANCE)))
				return code;
		}
		return 0;
	}

	return 0;  /* unsupported MPEG version */
}
Пример #5
0
/*
 * SDL-based YUV4MPEG2 stream player.
 *
 * Reads a 4:2:0 YUV4MPEG2 stream from stdin and plays it in an SDL window
 * using a YV12 overlay.  Options:
 *   -s WxH   override the display size       -t TITLE  window title
 *   -f FPS   override frame rate (0 < FPS <= 200)
 *   -c       free-run (do not sync to the frame rate)
 *   -v N     verbosity 0..2                  -h        usage
 *
 * NOTE(review): depends on file-scope globals defined elsewhere in this
 * file: screen, yuv_overlay, rect, got_sigint, and the helpers usage(),
 * sigint_handler(), print_status(), get_time_diff().
 */
int main(int argc, char *argv[])
{
   int verbosity = 1;
   double time_between_frames = 0.0;  /* microseconds per frame */
   double frame_rate = 0.0;           /* 0.0 => take rate from the stream */
   struct timeval time_now;
   int n, frame;
   unsigned char *yuv[3];             /* planar Y, Cb, Cr input buffers */
   int in_fd = 0;                     /* read the stream from stdin */
   int screenwidth=0, screenheight=0;
   y4m_stream_info_t streaminfo;
   y4m_frame_info_t frameinfo;
   int frame_width;
   int frame_height;
   int wait_for_sync = 1;             /* cleared by -c: play as fast as possible */
   char *window_title = NULL;

   /* Parse command-line options. */
   while ((n = getopt(argc, argv, "hs:t:f:cv:")) != EOF) {
      switch (n) {
         case 'c':
            wait_for_sync = 0;
            break;
         case 's':
            if (sscanf(optarg, "%dx%d", &screenwidth, &screenheight) != 2) {
               mjpeg_error_exit1( "-s option needs two arguments: -s 10x10");
               exit(1);  /* NOTE(review): unreachable — mjpeg_error_exit1 already exits */
            }
            break;
	  case 't':
	    window_title = optarg;
	    break;
	  case 'f':
		  frame_rate = atof(optarg);
		  if( frame_rate <= 0.0 || frame_rate > 200.0 )
			  mjpeg_error_exit1( "-f option needs argument > 0.0 and < 200.0");
		  break;
          case 'v':
	    verbosity = atoi(optarg);
	    if ((verbosity < 0) || (verbosity > 2))
	      mjpeg_error_exit1("-v needs argument from {0, 1, 2} (not %d)",
				verbosity);
	    break;
	  case 'h':
	  case '?':
            usage();
            exit(1);
            break;
         default:
            usage();
            exit(1);
      }
   }

   mjpeg_default_handler_verbosity(verbosity);

   /* Read and validate the YUV4MPEG2 stream header. */
   y4m_accept_extensions(1);
   y4m_init_stream_info(&streaminfo);
   y4m_init_frame_info(&frameinfo);
   if ((n = y4m_read_stream_header(in_fd, &streaminfo)) != Y4M_OK) {
      mjpeg_error("Couldn't read YUV4MPEG2 header: %s!",
         y4m_strerr(n));
      exit (1);
   }

   /* The fixed-size buffers and memcpy sizes below assume 4:2:0 chroma. */
   switch (y4m_si_get_chroma(&streaminfo)) {
   case Y4M_CHROMA_420JPEG:
   case Y4M_CHROMA_420MPEG2:
   case Y4M_CHROMA_420PALDV:
     break;
   default:
     mjpeg_error_exit1("Cannot handle non-4:2:0 streams yet!");
   }

   frame_width = y4m_si_get_width(&streaminfo);
   frame_height = y4m_si_get_height(&streaminfo);

   if ((screenwidth <= 0) || (screenheight <= 0)) {
     /* no user supplied screen size, so let's use the stream info */
     y4m_ratio_t aspect = y4m_si_get_sampleaspect(&streaminfo);
       
     if (!(Y4M_RATIO_EQL(aspect, y4m_sar_UNKNOWN))) {
       /* if pixel aspect ratio present, use it */
#if 1
       /* scale width, but maintain height (line count) */
       screenheight = frame_height;
       screenwidth = frame_width * aspect.n / aspect.d;
#else
       /* alternative (disabled): scale whichever dimension keeps the
        * window no larger than the frame */
       if ((frame_width * aspect.d) < (frame_height * aspect.n)) {
	 screenwidth = frame_width;
	 screenheight = frame_width * aspect.d / aspect.n;
       } else {
	 screenheight = frame_height;
	 screenwidth = frame_height * aspect.n / aspect.d;
       }
#endif
     } else {
       /* unknown aspect ratio -- assume square pixels */
       screenwidth = frame_width;
       screenheight = frame_height;
     }
   }

   /* Initialize the SDL library */
   if( SDL_Init(SDL_INIT_VIDEO) < 0 ) {
      mjpeg_error("Couldn't initialize SDL: %s", SDL_GetError());
      exit(1);
   }

   /* set window title */
   SDL_WM_SetCaption(window_title, NULL);

   /* yuv params: one full-res luma plane, two quarter-size chroma planes
    * (4:2:0).  NOTE(review): malloc results are unchecked — an OOM here
    * would crash in the memcpy calls below. */
   yuv[0] = malloc(frame_width * frame_height * sizeof(unsigned char));
   yuv[1] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));
   yuv[2] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));

   screen = SDL_SetVideoMode(screenwidth, screenheight, 0, SDL_SWSURFACE);
   if ( screen == NULL ) {
      mjpeg_error("SDL: Couldn't set %dx%d: %s",
		  screenwidth, screenheight, SDL_GetError());
      exit(1);
   }
   else {
      mjpeg_debug("SDL: Set %dx%d @ %d bpp",
		  screenwidth, screenheight, screen->format->BitsPerPixel);
   }

   /* since IYUV ordering is not supported by Xv accel on maddog's system
    *  (Matrox G400 --- although, the alias I420 is, but this is not
    *  recognized by SDL), we use YV12 instead, which is identical,
    *  except for ordering of Cb and Cr planes...
    * we swap those when we copy the data to the display buffer...
    */
   yuv_overlay = SDL_CreateYUVOverlay(frame_width, frame_height,
				      SDL_YV12_OVERLAY,
				      screen);
   if ( yuv_overlay == NULL ) {
      mjpeg_error("SDL: Couldn't create SDL_yuv_overlay: %s",
		      SDL_GetError());
      exit(1);
   }
   if ( yuv_overlay->hw_overlay ) 
     mjpeg_debug("SDL: Using hardware overlay.");

   /* Display the whole overlay scaled into the whole window. */
   rect.x = 0;
   rect.y = 0;
   rect.w = screenwidth;
   rect.h = screenheight;

   SDL_DisplayYUVOverlay(yuv_overlay, &rect);

   /* Let Ctrl-C set got_sigint so the playback loop exits cleanly. */
   signal (SIGINT, sigint_handler);

   frame = 0;
   if ( frame_rate == 0.0 ) 
   {
	   /* frame rate has not been set from command-line... */
	   if (Y4M_RATIO_EQL(y4m_fps_UNKNOWN, y4m_si_get_framerate(&streaminfo))) {
	     mjpeg_info("Frame-rate undefined in stream... assuming 25Hz!" );
	     frame_rate = 25.0;
	   } else {
	     frame_rate = Y4M_RATIO_DBL(y4m_si_get_framerate(&streaminfo));
	   }
   }
   time_between_frames = 1.e6 / frame_rate;

   gettimeofday(&time_now,0);

   /* Main playback loop: read a frame, blit it into the overlay, show it,
    * then (optionally) busy-wait until it is time for the next frame. */
   while ((n = y4m_read_frame(in_fd, &streaminfo, &frameinfo, yuv)) == Y4M_OK && (!got_sigint)) {

      /* Lock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         if ( SDL_LockSurface(screen) < 0 ) break;
      }
      if (SDL_LockYUVOverlay(yuv_overlay) < 0) break;

      /* let's draw the data (*yuv[3]) on a SDL screen (*screen) */
      /* Cb/Cr planes are swapped here: input is I420 order, overlay is YV12. */
      memcpy(yuv_overlay->pixels[0], yuv[0], frame_width * frame_height);
      memcpy(yuv_overlay->pixels[1], yuv[2], frame_width * frame_height / 4);
      memcpy(yuv_overlay->pixels[2], yuv[1], frame_width * frame_height / 4);

      /* Unlock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         SDL_UnlockSurface(screen);
      }
      SDL_UnlockYUVOverlay(yuv_overlay);

      /* Show, baby, show! */
      SDL_DisplayYUVOverlay(yuv_overlay, &rect);
      mjpeg_info("Playing frame %4.4d - %s",
		 frame, print_status(frame, frame_rate));

      /* Sleep in 1ms slices until the frame interval has elapsed. */
      if (wait_for_sync)
         while(get_time_diff(time_now) < time_between_frames) {
            usleep(1000);
         }
      frame++;

      gettimeofday(&time_now,0);
   }

   /* EOF is a normal way for the loop to end; anything else is an error. */
   if ((n != Y4M_OK) && (n != Y4M_ERR_EOF))
      mjpeg_error("Couldn't read frame: %s", y4m_strerr(n));

   for (n=0; n<3; n++) {
      free(yuv[n]);
   }

   mjpeg_info("Played %4.4d frames (%s)",
	      frame, print_status(frame, frame_rate));

   SDL_FreeYUVOverlay(yuv_overlay);
   SDL_Quit();

   y4m_fini_frame_info(&frameinfo);
   y4m_fini_stream_info(&streaminfo);
   return 0;
}
Пример #6
0
/*
 * 3:2 pulldown removal ("inverse telecine") for interlaced NTSC input.
 *
 * Reads an interlaced 30000:1001 (NTSC) YUV4MPEG2 stream from stdin and
 * writes a progressive 24000:1001 (NTSC_FILM) stream to stdout, collapsing
 * each telecined 5-frame group (A, B, B+C, C+D, D) back to the four film
 * frames A B C D.  Exits via mjpeg_error_exit1() on malformed input;
 * exits 0 at end of stream.
 */
int main(int argc, char **argv)
	{
	int	c, err, ilace;
	int	fd_in = fileno(stdin), fd_out = fileno(stdout);
	y4m_ratio_t rate;
	y4m_stream_info_t si, so;
	y4m_frame_info_t fi;
	uint8_t *top1[3], *bot1[3], *top2[3], *bot2[3];

	opterr = 0;
	while	((c = getopt(argc, argv, "h")) != EOF)
		{
		switch	(c)
			{
			case	'h':
			case	'?':
			default:
				usage();
			}
		}

	y4m_accept_extensions(1);
	y4m_init_stream_info(&si);
	y4m_init_stream_info(&so);
	y4m_init_frame_info(&fi);

	err = y4m_read_stream_header(fd_in, &si);
	if	(err != Y4M_OK)
		mjpeg_error_exit1("Input stream error: %s\n", y4m_strerr(err));

	if	(y4m_si_get_plane_count(&si) != 3)
		mjpeg_error_exit1("only 3 plane formats supported");

	rate = y4m_si_get_framerate(&si);
	if	(!Y4M_RATIO_EQL(rate, y4m_fps_NTSC))
		mjpeg_error_exit1("input stream not NTSC 30000:1001");

	ilace = y4m_si_get_interlace(&si);
	if	(ilace != Y4M_ILACE_BOTTOM_FIRST && ilace != Y4M_ILACE_TOP_FIRST)
		mjpeg_error_exit1("input stream not interlaced");

/*
 * One half-height buffer per plane for each field of two working frames.
 * (BUGFIX: allocation results are now checked instead of being used blindly.)
 */
	for	(c = 0; c < 3; c++)
		{
		size_t field_len = y4m_si_get_plane_length(&si, c) / 2;

		top1[c] = (uint8_t *) malloc(field_len);
		bot1[c] = (uint8_t *) malloc(field_len);
		top2[c] = (uint8_t *) malloc(field_len);
		bot2[c] = (uint8_t *) malloc(field_len);
		if	(top1[c] == NULL || bot1[c] == NULL ||
			 top2[c] == NULL || bot2[c] == NULL)
			mjpeg_error_exit1("out of memory allocating field buffers");
		}

	y4m_copy_stream_info(&so, &si);
	y4m_si_set_framerate(&so, y4m_fps_NTSC_FILM);
	y4m_si_set_interlace(&so, Y4M_ILACE_NONE);

/*
 * At this point the input stream has been verified to be interlaced NTSC,
 * the output stream rate set to NTSC_FILM, interlacing tag changed to 
 * progressive, and the field buffers allocated.
 *
 * Time to write the output stream header and commence processing input.
*/
	err = y4m_write_stream_header(fd_out, &so);
	if	(err != Y4M_OK)
		mjpeg_error_exit1("Output stream error: %s", y4m_strerr(err));

	while	(1)
		{
		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* A */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* B */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;
		err = y4m_read_fields(fd_in, &si, &fi, top2, bot2);
		if	(err != Y4M_OK)
			{
/*
 * End of input when reading the 2nd "mixed field" frame (C+D).  The previous
 * frame was the first "mixed field" frame (B+C).  Rather than emit a mixed
 * interlaced frame duplicate a field and output the previous frame.
*/
			if	(ilace == Y4M_ILACE_BOTTOM_FIRST)
				y4m_write_fields(fd_out, &so, &fi, bot1,bot1);
			else
				y4m_write_fields(fd_out, &so, &fi, top1,top1);
			goto done;
			}
/*
 * Now the key part of the processing - effectively discarding the first mixed
 * frame with fields from frames B + C and creating the C frame from the two
 * mixed frames.  For a BOTTOM FIELD FIRST stream use the 'top' field from
 * frame 3 and the 'bottom' fields from frame 4.  With a TOP FIELD FIRST stream
 * it's the other way around - use the 'bottom' field from frame 3 and the
 * 'top' field from frame 4.
*/
		if	(ilace == Y4M_ILACE_BOTTOM_FIRST)
			y4m_write_fields(fd_out, &so, &fi, top1, bot2);	/* C */
		else
			y4m_write_fields(fd_out, &so, &fi, top2, bot1); /* C */

		err = y4m_read_fields(fd_in, &si, &fi, top1, bot1);
		if	(err != Y4M_OK)
			goto done;	/* BUGFIX: was unchecked - a stale D frame was written at EOF */
		y4m_write_fields(fd_out, &so, &fi, top1, bot1);		/* D */
		}
done:	y4m_fini_frame_info(&fi);
	y4m_fini_stream_info(&si);
	y4m_fini_stream_info(&so);
	exit(0);
	}
Пример #7
0
/*
 * writeoutYUV4MPEGheader
 * Fill in the output stream parameters (geometry, interlace, frame rate,
 * sample aspect ratio, chroma mode) from 'param' and the edit list 'el',
 * then write the YUV4MPEG2 stream header to 'out_fd'.
 *
 * SAR is chosen in order of preference: param->sar if known, then the edit
 * list's SAR fields, then a guess derived from param->dar via y4m_guess_sar.
 * The requested chroma mode is validated against the data format of frame 0
 * of the edit list; conflicts either warn or exit via mjpeg_error_exit1().
 * Side effect: param->chroma may be updated (DV autodetect, or the
 * '420jpeg' default when it was Y4M_UNKNOWN).
 */
void writeoutYUV4MPEGheader(int out_fd,
			    LavParam *param,
			    EditList el,
			    y4m_stream_info_t *streaminfo)
{
   int n;

   y4m_si_set_width(streaminfo, param->output_width);
   y4m_si_set_height(streaminfo, param->output_height);
   y4m_si_set_interlace(streaminfo, param->interlace);
   y4m_si_set_framerate(streaminfo, mpeg_conform_framerate(el.video_fps));
   if (!Y4M_RATIO_EQL(param->sar, y4m_sar_UNKNOWN)) {
     /* explicit SAR supplied by the caller wins */
     y4m_si_set_sampleaspect(streaminfo, param->sar);
   } else if ((el.video_sar_width != 0) || (el.video_sar_height != 0)) {
     /* fall back to the SAR recorded in the edit list */
     y4m_ratio_t sar;
     sar.n = el.video_sar_width;
     sar.d = el.video_sar_height;
     y4m_si_set_sampleaspect(streaminfo, sar);
   } else {
     /* no idea! ...eh, just guess. */
     mjpeg_warn("unspecified sample-aspect-ratio --- taking a guess...");
     y4m_si_set_sampleaspect(streaminfo,
			     y4m_guess_sar(param->output_width, 
					   param->output_height,
					   param->dar));
   }

   /* Validate the requested chroma mode against the input data format. */
   switch (el_video_frame_data_format(0, &el)) { /* FIXME: checking only 0-th frame. */
   case DATAFORMAT_YUV420:
     switch (param->chroma) {
     case Y4M_UNKNOWN:
     case Y4M_CHROMA_420JPEG:
       break;
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     default:
       mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420jpeg') with this input");
       break;
     }
     break;

   case DATAFORMAT_YUV422:
     switch (param->chroma) {
     case Y4M_CHROMA_422:
       break;
     default:
       mjpeg_error_exit1("must specify chroma '422' with this input");
       break;
     }
     break;

   case DATAFORMAT_DV2:
#ifndef HAVE_LIBDV
     mjpeg_error_exit1("DV input was not configured at compile time");
#else
     /* For DV input, parse the frame header to learn the actual chroma
      * subsampling, then check it against what the user asked for. */
     el_get_video_frame(jpeg_data, 0, &el); /* FIXME: checking only 0-th frame. */
     dv_parse_header(decoder, jpeg_data);
     switch(decoder->sampling) {
     case e_dv_sample_420:
       switch (param->chroma) {
       case Y4M_UNKNOWN:
	 mjpeg_info("set chroma '420paldv' from input");
	 param->chroma = Y4M_CHROMA_420PALDV;
	 break;
       case Y4M_CHROMA_420PALDV:
	 break;
       case Y4M_CHROMA_420JPEG:
       case Y4M_CHROMA_420MPEG2:
	 mjpeg_warn("4:2:0 chroma should be '420paldv' with this input");
	 break;
       case Y4M_CHROMA_422:
         if(libdv_pal_yv12 == 1 )
	   mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       default:
	 mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       }
       break;
     case e_dv_sample_411:
       if (param->chroma != Y4M_CHROMA_411)
	 mjpeg_info("chroma '411' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     case e_dv_sample_422:
       if (param->chroma != Y4M_CHROMA_422)
	 mjpeg_info("chroma '422' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     default:
       break;
     }
#endif
     break;

   case DATAFORMAT_MJPG:
     if (param->chroma != Y4M_CHROMA_422 && el.chroma == Y4M_CHROMA_422)
       mjpeg_info("chroma '422' recommended with this input");
     switch (param->chroma) {
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     }
     break;
   }
   /* Still undecided after validation: default to '420jpeg'. */
   if (param->chroma == Y4M_UNKNOWN) {
     mjpeg_info("set default chroma '420jpeg'");
     param->chroma = Y4M_CHROMA_420JPEG;
   }
   y4m_si_set_chroma(streaminfo, param->chroma);

   n = y4m_write_stream_header(out_fd, streaminfo);
   if (n != Y4M_OK)
      mjpeg_error("Failed to write stream header: %s", y4m_strerr(n));
}
Пример #8
0
/** parse_commandline
 * Parses the commandline for the supplied parameters.
 * in: argc, argv: the classic commandline parameters
 * out: *param populated with defaults overridden by the options given.
 * Exits (via usage()/mjpeg_error_exit1) on invalid or missing arguments.
 */
static void parse_commandline(int argc, char ** argv, parameters_t *param)
{
  int opt;

  /* defaults, possibly overridden below */
  param->pngformatstr = NULL;
  param->begin = 0;
  param->numframes = -1;
  param->framerate = y4m_fps_UNKNOWN;
  param->interlace = Y4M_UNKNOWN;
  param->interleave = -1;
  param->verbose = 1;
  param->ss_mode = DEFAULT_CHROMA_MODE;
  //param->mza_filename = NULL;
  //param->make_z_alpha = 0;

  /* parse options */
  while ((opt = getopt(argc, argv, "I:hv:L:b:j:n:f:z:S:")) != -1) {
    switch (opt) {
    case 'j':
      param->pngformatstr = strdup(optarg);
      break;
#if 0 
    case 'z':
      param->mza_filename = strdup(optarg);
      param->make_z_alpha = 1;
      break;
#else
    case 'z':
      mjpeg_error("Z encoding currently unsupported !\n");
      exit(-1);
      break;
#endif
    case 'S':
      param->ss_mode = y4m_chroma_parse_keyword(optarg);
      if (param->ss_mode == Y4M_UNKNOWN)
	mjpeg_error_exit1("Unknown subsampling mode option:  %s", optarg);
      if (!chroma_sub_implemented(param->ss_mode))
	mjpeg_error_exit1("Unsupported subsampling mode option:  %s", optarg);
      break;
    case 'b':
      param->begin = atol(optarg);
      break;
    case 'n':
      param->numframes = atol(optarg);
      break;
    case 'f':
      param->framerate = mpeg_conform_framerate(atof(optarg));
      break;
    case 'I':
      /* first character of the argument selects the interlacing mode */
      if (optarg[0] == 'p')
	param->interlace = Y4M_ILACE_NONE;
      else if (optarg[0] == 't')
	param->interlace = Y4M_ILACE_TOP_FIRST;
      else if (optarg[0] == 'b')
	param->interlace = Y4M_ILACE_BOTTOM_FIRST;
      else
	mjpeg_error_exit1 ("-I option requires arg p, t, or b");
      break;
    case 'L':
      param->interleave = atoi(optarg);
      if (param->interleave != 0 && param->interleave != 1)
	mjpeg_error_exit1 ("-L option requires arg 0 or 1");
      break;
    case 'v':
      param->verbose = atoi(optarg);
      if (param->verbose < 0 || param->verbose > 2)
	mjpeg_error_exit1( "-v option requires arg 0, 1, or 2");    
      break;
    case 'h':
    default:
      usage(argv[0]);
      exit(1);
    }
  }

  /* the input filename pattern is mandatory */
  if (param->pngformatstr == NULL)
    {
      mjpeg_error("%s:  input format string not specified. (Use -j option.)",
		  argv[0]); 
      usage(argv[0]); 
      exit(1);
    }

  /* so is the frame rate */
  if (Y4M_RATIO_EQL(param->framerate, y4m_fps_UNKNOWN))
    {
      mjpeg_error("%s:  framerate not specified.  (Use -f option)",
		  argv[0]); 
      usage(argv[0]); 
      exit(1);
    }
}