Ejemplo n.º 1
0
bool VCDStillsStream::MuxPossible(clockticks currentSCR)
{
    // An AU bigger than the whole buffer can never be muxed: hard error.
    if( bufmodel.Size() < au_unsent )
    {
        mjpeg_error_exit1( "Illegal VCD still: larger than maximum permitted by its buffering parameters!");
    }

    // Can't mux once the run-out has finished, or while the remaining
    // AU data does not yet fit into the buffer.
    if( RunOutComplete() || bufmodel.Space() < au_unsent )
        return false;

    // Anywhere before the final sector of the final AU: always OK.
    if( !LastSectorLastAU() )
        return true;

    // Final sector of the final AU and no sibling stream: finish freely.
    if( sibling == 0 )
        return true;

    // Sibling still has frames pending: warn (if not already warned) that
    // the standard's simultaneous-ending recommendation can't be met.
    if( !stream_mismatch_warned && sibling->NextAUType() != NOFRAME )
    {
        mjpeg_warn( "One VCD stills stream runs significantly longer than the other!");
        mjpeg_warn( "Simultaneous stream ending recommended by standard not possible" );
        return true;
    }

    // Otherwise only emit our last sector once the sibling has completed
    // (or is itself on its last sector).
    return sibling->MuxCompleted() || sibling->LastSectorLastAU();
}
Ejemplo n.º 2
0
/* Parse (the first) old, unofficial X-tag chroma specification,
   and then remove that tag from the X-tag list. */
static int
handle_old_chroma_xtag(y4m_stream_info_t *si)
{
  y4m_xtag_list_t *xtags = y4m_si_xtags(si);
  const char *spec = NULL;
  int idx, result;

  /* Scan backwards so that the last "XYSCSS=" tag in the list is used. */
  for (idx = y4m_xtag_count(xtags) - 1; idx >= 0; idx--) {
    spec = y4m_xtag_get(xtags, idx);
    if (strncmp("XYSCSS=", spec, 7) == 0) break;
  }
  if ((spec == NULL) || (idx < 0)) return Y4M_UNKNOWN;
  mjpeg_warn("Deprecated X-tag for chroma found in a stream header...");
  mjpeg_warn("...pester someone to upgrade the source's program!");
  /* map the text after "XYSCSS=" onto a chroma-mode constant */
  spec += 7;
  if (strcmp(spec, "411") == 0)           result = Y4M_CHROMA_411;
  else if (strcmp(spec, "420") == 0)      result = Y4M_CHROMA_420JPEG;
  else if (strcmp(spec, "420MPEG2") == 0) result = Y4M_CHROMA_420MPEG2;
  else if (strcmp(spec, "420PALDV") == 0) result = Y4M_CHROMA_420PALDV;
  else if (strcmp(spec, "420JPEG") == 0)  result = Y4M_CHROMA_420JPEG;
  else if (strcmp(spec, "444") == 0)      result = Y4M_CHROMA_444;
  else                                    result = Y4M_UNKNOWN;
  /* Remove the 'X' tag so that no one has to worry about it any more. */
  y4m_xtag_remove(xtags, idx);
  /* Hmm... what if there are more XYSCSS tags?  Broken is as broken does;
     thank goodness this is temporary code. */
  return result;
}
Ejemplo n.º 3
0
/* Copy one field (every second row) of frame n into frame m.
   'which' selects the field: TOP_FIRST copies even rows,
   BOTTOM_FIRST copies odd rows; anything else is warned about and
   treated like the even-row case. */
void copyfield(uint8_t *m[3],uint8_t *n[3],y4m_stream_info_t *sinfo, int which)
{
	int row;
	int lheight, lwidth, cwidth, cheight;

	lheight = y4m_si_get_plane_height(sinfo,0);
	lwidth  = y4m_si_get_plane_width(sinfo,0);
	cwidth  = y4m_si_get_plane_width(sinfo,1);
	cheight = y4m_si_get_plane_height(sinfo,1);

	if (which == Y4M_ILACE_BOTTOM_FIRST) {
		row = 1;
	} else {
		if (which != Y4M_ILACE_TOP_FIRST)
			mjpeg_warn("copyfield() invalid interlace selected (%d)",which);
		row = 0;
	}

	for (; row < lheight; row += 2)
	{
		memcpy(&m[0][row * lwidth], &n[0][row * lwidth], lwidth);
		/* NOTE(review): chroma rows are indexed by the luma row number
		   and merely bounded by the chroma height — for 4:2:0 this only
		   touches even chroma rows; confirm this is the intended
		   behaviour for subsampled modes. */
		if (row < cheight) {
			memcpy(&m[1][row*cwidth], &n[1][row*cwidth], cwidth);
			memcpy(&m[2][row*cwidth], &n[2][row*cwidth], cwidth);
		}
	}
}
Ejemplo n.º 4
0
// Search for the horizontal shift (in [-m, m)) of the row at 'line' that
// best matches the next comparison row, by minimising the sum of absolute
// pixel differences over a window of s pixels.  Pixels falling outside the
// frame contribute a flat penalty of 128.  Returns the best shift, or 0 if
// the comparison row would fall outside the frame.
// (this method isn't too effective)
int search_video_1 (int m, int s, int line, uint8_t *yuv_data[3],y4m_stream_info_t *sinfo)
{
    int w,h;
    int x1,x2;
    int min,shift,tot;
    int linew, line1w;

    int ilace = y4m_si_get_interlace(sinfo);

    w = y4m_si_get_plane_width(sinfo,0);
    h = y4m_si_get_plane_height(sinfo,0);

    linew = line * w;

    // Compare against the next line for progressive material, or the next
    // line of the same field (line+2) when interlaced.
    // BUGFIX: a stray unconditional "line1w = (line+2) * w;" used to follow
    // this conditional, overriding it and making the progressive case dead.
    if (ilace == Y4M_ILACE_NONE)
        line1w = (line+1) * w ;
    else
        line1w = (line+2) * w;

    mjpeg_debug("search_video %d",line);

    // 2 or 1 dependent on interlace or not.
    if (line+2 > h) {
        mjpeg_warn("line > height");
        return 0;
    }

    shift = 0;
    // BUGFIX: 'min' was only initialised at x1==0, so every earlier
    // iteration compared 'tot' against an uninitialised value (UB).
    min = INT_MAX;
    for (x1=-m; x1<m; x1++)
    {
        tot = 0;
        for(x2=0; x2<s; x2++)
        {
            // don't know if I should apply a standard addition to pixels outside the box.
            if (x1+x2 >=0 && x1+x2 < w)
                tot += abs ( *(yuv_data[0]+x1+x2+linew) - *(yuv_data[0]+x2+line1w));
            else
                tot += 128;
        }

        if (tot < min) {
            min = tot;
            shift = x1;
        }
    }

    mjpeg_debug("exit search_video %d",line);

    return shift;
}
Ejemplo n.º 5
0
// Scan forward through the LPCM elementary stream, appending up to
// frames_to_buffer further access units (fixed-size frames) to 'aunits'.
// Scanning stops early at end-of-stream, or once an AU's PTS passes the
// muxer's maximum PTS; 'eoscan' records that condition for the caller.
void LPCMStream::FillAUbuffer(unsigned int frames_to_buffer )
{
	last_buffered_AU += frames_to_buffer;
	mjpeg_debug( "Scanning %d MPEG LPCM audio frames to frame %d", 
				 frames_to_buffer, last_buffered_AU );

	while ( !bs.eos() 
            && decoding_order < last_buffered_AU 
            && !muxinto.AfterMaxPTS(access_unit.PTS) )
	{
		// LPCM frames are fixed length: simply seek over the previous AU...
		int skip=access_unit.length; 
        bs.SeekFwdBits( skip );
		prev_offset = AU_start;
		AU_start = bs.bitcount();
		// ...and if fewer bits than one whole frame were actually
		// available, the final frame was truncated: drop it and stop.
        if( AU_start - prev_offset != access_unit.length*8 )
        {
            mjpeg_warn("Discarding incomplete final frame LPCM  stream %d",
                       stream_num);
            aunits.DropLast();
            --decoding_order;
            break;
        }

        // Here we would check for header data but LPCM has no headers...
        if( bs.eos()   )
            break;

		// Record the next access unit: PTS advances by a fixed number of
		// clock ticks per frame; DTS == PTS (no reordering for audio).
		access_unit.start = AU_start;
		access_unit.length = bytes_per_frame;
		access_unit.PTS = static_cast<clockticks>(decoding_order) * 
            (CLOCKS_per_90Kth_sec * ticks_per_frame_90kHz);
		access_unit.DTS = access_unit.PTS;
		access_unit.dorder = decoding_order;
		decoding_order++;
		aunits.Append( access_unit );
		num_frames++;
		
		num_syncword++;

		// Progress trace, emitted every 10 frames.
		if (num_syncword >= old_frames+10 )
		{
			mjpeg_debug ("Got %d frame headers.", num_syncword);
			old_frames=num_syncword;
		}
        mjpeg_debug( "Got frame %d\n", decoding_order );

    }
	last_buffered_AU = decoding_order;
	eoscan =  bs.eos() || muxinto.AfterMaxPTS(access_unit.PTS);
}
Ejemplo n.º 6
0
/* lavplay message callback: forward a message to the mjpegutils log
   channel matching its severity.  Unrecognised types are ignored. */
static void input(int type, char *message)
{
    if (type == LAVPLAY_MSG_ERROR)
        mjpeg_error("%s", message);
    else if (type == LAVPLAY_MSG_WARNING)
        mjpeg_warn("%s", message);
    else if (type == LAVPLAY_MSG_INFO)
        mjpeg_info("%s", message);
    else if (type == LAVPLAY_MSG_DEBUG)
        mjpeg_debug("%s", message);
}
Ejemplo n.º 7
0
/* Derive the plane geometry from the output size and chroma mode, then
   allocate the three frame buffers (plus the DV scratch frame when
   libdv support is compiled in).  Unsupported chroma modes are warned
   about and treated as 4:2:0 ('420jpeg'). */
void init(LavParam *param, uint8_t *buffer[])
{
   param->luma_size = param->output_width * param->output_height;

   if (param->chroma == Y4M_CHROMA_422) {
     param->chroma_width  = param->output_width  / 2;
     param->chroma_height = param->output_height;
   } else if (param->chroma == Y4M_CHROMA_411) {
     param->chroma_width  = param->output_width  / 4;
     param->chroma_height = param->output_height;
   } else {
     if (param->chroma != Y4M_UNKNOWN &&
         param->chroma != Y4M_CHROMA_420JPEG &&
         param->chroma != Y4M_CHROMA_420MPEG2 &&
         param->chroma != Y4M_CHROMA_420PALDV) {
       mjpeg_warn("unsupported chroma (%d), assume '420jpeg'", param->chroma);
       param->chroma = Y4M_UNKNOWN; /* will update in writeoutYUV4MPEGheader() */
     }
     /* every 4:2:0 variant halves both chroma dimensions */
     param->chroma_width  = param->output_width  / 2;
     param->chroma_height = param->output_height / 2;
   }
   param->chroma_size = param->chroma_height * param->chroma_width;

   buffer[0] = (uint8_t *)bufalloc(param->luma_size);
   buffer[1] = (uint8_t *)bufalloc(param->chroma_size);
   buffer[2] = (uint8_t *)bufalloc(param->chroma_size);

#ifdef HAVE_LIBDV
   dv_frame[0] = (uint8_t *)bufalloc(3 * param->output_width * param->output_height);
   dv_frame[1] = buffer[1];
   dv_frame[2] = buffer[2];
#endif
}
Ejemplo n.º 8
0
/* Parse the whitespace-separated tag fields of a YUV4MPEG2 stream
   header (in 's') into the stream-info structure 'i'.  's' is consumed
   destructively via strtok().  Returns Y4M_OK on success or a
   Y4M_ERR_* code; unknown tags are either stored as X-tags with a
   warning or rejected, depending on _y4mparam_allow_unknown_tags. */
int y4m_parse_stream_tags(char *s, y4m_stream_info_t *i)
{
  char *token, *value;
  char tag;
  int err;

  /* parse fields */
  for (token = strtok(s, Y4M_DELIM); 
       token != NULL; 
       token = strtok(NULL, Y4M_DELIM)) {
    if (token[0] == '\0') continue;   /* skip empty strings */
    tag = token[0];
    value = token + 1;
    switch (tag) {
    case 'W':  /* width */
      i->width = atoi(value);
      if (i->width <= 0) return Y4M_ERR_RANGE;
      break;
    case 'H':  /* height */
      i->height = atoi(value); 
      if (i->height <= 0) return Y4M_ERR_RANGE;
      break;
    case 'F':  /* frame rate (fps) */
      if ((err = y4m_parse_ratio(&(i->framerate), value)) != Y4M_OK)
	return err;
      if (i->framerate.n < 0) return Y4M_ERR_RANGE;
      break;
    case 'I':  /* interlacing */
      switch (value[0]) {
      case 'p':  i->interlace = Y4M_ILACE_NONE; break;
      case 't':  i->interlace = Y4M_ILACE_TOP_FIRST; break;
      case 'b':  i->interlace = Y4M_ILACE_BOTTOM_FIRST; break;
      case 'm':  i->interlace = Y4M_ILACE_MIXED; break;
      case '?':
      default:
	i->interlace = Y4M_UNKNOWN; break;
      }
      break;
    case 'A':  /* sample (pixel) aspect ratio */
      if ((err = y4m_parse_ratio(&(i->sampleaspect), value)) != Y4M_OK)
	return err;
      if (i->sampleaspect.n < 0) return Y4M_ERR_RANGE;
      break;
    case 'C':  /* chroma subsampling keyword */
      i->chroma = y4m_chroma_parse_keyword(value);
      if (i->chroma == Y4M_UNKNOWN)
	return Y4M_ERR_HEADER;
      break;
    case 'X':  /* 'X' meta-tag */
      if ((err = y4m_xtag_add(&(i->x_tags), token)) != Y4M_OK) return err;
      break;
    default:
      /* possible error on unknown options */
      if (_y4mparam_allow_unknown_tags) {
	/* unknown tags ok:  store in xtag list and warn... */
	if ((err = y4m_xtag_add(&(i->x_tags), token)) != Y4M_OK) return err;
	mjpeg_warn("Unknown stream tag encountered:  '%s'", token);
      } else {
	/* unknown tags are *not* ok */
	return Y4M_ERR_BADTAG;
      }
      break;
    }
  }

  /* Without 'C' tag or any other chroma spec, default to 420jpeg */
  if (i->chroma == Y4M_UNKNOWN) 
    i->chroma = Y4M_CHROMA_420JPEG;

  /* Error checking... */
  /*      - Width and Height are required. */
  if ((i->width == Y4M_UNKNOWN) || (i->height == Y4M_UNKNOWN))
    return Y4M_ERR_HEADER;
  /*      - Non-420 chroma and mixed interlace require level >= 1 */
  if (_y4mparam_feature_level < 1) {
    if ((i->chroma != Y4M_CHROMA_420JPEG) &&
	(i->chroma != Y4M_CHROMA_420MPEG2) &&
	(i->chroma != Y4M_CHROMA_420PALDV))
      return Y4M_ERR_FEATURE;
    if (i->interlace == Y4M_ILACE_MIXED)
      return Y4M_ERR_FEATURE;
  }

  /* ta da!  done. */
  return Y4M_OK;
}
Ejemplo n.º 9
0
// Scan forward through the MPEG audio elementary stream, appending up to
// frames_to_buffer further access units to 'aunits'.  Each frame's length
// is recomputed from its header (bit-rate code and padding bit), so frames
// may vary in size.  Scanning stops at end-of-stream, at the frame limit,
// or once an AU's PTS passes the muxer's maximum PTS; 'eoscan' records
// that condition for the caller.
void MPAStream::FillAUbuffer(unsigned int frames_to_buffer )
{
	unsigned int padding_bit;
	last_buffered_AU += frames_to_buffer;

    if( eoscan )
        return;

    mjpeg_debug( "Scanning %d MPA frames to frame %d", 
                frames_to_buffer,
                last_buffered_AU );
	while( !bs.eos() 
           && decoding_order < last_buffered_AU 
           && !muxinto.AfterMaxPTS(access_unit.PTS) )
	{

		// Seek over the rest of the previous frame (the 4 header bytes
		// were already consumed when that frame was parsed below).
		int skip=access_unit.length-4;
        bs.SeekFwdBits( skip );
		prev_offset = AU_start;
		AU_start = bs.bitcount();
		// Fewer bits than a whole frame available means the final frame
		// was truncated: drop it and stop scanning.
        if( AU_start - prev_offset != access_unit.length*8 )
        {
            mjpeg_warn("Discarding incomplete final frame MPEG audio stream %02x!",
                       stream_id
                       );
            aunits.DropLast();
            --decoding_order;
            break;
        }
		/* Check we have reached the end of have  another catenated 
		   stream to process before finishing ... */
		if ( (syncword = bs.GetBits( 11))!=AUDIO_SYNCWORD )
		{
            //
            // Handle a broken last frame...
			if( !bs.eos()   )
			{
                mjpeg_warn( "Data follows end of last recogniseable MPEG audio frame - bad stream?");
                eoscan = true;
                return;
			}
            break;
		}
		// Skip version_id:2, layer:2, protection:1
		(void) bs.GetBits( 5);
		int rate_code	= bs.GetBits( 4);
		// Skip frequency
		(void) bs.GetBits( 2);

		padding_bit=bs.Get1Bit();
		// Record the next access unit: frame length from the header
		// fields; PTS advances by one frame's worth of samples;
		// DTS == PTS (audio needs no reordering).
		access_unit.start = AU_start;
		access_unit.length = SizeFrame( rate_code, padding_bit );
		access_unit.PTS = static_cast<clockticks>(decoding_order) * static_cast<clockticks>(mpa_samples[layer]) * static_cast<clockticks>(CLOCKS)
			/ samples_per_second;
		access_unit.DTS = access_unit.PTS;
		access_unit.dorder = decoding_order;
		decoding_order++;
		aunits.Append( access_unit );
		num_frames[padding_bit]++;

		// Consume the remaining 9 header bits, so a whole 32-bit header
		// (11+5+4+2+1+9 bits) has been read for this frame.
		bs.GetBits( 9);
		
		num_syncword++;

		// Progress trace, emitted every 10 frames.
		if (num_syncword >= old_frames+10 )
		{
			mjpeg_debug ("Got %d frame headers.", num_syncword);
			old_frames=num_syncword;
		
		}
	

    }
	last_buffered_AU = decoding_order;
	eoscan = bs.eos() || muxinto.AfterMaxPTS(access_unit.PTS);
}
Ejemplo n.º 10
0
// Parse one KEY=VALUE source option (ACTIVE, MATTE, BG, NORM, CHROMASS,
// ILACE, SAR) and apply it to this source.  Any malformed or unknown
// option terminates the program via mjpeg_error_exit1().
void ysSource::parse_keyword(char *optarg)
{
  
  if (!strncasecmp(optarg, "ACTIVE=", 7)) {
    if (_active_region.parse_geometry(optarg+7)) {
      mjpeg_error_exit1("Bad ACTIVE keyword: '%s'", optarg);
    }

  } else if (!strncasecmp(optarg, "MATTE=", 6)) {
    if (_matte_region.parse_geometry(optarg+6)) {
      mjpeg_error_exit1("Bad MATTE keyword: '%s'", optarg);
    }

  } else if (!strncasecmp(optarg, "BG=", 3)) {
    bgcolor(ysYCbCr::parse_string(optarg+3));

  } else if (!strcasecmp(optarg, "NORM=NTSC")) {
    norm(NORM_NTSC);

  } else if (!strcasecmp(optarg, "NORM=PAL")) {
    norm(NORM_PAL);

  } else if (!strcasecmp(optarg, "NORM=SECAM")) {
    /* NOTE(review): SECAM maps to the PAL norm — presumably intentional
       (same line count / frame rate); confirm. */
    norm(NORM_PAL);

  } else if (!strncasecmp(optarg, "CHROMASS=", 9)) {
    //    if (_stream.subsampling().parse_mode(optarg+9)) {
    /* A known subsampling mode is already set: the explicit option wins. */
    if (_stream.subsampling().is_known()) {
      mjpeg_warn("Overriding source's chroma subsampling mode!");
      //  Was %s",
      //                 _stream.subsampling().mode_to_string());
    }
    if (_stream.parse_subsampling(optarg+9)) {
      mjpeg_error_exit1("Bad chroma subsampling spec: '%s'", optarg);
    }

  } else if (!strncasecmp(optarg, "ILACE=", 6)) {
    if (!strcasecmp(optarg+6, "TOP_FIRST")) {
      interlace(Y4M_ILACE_TOP_FIRST);
    } else if (!strcasecmp(optarg+6, "BOTTOM_FIRST")) {
      interlace(Y4M_ILACE_BOTTOM_FIRST);
    } else if (!strcasecmp(optarg+6, "NONE")) {
      interlace(Y4M_ILACE_NONE);
    } else if (!strcasecmp(optarg+6, "TOP_ONLY")) {
      fake_progressive(FAKE_TOP_ONLY);
    } else if (!strcasecmp(optarg+6, "BOTTOM_ONLY")) {
      fake_progressive(FAKE_BOT_ONLY);
    } else {
      mjpeg_error_exit1("Bad interlace spec: '%s'", optarg);
    }

  } else if (!strncasecmp(optarg, "SAR=", 4)) {
    /* Named presets first; otherwise parse an explicit ratio. */
    ysRatio sar;
    if (!strcasecmp(optarg+4, "NTSC")) {
      sar = y4m_sar_NTSC_CCIR601;
    } else if (!strcasecmp(optarg+4, "PAL")) {
      sar = y4m_sar_PAL_CCIR601;
    } else if (!strcasecmp(optarg+4, "NTSC_WIDE")) {
      sar = y4m_sar_NTSC_16_9;
    } else if (!strcasecmp(optarg+4, "PAL_WIDE")) {
      sar = y4m_sar_PAL_16_9;
    } else if (sar.parse_ratio(optarg+4)) {
      mjpeg_error_exit1("Bad ratio spec: '%s'", optarg);
    }
    _stream.sar(sar);

  } else
    mjpeg_error_exit1 ("Unrecognized input parameter:  '%s'", optarg);
}
Ejemplo n.º 11
0
// Validate the source description after option parsing: interlacing,
// sample aspect ratio and chroma subsampling must be known, and the
// frame / matte-region geometry must respect the stream's alignment
// constraints.  All problems are reported before exit(1) is called;
// unset matte/active regions are given defaults and fixated.
void ysSource::check_parameters()
{
  int cause_to_exit = 0;

  /* init interlacing */
  if (_stream.interlace() == Y4M_UNKNOWN) {
    mjpeg_error("Source interlacing is unknown!");
    cause_to_exit = 1;
  }
  /* init/constrain SAR */
  if (!_stream.sar().is_known()) {
    mjpeg_error("Source sample aspect ratio unknown!");
    cause_to_exit = 1;
  }

  /* init/constrain chroma subsampling */
  if (!_stream.subsampling().is_known()) {
    mjpeg_error("Source chroma subsampling is unknown!");
    cause_to_exit = 1;
  }

  /* init/clip matte region */
  /* default is entire source frame --- so convolution can extend beyond
     the active region */
  if (!_matte_region.is_known()) {
    if (_matte_region.offset().is_known()) {
      mjpeg_info("Source matte region defaulting to source frame size.");
      _matte_region.dim(_stream.dim());
    } else {
      mjpeg_info("Source matte region defaulting to full source frame.");
      _matte_region = ysRegion(_stream.dim());
      _matte_region.origin_mode(ANC_TL);
    }
  }
  _matte_region.fixate(_stream.dim());

  /* check alignment */
  /* frame size and matte region must conform to alignment */
  {
    int xal = _stream.x_alignment();
    int yal = _stream.y_alignment();

    if (_stream.x_size() % xal) {
      mjpeg_error("Source x size (%d) is not multiple of %d!",
		  _stream.x_size(), xal);
      cause_to_exit = 1;
    }
    if (_stream.y_size() % yal) {
      mjpeg_error("Source y size (%d) is not multiple of %d!",
		  _stream.y_size(), yal);
      cause_to_exit = 1;
    }

    if (_matte_region.dim().x() % xal) {
      mjpeg_error("Source matte region x size (%d) is not multiple of %d!",
		  _matte_region.dim().x(), xal);
      cause_to_exit = 1;
    }
    if (_matte_region.offset().x() % xal) {
      mjpeg_error("Source matte region x offset (%d) is not multiple of %d!",
		  _matte_region.offset().x(), xal);
      cause_to_exit = 1;
    }
    if (_matte_region.dim().y() % yal) {
      mjpeg_error("Source matte region y size (%d) is not multiple of %d!",
		  _matte_region.dim().y(), yal);
      cause_to_exit = 1;
    }
    if (_matte_region.offset().y() % yal) {
      mjpeg_error("Source matte region y offset (%d) is not multiple of %d!",
		  _matte_region.offset().y(), yal);
      cause_to_exit = 1;
    }
  }

  /* all accumulated errors are fatal only at this point, so that every
     problem gets reported in a single run */
  if (cause_to_exit) 
    exit(1);

  /* init/clip active region */
  if (!_active_region.is_known()) {
    if (_active_region.offset().is_known()) {
      mjpeg_info("Source active region defaulting to source frame size.");
      _active_region.dim(_stream.dim());
    } else {
      mjpeg_info("Source active region defaulting to full source frame.");
      _active_region = ysRegion(_stream.dim());
      _active_region.origin_mode(ANC_TL);
    }
  }
  _active_region.fixate(_stream.dim());


#if 0  /* do clipping later, after ratios are established */
  if (_active_region.clip(ysRatioPoint(_stream.dim()))) {
    mjpeg_warn("Source active region clipped by frame size.");
  }
#endif

}
Ejemplo n.º 12
0
// *************************************************************************************
// MAIN
// *************************************************************************************
// yuvconvolve entry point: parses -m (matrix), -d (divisor) and -V
// (verbosity), reads a YUV4MPEG stream from stdin, applies the
// convolution and writes the result to stdout.
int main (int argc, char *argv[])
{
	int verbose = 4; // LOG_ERROR ;  NOTE(review): outside the [0..2]
	                 // range enforced for -V — confirm intended default.
	int fdIn = 0 ;
	int fdOut = 1 ;
	y4m_stream_info_t in_streaminfo,out_streaminfo;
	const static char *legal_flags = "d:m:V:";
	int c;
	int *matrix = NULL;  // convolution kernel; stays NULL until -m parsed
	int matlen = 0;      // number of kernel entries
	float divisor = 0;   // 0 means "derive from the matrix sum"

	while ((c = getopt (argc, argv, legal_flags)) != -1) {
		switch (c) {
		case 'V':
			verbose = atoi (optarg);
			if (verbose < 0 || verbose > 2)
				mjpeg_error_exit1 ("Verbose level must be [0..2]");
			break;
		case 'd':
			divisor = atof(optarg);
			if (divisor == 0) {
				mjpeg_error_exit1 ("Divisor must not be 0");
			}
			break;
		case 'm':
			// the number of comma-separated entries can never exceed
			// strlen(optarg), so this allocation is always big enough
			matrix = (int *) malloc (sizeof(int) * strlen(optarg));
			if (matrix == NULL)
				mjpeg_error_exit1 ("Out of memory allocating matrix");
			matlen = parse_matrix(optarg,matrix);
			if (matlen == 0) {
				mjpeg_error_exit1 ("Invalid matrix");
			}
			break;

		case '?':
			print_usage (argv);
			return 0 ;
			break;
		}
	}

	// BUGFIX: a matrix is mandatory — without this guard the
	// uninitialised pointer was passed to sum_matrix()/convolve().
	if (matrix == NULL) {
		print_usage (argv);
		mjpeg_error_exit1 ("No convolution matrix given (use -m)");
	}

	if (divisor == 0) {
		divisor = sum_matrix(matrix,matlen);
	}

	if (divisor == 0) {
		// (mjpeg_warn supplies its own newline)
		mjpeg_warn("divisor defaulting to 1");
		divisor = 1;
	}

	// mjpeg tools global initialisations
	mjpeg_default_handler_verbosity (verbose);

	// Initialize input streams
	y4m_init_stream_info (&in_streaminfo);
	y4m_init_stream_info (&out_streaminfo);

	// ***************************************************************
	// Get video stream informations (size, framerate, interlacing, aspect ratio).
	// The streaminfo structure is filled in
	// ***************************************************************
	// INPUT comes from stdin, we check for a correct file header
	if (y4m_read_stream_header (fdIn, &in_streaminfo) != Y4M_OK)
		mjpeg_error_exit1 ("Couldn't read YUV4MPEG header!");

	y4m_ratio_t src_frame_rate = y4m_si_get_framerate( &in_streaminfo ); // (currently unused)
	y4m_copy_stream_info( &out_streaminfo, &in_streaminfo );

	// Information output
	mjpeg_info ("yuvconvolve (version " YUVRFPS_VERSION ") performs a convolution matrix on yuv streams");
	mjpeg_info ("yuvconvolve -? for help");

	y4m_write_stream_header(fdOut,&out_streaminfo);

	/* in that function we do all the important work */

	fprintf (stderr,"matrix square: %d\n",matlen);

	convolve( fdIn,&in_streaminfo,fdOut,&out_streaminfo,matrix,divisor,matlen);

	y4m_fini_stream_info (&in_streaminfo);
	y4m_fini_stream_info (&out_streaminfo);

	return 0;
}
Ejemplo n.º 13
0
/* Fill 'streaminfo' from the command-line parameters and edit list,
   sanity-check the requested chroma mode against the data format of
   the input (only frame 0 is inspected), and write the YUV4MPEG2
   stream header to out_fd.  Exits via mjpeg_error_exit1() on an
   unusable chroma/format combination; a failed header write is only
   logged as an error. */
void writeoutYUV4MPEGheader(int out_fd,
			    LavParam *param,
			    EditList el,
			    y4m_stream_info_t *streaminfo)
{
   int n;

   y4m_si_set_width(streaminfo, param->output_width);
   y4m_si_set_height(streaminfo, param->output_height);
   y4m_si_set_interlace(streaminfo, param->interlace);
   y4m_si_set_framerate(streaminfo, mpeg_conform_framerate(el.video_fps));
   /* sample aspect: explicit parameter wins, then the edit list's
      value, otherwise guess from frame size and display aspect */
   if (!Y4M_RATIO_EQL(param->sar, y4m_sar_UNKNOWN)) {
     y4m_si_set_sampleaspect(streaminfo, param->sar);
   } else if ((el.video_sar_width != 0) || (el.video_sar_height != 0)) {
     y4m_ratio_t sar;
     sar.n = el.video_sar_width;
     sar.d = el.video_sar_height;
     y4m_si_set_sampleaspect(streaminfo, sar);
   } else {
     /* no idea! ...eh, just guess. */
     mjpeg_warn("unspecified sample-aspect-ratio --- taking a guess...");
     y4m_si_set_sampleaspect(streaminfo,
			     y4m_guess_sar(param->output_width, 
					   param->output_height,
					   param->dar));
   }

   switch (el_video_frame_data_format(0, &el)) { /* FIXME: checking only 0-th frame. */
   case DATAFORMAT_YUV420:
     switch (param->chroma) {
     case Y4M_UNKNOWN:
     case Y4M_CHROMA_420JPEG:
       break;
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     default:
       mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420jpeg') with this input");
       break;
     }
     break;

   case DATAFORMAT_YUV422:
     switch (param->chroma) {
     case Y4M_CHROMA_422:
       break;
     default:
       mjpeg_error_exit1("must specify chroma '422' with this input");
       break;
     }
     break;

   case DATAFORMAT_DV2:
#ifndef HAVE_LIBDV
     mjpeg_error_exit1("DV input was not configured at compile time");
#else
     /* for DV input the chroma sampling must be read from the DV
        header of the first frame */
     el_get_video_frame(jpeg_data, 0, &el); /* FIXME: checking only 0-th frame. */
     dv_parse_header(decoder, jpeg_data);
     switch(decoder->sampling) {
     case e_dv_sample_420:
       switch (param->chroma) {
       case Y4M_UNKNOWN:
	 mjpeg_info("set chroma '420paldv' from input");
	 param->chroma = Y4M_CHROMA_420PALDV;
	 break;
       case Y4M_CHROMA_420PALDV:
	 break;
       case Y4M_CHROMA_420JPEG:
       case Y4M_CHROMA_420MPEG2:
	 mjpeg_warn("4:2:0 chroma should be '420paldv' with this input");
	 break;
       case Y4M_CHROMA_422:
         if(libdv_pal_yv12 == 1 )
	   mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       default:
	 mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       }
       break;
     case e_dv_sample_411:
       if (param->chroma != Y4M_CHROMA_411)
	 mjpeg_info("chroma '411' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     case e_dv_sample_422:
       if (param->chroma != Y4M_CHROMA_422)
	 mjpeg_info("chroma '422' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     default:
       break;
     }
#endif
     break;

   case DATAFORMAT_MJPG:
     if (param->chroma != Y4M_CHROMA_422 && el.chroma == Y4M_CHROMA_422)
       mjpeg_info("chroma '422' recommended with this input");
     switch (param->chroma) {
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     }
     break;
   }
   /* still undecided (e.g. plain 4:2:0 input): use the default */
   if (param->chroma == Y4M_UNKNOWN) {
     mjpeg_info("set default chroma '420jpeg'");
     param->chroma = Y4M_CHROMA_420JPEG;
   }
   y4m_si_set_chroma(streaminfo, param->chroma);

   n = y4m_write_stream_header(out_fd, streaminfo);
   if (n != Y4M_OK)
      mjpeg_error("Failed to write stream header: %s", y4m_strerr(n));
}
Ejemplo n.º 14
0
/*
 * readframe - read jpeg or dv frame into yuv buffer
 *
 * returns:
 *	0   success
 *	1   fatal error
 *	2   corrupt data encountered; 
 *		decoding can continue, but this frame may be damaged 
 */
int readframe(int numframe, 
	      uint8_t *frame[],
	      LavParam *param,
	      EditList el)
{
  int len, i, res, data_format;
  uint8_t *frame_tmp;
  int warn;
  warn = 0;

  /* jpeg_data is a fixed-size buffer: refuse oversized frames up front */
  if (MAX_JPEG_LEN < el.max_frame_size) {
    mjpeg_error_exit1( "Max size of JPEG frame = %ld: too big",
		       el.max_frame_size);
  }
  
  len = el_get_video_frame(jpeg_data, numframe, &el);
  data_format = el_video_frame_data_format(numframe, &el);
  
  switch(data_format) {

  case DATAFORMAT_DV2 :
#ifndef HAVE_LIBDV
    mjpeg_error("DV input was not configured at compile time");
    res = 1;
#else
    mjpeg_debug("DV frame %d   len %d",numframe,len);
    res = 0;
    dv_parse_header(decoder, jpeg_data);
    switch(decoder->sampling) {
    case e_dv_sample_420:
      /* libdv decodes PAL DV directly as planar YUV 420
       * (YV12 or 4CC 0x32315659) if configured with the flag
       * --with-pal-yuv=YV12 which is not (!) the default
       */
      if (libdv_pal_yv12 == 1) {
	pitches[0] = decoder->width;
	pitches[1] = decoder->width / 2;
	pitches[2] = decoder->width / 2;
	if (pitches[0] != param->output_width ||
	    pitches[1] != param->chroma_width) {
	  mjpeg_error("for DV 4:2:0 only full width output is supported");
	  res = 1;
	} else {
	  dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			       frame, (int *)pitches);
	  /* swap the U and V components */
	  frame_tmp = frame[2];
	  frame[2] = frame[1];
	  frame[1] = frame_tmp;
	}
	break;
      }
      /* libdv_pal_yv12 != 1: fall through to the packed-422 path below */
    case e_dv_sample_411:
    case e_dv_sample_422:
      /* libdv decodes NTSC DV (native 411) and by default also PAL
       * DV (native 420) as packed YUV 422 (YUY2 or 4CC 0x32595559)
       * where the U and V information is repeated.  This can be
       * transformed to planar 420 (YV12 or 4CC 0x32315659).
       * For NTSC DV this transformation is lossy.
       */
      pitches[0] = decoder->width * 2;
      pitches[1] = 0;
      pitches[2] = 0;
      if (decoder->width != param->output_width) {
	mjpeg_error("for DV only full width output is supported");
	res = 1;
      } else {
	dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			     dv_frame, (int *)pitches);
	frame_YUV422_to_planar(frame, dv_frame[0],
			       decoder->width,	decoder->height,
			       param->chroma);
      }
      break;
    default:
      res = 1;
      break;
    }
#endif /* HAVE_LIBDV */
    break;

  case DATAFORMAT_YUV420 :
  case DATAFORMAT_YUV422 :
    /* raw planar data: copy the three planes straight out of the buffer */
    mjpeg_debug("raw YUV frame %d   len %d",numframe,len);
    frame_tmp = jpeg_data;
    memcpy(frame[0], frame_tmp, param->luma_size);
    frame_tmp += param->luma_size;
    memcpy(frame[1], frame_tmp, param->chroma_size);
    frame_tmp += param->chroma_size;
    memcpy(frame[2], frame_tmp, param->chroma_size);
    res = 0;
    break;

  default:
    mjpeg_debug("MJPEG frame %d   len %d",numframe,len);
    res = decode_jpeg_raw(jpeg_data, len, el.video_inter,
			  param->chroma,
			  param->output_width, param->output_height,
			  frame[0], frame[1], frame[2]);
  }
  
  /* res < 0: unrecoverable; res == 1: frame damaged but stream usable */
  if (res < 0) {
    mjpeg_warn( "Fatal Error Decoding Frame %d", numframe);
    return 1;
  } else if (res == 1) {
    mjpeg_warn( "Decoding of Frame %d failed", numframe);
    warn = 1;
    res = 0;
  }
  
  
  /* mono output: force both chroma planes to the neutral value 0x80 */
  if (param->mono) {
    for (i = 0;
	 i < param->chroma_size;
	 ++i) {
      frame[1][i] = 0x80;
      frame[2][i] = 0x80;
    }
  }

  if(warn)
	  return 2;
  else
	  return 0;
}
Ejemplo n.º 15
0
/* Filter entry point: parse options, read a YUV4MPEG stream from stdin,
   run filter() on each frame after the first param_skip frames, and
   write the (possibly filtered) frames to stdout.  Prints replacement-
   average statistics at the end. */
int
main(int argc, char *argv[])
{
	int	i;
	long long avg, total;
	int	input_fd = 0;
	int	output_fd = 1;
	int	horz;
	int	vert;
	int	c;
	int	frame_count;

	y4m_stream_info_t istream, ostream;
	y4m_frame_info_t iframe;

	y4m_accept_extensions(1);

	while((c = getopt(argc, argv, "r:R:t:T:v:S:hI:w:fc:")) != EOF) {
		switch(c) {
		case 'r':
			radius_luma = atoi(optarg);
			break;
		case 'R':
			radius_chroma = atoi(optarg);
			break;
		case 't':
			threshold_luma = atoi(optarg);
			break;
		case 'T':
			threshold_chroma = atoi(optarg);
			break;
		case 'I':
			interlace = atoi (optarg);
			if (interlace != 0 && interlace != 1)
			{
				Usage (argv[0]);
				exit (1);
			}
			break;
		case 'S':
			param_skip = atoi (optarg);
			break;
		case 'f':
			param_fast = 1;
			break;
		case 'w':
			/* recognised named weights get a type code; anything
			   else is type 0 with the literal value */
			if (strcmp (optarg, "8") == 0)
				param_weight_type = 1;
			else if (strcmp (optarg, "2.667") == 0)
				param_weight_type = 2;
			else if (strcmp (optarg, "13.333") == 0)
				param_weight_type = 3;
			else if (strcmp (optarg, "24") == 0)
				param_weight_type = 4;
			else
				param_weight_type = 0;
			param_weight = atof (optarg);
			break;
                case 'c':
                        cutoff = atof(optarg);
                        break;
		case 'v':
			verbose = atoi (optarg);
			if (verbose < 0 || verbose >2)
			{
				Usage (argv[0]);
				exit (1);
			}
			break;		  
			
		case 'h':
                        Usage (argv[0]);
			/* fall through: print usage then exit */
		default:
			exit(0);
		}
	}

	/* default weight depends on whether the fast path is selected */
        if( param_weight < 0 ) {
            if( param_fast )
                param_weight = 8.0;
            else
                param_weight = 1.0;
        }

	/* precompute fixed-point division tables for the averaging */
        for( i=1; i<NUMAVG; i++ ) {
            avg_replace[i]=0;
            divisor[i]=((1<<DIVISORBITS)+(i>>1))/i;
            divoffset[i]=divisor[i]*(i>>1)+(divisor[i]>>1);
        }

#ifdef HAVE_ASM_MMX
        if( cpu_accel() & ACCEL_X86_MMXEXT )
            domean8=1;
#endif

	mjpeg_info ("fast %d, weight type %d\n", param_fast,
		param_weight_type);

	if (radius_luma <= 0 || radius_chroma <= 0)
	   mjpeg_error_exit1("radius values must be > 0!");

	if (threshold_luma < 0 || threshold_chroma < 0)
	   mjpeg_error_exit1("threshold values must be >= 0!");

   (void)mjpeg_default_handler_verbosity(verbose);

	y4m_init_stream_info(&istream);
	y4m_init_stream_info(&ostream);
	y4m_init_frame_info(&iframe);

	i = y4m_read_stream_header(input_fd, &istream);
	if (i != Y4M_OK)
	  mjpeg_error_exit1("Input stream error: %s", y4m_strerr(i));

	if (y4m_si_get_plane_count(&istream) != 3)
	   mjpeg_error_exit1("Only 3 plane formats supported");

	/* chroma subsampling denominators give the H/V shrink factors */
	chroma_mode = y4m_si_get_chroma(&istream);
	SS_H = y4m_chroma_ss_x_ratio(chroma_mode).d;
	SS_V = y4m_chroma_ss_y_ratio(chroma_mode).d;

	mjpeg_debug("chroma subsampling: %dH %dV\n",SS_H,SS_V);

	/* -I not given: take interlacing from the stream header */
	if (interlace == -1)
	{
	  i = y4m_si_get_interlace(&istream);
	  switch (i)
	  {
	  case Y4M_ILACE_NONE:
	       interlace = 0;
	       break;
	  case Y4M_ILACE_BOTTOM_FIRST:
	  case Y4M_ILACE_TOP_FIRST:
	       interlace = 1;
	       break;
	  default:
	       mjpeg_warn("Unknown interlacing '%d', assuming non-interlaced", i);
	       interlace = 0;
	       break;
	  }
	}

	if( interlace && y4m_si_get_height(&istream) % 2 != 0 )
		mjpeg_error_exit1("Input images have odd number of lines - can't treats as interlaced!" );

	horz = y4m_si_get_width(&istream);
	vert = y4m_si_get_height(&istream);
	mjpeg_debug("width=%d height=%d luma_r=%d chroma_r=%d luma_t=%d chroma_t=%d", horz, vert, radius_luma, radius_chroma, threshold_luma, threshold_chroma);

	y4m_copy_stream_info(&ostream, &istream);

	/* NOTE(review): these mallocs are unchecked — a failure would be
	   caught only when the frame read/write dereferences NULL */
	input_frame[0] = malloc(horz * vert);
	input_frame[1] = malloc((horz / SS_H) * (vert / SS_V));
	input_frame[2] = malloc((horz / SS_H) * (vert / SS_V));

	output_frame[0] = malloc(horz * vert);
	output_frame[1] = malloc((horz / SS_H) * (vert / SS_V));
	output_frame[2] = malloc((horz / SS_H) * (vert / SS_V));


	y4m_write_stream_header(output_fd, &ostream);

	/* the first param_skip frames are passed through unfiltered */
	frame_count = 0;
	while (y4m_read_frame(input_fd, &istream, &iframe, input_frame) == Y4M_OK)
	{ 
		frame_count++;
		if (frame_count > param_skip)
		{
		  filter(horz, vert,  input_frame, output_frame);
		  y4m_write_frame(output_fd, &ostream, &iframe, output_frame);
		}
		else
		  y4m_write_frame(output_fd, &ostream, &iframe, input_frame);
	}

	/* summarise how many pixels were replaced, weighted by run length */
	for (total=0, avg=0, i=0; i < NUMAVG; i++) {
		total += avg_replace[i];
                avg   += avg_replace[i] * i; 
        }
	mjpeg_info("frames=%d avg=%3.1f", frame_count, ((double)avg)/((double)total));

	for (i=0; i < NUMAVG; i++) {
		mjpeg_debug( "%02d: %6.2f", i,
			(((double)avg_replace[i]) * 100.0)/(double)(total));
	}

	y4m_fini_stream_info(&istream);
	y4m_fini_stream_info(&ostream);
	y4m_fini_frame_info(&iframe);
	exit(0);
}
Ejemplo n.º 16
0
/* Parse the (space-separated) tag tokens of a single FRAME header line.
 *
 * 's'  - mutable tag string; consumed in place via strtok()
 * 'si' - stream header info, used to validate per-frame tags against the
 *        stream-level interlacing/chroma modes
 * 'fi' - frame info filled in by this call (presentation, temporal and
 *        spatial sampling, plus any 'X' meta-tags)
 *
 * Returns Y4M_OK, or a Y4M_ERR_* code for a malformed/disallowed tag or
 * a required-but-missing one.
 */
static int y4m_parse_frame_tags(char *s, const y4m_stream_info_t *si,
				y4m_frame_info_t *fi)
{
  char *token, *value;
  char tag;
  int err;

  /* parse fields */
  for (token = strtok(s, Y4M_DELIM); 
       token != NULL; 
       token = strtok(NULL, Y4M_DELIM)) {
    if (token[0] == '\0') continue;   /* skip empty strings */
    tag = token[0];
    value = token + 1;
    switch (tag) {
    case 'I':
      /* frame 'I' tag requires feature level >= 1 */
      if (_y4mparam_feature_level < 1) return Y4M_ERR_FEATURE;
      /* per-frame interlacing is only legal when the stream declared
         mixed-mode interlacing */
      if (si->interlace != Y4M_ILACE_MIXED) return Y4M_ERR_BADTAG;
      /* 1st character: field presentation order / progressive repeat */
      switch (value[0]) {
      case 't':  fi->presentation = Y4M_PRESENT_TOP_FIRST;        break;
      case 'T':  fi->presentation = Y4M_PRESENT_TOP_FIRST_RPT;    break;
      case 'b':  fi->presentation = Y4M_PRESENT_BOTTOM_FIRST;     break;
      case 'B':  fi->presentation = Y4M_PRESENT_BOTTOM_FIRST_RPT; break;
      case '1':  fi->presentation = Y4M_PRESENT_PROG_SINGLE;      break;
      case '2':  fi->presentation = Y4M_PRESENT_PROG_DOUBLE;      break;
      case '3':  fi->presentation = Y4M_PRESENT_PROG_TRIPLE;      break;
      default: 
	return Y4M_ERR_BADTAG;
      }
      /* 2nd character: temporal sampling (fields vs. frames) */
      switch (value[1]) {
      case 'p':  fi->temporal = Y4M_SAMPLING_PROGRESSIVE; break;
      case 'i':  fi->temporal = Y4M_SAMPLING_INTERLACED;  break;
      default: 
	return Y4M_ERR_BADTAG;
      }
      /* 3rd character: spatial (chroma) sampling; '?' means unknown */
      switch (value[2]) {
      case 'p':  fi->spatial = Y4M_SAMPLING_PROGRESSIVE; break;
      case 'i':  fi->spatial = Y4M_SAMPLING_INTERLACED;  break;
      case '?':  fi->spatial = Y4M_UNKNOWN;              break;
      default: 
	return Y4M_ERR_BADTAG;
      }
      break;
    case 'X':  /* 'X' meta-tag */
      if ((err = y4m_xtag_add(&(fi->x_tags), token)) != Y4M_OK) return err;
      break;
    default:
      /* possible error on unknown options */
      if (_y4mparam_allow_unknown_tags) {
	/* unknown tags ok:  store in xtag list and warn... */
	if ((err = y4m_xtag_add(&(fi->x_tags), token)) != Y4M_OK) return err;
	mjpeg_warn("Unknown frame tag encountered:  '%s'", token);
      } else {
	/* unknown tags are *not* ok */
	return Y4M_ERR_BADTAG;
      }
      break;
    }
  }
  /* error-checking and/or non-mixed defaults */
  switch (si->interlace) {
  case Y4M_ILACE_MIXED:
    /* T and P are required if stream "Im" */
    if ((fi->presentation == Y4M_UNKNOWN) || (fi->temporal == Y4M_UNKNOWN))
      return Y4M_ERR_HEADER;
    /* and S is required if stream is also 4:2:0 */
    if ( ((si->chroma == Y4M_CHROMA_420JPEG) ||
          (si->chroma == Y4M_CHROMA_420MPEG2) ||
          (si->chroma == Y4M_CHROMA_420PALDV)) &&
         (fi->spatial == Y4M_UNKNOWN) )
      return Y4M_ERR_HEADER;
    break;
  case Y4M_ILACE_NONE:
    /* stream "Ip" --> equivalent to frame "I1pp" */
    fi->spatial = Y4M_SAMPLING_PROGRESSIVE;
    fi->temporal = Y4M_SAMPLING_PROGRESSIVE;
    fi->presentation = Y4M_PRESENT_PROG_SINGLE;
    break;
  case Y4M_ILACE_TOP_FIRST:
    /* stream "It" --> equivalent to frame "Itii" */
    fi->spatial = Y4M_SAMPLING_INTERLACED;
    fi->temporal = Y4M_SAMPLING_INTERLACED;
    fi->presentation = Y4M_PRESENT_TOP_FIRST;
    break;
  case Y4M_ILACE_BOTTOM_FIRST:
    /* stream "Ib" --> equivalent to frame "Ibii" */
    fi->spatial = Y4M_SAMPLING_INTERLACED;
    fi->temporal = Y4M_SAMPLING_INTERLACED;
    fi->presentation = Y4M_PRESENT_BOTTOM_FIRST;
    break;
  default:
    /* stream unknown:  then, whatever */
    break;
  }
  /* ta da!  done. */
  return Y4M_OK;
}
Ejemplo n.º 17
0
/// Prefills the internal buffer for output multiplexing.
/// Scans ahead in the DTS elementary stream, splitting it into access
/// units (one per DTS frame) that are appended to 'aunits' with their
/// computed PTS/DTS, until 'frames_to_buffer' more frames have been
/// indexed, the bit-stream ends, or the mux's maximum PTS is exceeded.
/// @param frames_to_buffer the number of audio frames to read ahead
void DTSStream::FillAUbuffer(unsigned int frames_to_buffer )
{
    unsigned int packet_samples;

	last_buffered_AU += frames_to_buffer;
	mjpeg_debug( "Scanning %d dts audio frames to frame %d", 
				 frames_to_buffer, last_buffered_AU );

	while( !bs.eos() && decoding_order < last_buffered_AU 
            && !muxinto.AfterMaxPTS(access_unit.PTS) )
	{
		/* Skip the rest of the current frame's payload (its header was
		   already consumed), landing on the expected start of the next
		   frame. */
		int skip = access_unit.length - header_skip; 
        bs.SeekFwdBits(skip);
		prev_offset = AU_start;
		AU_start = bs.bitcount();

        /* If fewer bits than a whole frame were consumed, the stream
           ended mid-frame: drop the truncated final AU rather than mux
           a partial frame. */
        if( AU_start - prev_offset != access_unit.length*8 )
        {
            mjpeg_warn( "Discarding incomplete final frame dts stream %d!",
                       stream_num);
            aunits.DropLast();
            decoding_order--;
            break;
        }

		/* Check if we have reached the end or have  another catenated 
		   stream to process before finishing ... */
		if ( (syncword = bs.GetBits(32))!=DTS_SYNCWORD )
		{
			if( !bs.eos()   )
			{
				mjpeg_error_exit1( "Can't find next dts frame: @ %lld we have %04x - broken bit-stream?", AU_start/8, syncword );
            }
            break;
		}

        /* Parse the fixed header fields following the 32-bit syncword. */
        bs.GetBits(6);         // additional sync
        bs.GetBits(1);         // CRC
        packet_samples = (bs.GetBits(7) + 1) * 32;         // pcm samples
        framesize = bs.GetBits(14) + 1;        // frame size

        bs.GetBits(6);              // audio channels
        bs.GetBits(4);              // sample rate code
        bs.GetBits(5);              // bitrate
        bs.GetBits(5);              // misc.

        /* PTS = frame index * samples-per-frame, rescaled to mux clock
           ticks (CLOCKS per second); audio has no reordering so DTS==PTS. */
        access_unit.start = AU_start;
		access_unit.length = framesize;
		access_unit.PTS = static_cast<clockticks>(decoding_order) * 
			static_cast<clockticks>(packet_samples) * 
			static_cast<clockticks>(CLOCKS)	/ samples_per_second;
		access_unit.DTS = access_unit.PTS;
		access_unit.dorder = decoding_order;
		decoding_order++;
		aunits.Append( access_unit );
		num_frames++;

		num_syncword++;

		/* Progress report every 10 frame headers. */
		if (num_syncword >= old_frames+10 )
		{
			mjpeg_debug ("Got %d frame headers.", num_syncword);
			old_frames=num_syncword;
		}

    }
	last_buffered_AU = decoding_order;
	eoscan = bs.eos() || muxinto.AfterMaxPTS(access_unit.PTS);
}
int main(int argc, char **argv)
{
    int    i, c, interlace, frames, err;
    int    ywidth, yheight, uvwidth, uvheight, ylen, uvlen;
    int    verbose = 0, fdin;
    int    NlumaX = 4, NlumaY = 4, NchromaX = 4, NchromaY = 4;
    float  BWlumaX = 0.8, BWlumaY = 0.8, BWchromaX = 0.7, BWchromaY = 0.7;
    struct filter *lumaXtaps, *lumaYtaps, *chromaXtaps, *chromaYtaps;
    u_char *yuvinout[3];
    float *yuvtmp1,*yuvtmp2;
    y4m_stream_info_t istream, ostream;
    y4m_frame_info_t iframe;

    fdin = fileno(stdin);
    
    y4m_accept_extensions(1);

    /* read command line */
    opterr = 0;
    while   ((c = getopt(argc, argv, "hvL:C:x:X:y:Y:")) != EOF)
	{
	    switch  (c)
		{
		case    'L':
		    sscanf(optarg,"%d,%f,%d,%f",&NlumaX,&BWlumaX,&NlumaY,&BWlumaY);
		    break;
		case    'C':
		    sscanf(optarg,"%d,%f,%d,%f",&NchromaX,&BWchromaX,&NchromaY,&BWchromaY);
		    break;
		case    'x':
		    sscanf(optarg,"%d,%f",&NchromaX,&BWchromaX);
		    break;
		case    'X':
		    sscanf(optarg,"%d,%f",&NlumaX,&BWlumaX);
		    break;
		case    'y':
		    sscanf(optarg,"%d,%f",&NchromaY,&BWchromaY);
		    break;
		case    'Y':
		    sscanf(optarg,"%d,%f",&NlumaY,&BWlumaY);
		    break;
		case    'v':
		    verbose++;
		    break;
		case    '?':
		case    'h':
		default:
		    usage();
		}
	}
    
    if (BWlumaX <= 0.0 || BWlumaX > 1.0)
       mjpeg_error_exit1("Horizontal luma bandwidth '%f' not >0 and <=1.0", BWlumaX);
    if (BWlumaY <= 0.0 || BWlumaY > 1.0)
       mjpeg_error_exit1("Vertical luma bandwidth '%f' not >0 and <=1.0", BWlumaY);
    if (BWchromaX <= 0.0 || BWchromaX > 1.0)
       mjpeg_error_exit1("Horizontal chroma bandwidth '%f' not >0 and <=1.0", BWchromaX);
    if (BWchromaY <= 0.0 || BWchromaY > 1.0)
       mjpeg_error_exit1("Vertical chroma bandwidth '%f' not >0 and <=1.0", BWchromaY);

    /* initialize input stream and check chroma subsampling and interlacing */
    y4m_init_stream_info(&istream);
    y4m_init_frame_info(&iframe);
    err = y4m_read_stream_header(fdin, &istream);
    if (err != Y4M_OK)
	mjpeg_error_exit1("Input stream error: %s\n", y4m_strerr(err));

    if	(y4m_si_get_plane_count(&istream) != 3)
	mjpeg_error_exit1("Only the 3 plane formats supported");

    i = y4m_si_get_interlace(&istream);
    switch (i)
        {
        case Y4M_ILACE_NONE:
	    interlace = 0;
	    break;
        case Y4M_ILACE_BOTTOM_FIRST:
        case Y4M_ILACE_TOP_FIRST:
	    interlace = 1;
	    break;
        default:
	    mjpeg_warn("Unknown interlacing '%d', assuming non-interlaced", i);
	    interlace = 0;
	    break;
        }

    ywidth = y4m_si_get_width(&istream);	/* plane 0 = Y */
    yheight = y4m_si_get_height(&istream);
    ylen = ywidth * yheight;
    uvwidth = y4m_si_get_plane_width(&istream, 1);	/* planes 1&2 = U+V */
    uvheight = y4m_si_get_plane_height(&istream, 1);
    uvlen = y4m_si_get_plane_length(&istream, 1);
    
    /* initialize output stream */
    y4m_init_stream_info(&ostream);
    y4m_copy_stream_info(&ostream, &istream);
    y4m_write_stream_header(fileno(stdout), &ostream);
    
    /* allocate input and output buffers */
    yuvinout[0] = my_malloc(ylen*sizeof(u_char));
    yuvinout[1] = my_malloc(uvlen*sizeof(u_char));
    yuvinout[2] = my_malloc(uvlen*sizeof(u_char));
    yuvtmp1 = my_malloc(MAX(ylen,uvlen)*sizeof(float));
    yuvtmp2 = my_malloc(MAX(ylen,uvlen)*sizeof(float));

    /* get filter taps */
    lumaXtaps   = get_coeff(NlumaX, BWlumaX);
    lumaYtaps   = get_coeff(NlumaY, BWlumaY);
    chromaXtaps = get_coeff(NchromaX, BWchromaX);
    chromaYtaps = get_coeff(NchromaY, BWchromaY);

    set_accel(uvwidth,uvheight);

    if (verbose)
	y4m_log_stream_info(mjpeg_loglev_t("info"), "", &istream);
    
    /* main processing loop */
    for (frames=0; y4m_read_frame(fdin,&istream,&iframe,yuvinout) == Y4M_OK; frames++)
	{
	    if (verbose && ((frames % 100) == 0))
		mjpeg_info("Frame %d\n", frames);
	    
            convolveFrame(yuvinout[0],ywidth,yheight,interlace,lumaXtaps,lumaYtaps,yuvtmp1,yuvtmp2);
            convolveFrame(yuvinout[1],uvwidth,uvheight,interlace,chromaXtaps,chromaYtaps,yuvtmp1,yuvtmp2);
            convolveFrame(yuvinout[2],uvwidth,uvheight,interlace,chromaXtaps,chromaYtaps,yuvtmp1,yuvtmp2);

	    y4m_write_frame(fileno(stdout), &ostream, &iframe, yuvinout);

	}
    
    /* clean up */
    y4m_fini_frame_info(&iframe);
    y4m_fini_stream_info(&istream);
    y4m_fini_stream_info(&ostream);
    exit(0);
}
Ejemplo n.º 19
0
/* SIGINT handler: log a warning and raise the global flag so the main
 * loop can shut down cleanly.  The parameter was renamed from 'signal',
 * which shadowed the standard signal() function. */
static void sigint_handler (int signum) {
   (void)signum;   /* only installed for SIGINT; value not needed */
   mjpeg_warn("Caught SIGINT, exiting...");
   got_sigint = 1;
}
Ejemplo n.º 20
0
/* Crossfade driver: reads frame pairs from two interleaved YUV4MPEG2
 * streams on stdin and writes a blend of each pair to stdout, ramping the
 * opacity of the second input from -o to -O over -d frames.  -s skips
 * initial frames, -n limits the count, -r repeats each blended pair.
 *
 * Fixes: the mandatory -d check now precedes the unsigned subtraction
 * (param_duration - param_skipframes), which underflowed when -d was
 * missing; and a one-frame transition (-d 1) no longer divides by zero
 * in the opacity interpolation.
 */
int main (int argc, char *argv[])
{
   int verbose = 1;
   int in_fd  = 0;         /* stdin */
   int out_fd = 1;         /* stdout */
   unsigned char *yuv0[3]; /* input 0 */
   unsigned char *yuv1[3]; /* input 1 */
   unsigned char *yuv[3];  /* output */
   int w, h, len, lensr2;
   int i, j, opacity, opacity_range, frame, numframes, r = 0;
   unsigned int param_opacity0   = 0;     /* opacity of input1 at the beginning */
   unsigned int param_opacity1   = 255;   /* opacity of input1 at the end */
   unsigned int param_duration   = 0;     /* duration of transistion effect */
   unsigned int param_skipframes = 0;     /* # of frames to skip */
   unsigned int param_numframes  = 0;     /* # of frames to (process - skip+num) * framerepeat <= duration */
   unsigned int param_framerep   = 1;    /* # of repititions per frame */
   y4m_stream_info_t streaminfo;
   y4m_frame_info_t frameinfo;

   y4m_init_stream_info (&streaminfo);
   y4m_init_frame_info (&frameinfo);

   while ((i = getopt(argc, argv, "v:o:O:d:s:n:r:")) != -1) {
      switch (i) {
      case 'v':
         verbose = atoi (optarg);
		 if( verbose < 0 || verbose >2 )
		 {
			 usage ();
			 exit (1);
		 }
         break;		  
      case 'o':
         param_opacity0 = atoi (optarg);
         if (param_opacity0 > 255) {
            mjpeg_warn( "start opacity > 255");
            param_opacity0 = 255;
         }
         break;
      case 'O':
         param_opacity1 = atoi (optarg);
         if (param_opacity1 > 255) {
            mjpeg_warn( "end opacity > 255");
            param_opacity1 = 255;
         }
         break;
      case 'd':
         param_duration = atoi (optarg);
         if (param_duration == 0) {
            mjpeg_error_exit1( "error: duration = 0 frames");
         }
         break;
      case 's':
         param_skipframes = atoi (optarg);
         break;
      case 'n':
         param_numframes = atoi (optarg);
         break;
      case 'r':
         param_framerep = atoi (optarg);
         break;
      }
   }
   /* -d is mandatory: check it *before* deriving numframes, so the
      unsigned subtraction below cannot underflow on a missing duration */
   if (param_duration == 0) {
      usage ();
      exit (1);
   }
   if (param_numframes == 0)
      param_numframes = (param_duration - param_skipframes) / param_framerep;
   numframes = (param_skipframes + param_numframes) * param_framerep;
   if (numframes > param_duration) {
      mjpeg_error_exit1( "skip + num > duration");
   }

   (void)mjpeg_default_handler_verbosity(verbose);


   i = y4m_read_stream_header (in_fd, &streaminfo);
   if (i != Y4M_OK) {
      fprintf (stderr, "%s: input stream error - %s\n", 
	       argv[0], y4m_strerr(i));
      exit (1);
   }
   w = y4m_si_get_width(&streaminfo);
   h = y4m_si_get_height(&streaminfo);
   
   /* 4:2:0 layout: chroma planes are a quarter of the luma plane size */
   len = w*h;
   lensr2 = len >> 2;
   yuv[0] = malloc (len);
   yuv0[0] = malloc (len);
   yuv1[0] = malloc (len);
   yuv[1] = malloc (lensr2);
   yuv0[1] = malloc (lensr2);
   yuv1[1] = malloc (lensr2);
   yuv[2] = malloc (lensr2); 
   yuv0[2] = malloc (lensr2); 
   yuv1[2] = malloc (lensr2);

   y4m_write_stream_header (out_fd, &streaminfo);

   frame = param_skipframes;
   /* Interpolate over duration-1 steps so that the last frame reaches
      param_opacity1 exactly; guard the divisor against -d 1, which would
      otherwise cause a division by zero below. */
   param_duration--;
   if (param_duration == 0)
      param_duration = 1;
   opacity_range = param_opacity1 - param_opacity0;
   while (1) {

      if (!r) {
         r = param_framerep;

         /* read the next frame from each interleaved input; a short read
            before numframes is an error exit (status 1) */
         i = y4m_read_frame(in_fd, &streaminfo, &frameinfo, yuv0);
         if (i != Y4M_OK)
            exit (frame < numframes);

         j = y4m_read_frame(in_fd, &streaminfo, &frameinfo, yuv1);
         if (j != Y4M_OK)
            exit (frame < numframes);
      }
      r--;

      /* linear ramp of input1's opacity across the transition */
      opacity = param_opacity0 + ((frame * opacity_range) / param_duration);

      blend (yuv0, yuv1, opacity, len, yuv);
      y4m_write_frame (out_fd, &streaminfo, &frameinfo, yuv);
      if (++frame == numframes)
         exit (0);
   }

}