void DTSStream::Init ( const int _stream_num)

{
    stream_num = _stream_num;
	MuxStream::Init( PRIVATE_STR_1, 
					 1,  // Buffer scale
					 default_buffer_size,
					 false,
					 muxinto.buffers_in_audio,
					 muxinto.always_buffers_in_audio
		);
    mjpeg_info ("Scanning for header info: dts Audio stream %02x (%s)",
                stream_num,
                bs.StreamName()
                );

	AU_start = bs.bitcount();
    if (bs.GetBits(32)==DTS_SYNCWORD)
    {
		num_syncword++;
        bs.GetBits(6);         // additional sync
        bs.GetBits(1);         // CRC
        bs.GetBits(7);         // pcm samples
        framesize = bs.GetBits(14) + 1;        // frame size

        bs.GetBits(6);         // audio channels
        frequency = bs.GetBits(4);  // sample rate code
        bit_rate = dts_bitrate_index[bs.GetBits(5)];
        bs.GetBits(5);              // misc.

        header_skip = 10;        // Initially skipped past 10 bytes of header 

		num_frames++;
        access_unit.start = AU_start;
		access_unit.length = framesize;
        mjpeg_info( "dts frame size = %d", framesize );
		samples_per_second = dts_frequency[frequency];

		/* Presentation time-stamping  */
		access_unit.PTS = static_cast<clockticks>(decoding_order) * 
			static_cast<clockticks>(DTS_PACKET_SAMPLES) * 
			static_cast<clockticks>(CLOCKS)	/ samples_per_second;
		access_unit.DTS = access_unit.PTS;
		access_unit.dorder = decoding_order;
		++decoding_order;
		aunits.Append( access_unit );

    } else
    {
		mjpeg_error ( "Invalid dts Audio stream header.");
		exit (1);
    }
	OutputHdrInfo();
}
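A quick standalone check of the time-stamping arithmetic above. This is a sketch, assuming clockticks is a 64-bit tick count and CLOCKS is the tick rate per second; the 27000000 and 1536 used below are illustrative values, not taken from this code:

#include <stdint.h>

/* Sketch of the PTS rule above: frame n begins after n * samples_per_frame
 * audio samples, and samples convert to clock ticks as
 * samples * ticks_per_second / samples_per_second. */
static int64_t dts_pts_sketch(unsigned frame_no,
                              unsigned samples_per_frame,  /* e.g. 1536 */
                              unsigned ticks_per_second,   /* e.g. 27000000 */
                              unsigned samples_per_second) /* e.g. 48000 */
{
    return (int64_t)frame_no * samples_per_frame
        * ticks_per_second / samples_per_second;
}

/* dts_pts_sketch(1, 1536, 27000000, 48000) == 864000 ticks (32 ms). */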
Example #2
static void input(int type, char *message)
{
    switch (type)
    {
    case LAVPLAY_MSG_ERROR:
        mjpeg_error("%s", message);
        break;
    case LAVPLAY_MSG_WARNING:
        mjpeg_warn("%s", message);
        break;
    case LAVPLAY_MSG_INFO:
        mjpeg_info("%s", message);
        break;
    case LAVPLAY_MSG_DEBUG:
        mjpeg_debug("%s", message);
        break;
    }
}
Example #3
static
void parse_args(cl_info_t *cl, int argc, char **argv)
{
  int c;

  cl->offset = 0;
  cl->framecount = 0;
  cl->aspect = y4m_sar_SQUARE;
  cl->interlace = Y4M_ILACE_NONE;
  cl->framerate = y4m_fps_NTSC;
  cl->interleave = 0;
  cl->repeatlast = 0;
  cl->ss_mode = DEFAULT_CHROMA_MODE;
  cl->verbosity = 1;
  cl->bgr = 0;
  cl->fdin = fileno(stdin); /* default to stdin */

  while ((c = getopt(argc, argv, "BA:F:I:Lo:n:rS:v:h")) != -1) {
    switch (c) {
    case 'A':
      if (y4m_parse_ratio(&(cl->aspect), optarg) != Y4M_OK) {
	mjpeg_error("Could not parse ratio:  '%s'", optarg);
	goto ERROR_EXIT;
      }
      break;
    case 'F':
      if (y4m_parse_ratio(&(cl->framerate), optarg) != Y4M_OK) {
	mjpeg_error("Could not parse ratio:  '%s'", optarg);
	goto ERROR_EXIT;
      }
      break;
    case 'I':
      switch (optarg[0]) {
      case 'p':  cl->interlace = Y4M_ILACE_NONE;  break;
      case 't':  cl->interlace = Y4M_ILACE_TOP_FIRST;  break;
      case 'b':  cl->interlace = Y4M_ILACE_BOTTOM_FIRST;  break;
      default:
	mjpeg_error("Unknown value for interlace: '%c'", optarg[0]);
	goto ERROR_EXIT;
	break;
      }
      break;
    case 'L':
      cl->interleave = 1;
      break;
    case 'B':
      cl->bgr = 1;
      break;
    case 'o':
      if ((cl->offset = atoi(optarg)) < 0)
	mjpeg_error_exit1("Offset must be >= 0:  '%s'", optarg);
      break;
    case 'n':
      if ((cl->framecount = atoi(optarg)) < 0)
	mjpeg_error_exit1("Frame count must be >= 0:  '%s'", optarg);
      break;
    case 'r':
      cl->repeatlast = 1;
      break;
    case 'S':
      cl->ss_mode = y4m_chroma_parse_keyword(optarg);
      if (cl->ss_mode == Y4M_UNKNOWN) {
	mjpeg_error("Unknown subsampling mode option:  %s", optarg);
	goto ERROR_EXIT;
      } else if (!chroma_sub_implemented(cl->ss_mode)) {
	mjpeg_error("Unsupported subsampling mode option:  %s", optarg);
	goto ERROR_EXIT;
      }
      break;
    case 'v':
      cl->verbosity = atoi(optarg);
      if ((cl->verbosity < 0) || (cl->verbosity > 2))
	mjpeg_error("Verbosity must be 0, 1, or 2:  '%s'", optarg);
      break;
    case 'h':
      usage(argv[0]);
      exit(0);
      break;
    case '?':
    default:
      goto ERROR_EXIT;
      break;
    }
  }
  /* optional remaining argument is a filename */
  if (optind == (argc - 1)) {
    if ((cl->fdin = open(argv[optind], O_RDONLY | O_BINARY)) == -1)
      mjpeg_error_exit1("Failed to open '%s':  %s",
			argv[optind], strerror(errno));
  } else if (optind != argc) 
    goto ERROR_EXIT;


  mjpeg_default_handler_verbosity(cl->verbosity);

  mjpeg_info("Command-line Parameters:");
  mjpeg_info("             framerate:  %d:%d",
	     cl->framerate.n, cl->framerate.d);
  mjpeg_info("    pixel aspect ratio:  %d:%d",
	     cl->aspect.n, cl->aspect.d);
  mjpeg_info("         pixel packing:  %s",
	     cl->bgr?"BGR":"RGB");
  mjpeg_info("             interlace:  %s%s",
	     mpeg_interlace_code_definition(cl->interlace),
	     (cl->interlace == Y4M_ILACE_NONE) ? "" :
	     (cl->interleave) ? " (interleaved PPM input)" :
	     " (field-sequential PPM input)");
  mjpeg_info("        starting frame:  %d", cl->offset);
  if (cl->framecount == 0)
    mjpeg_info("           # of frames:  all%s",
	       (cl->repeatlast) ? ", repeat last frame forever" :
	       ", until input exhausted");
  else
    mjpeg_info("           # of frames:  %d%s",
	       cl->framecount,
	       (cl->repeatlast) ? ", repeat last frame until done" :
	       ", or until input exhausted");
  mjpeg_info("    chroma subsampling:  %s",
	     y4m_chroma_description(cl->ss_mode));

  /* DONE! */
  return;

 ERROR_EXIT:
  mjpeg_error("For usage hints, use option '-h'.  Please take a hint.");
  exit(1);

}
Example #4
static int set_option(const char *name, char *value)
{
    /* return 1 means error, return 0 means okay */
    int nerr = 0;
    if (strcmp(name, "verbose")==0 || strcmp(name, "v")==0)
    {
        verbose = atoi(value);
    }
    else if (strcmp(name, "audio")==0 || strcmp(name, "a")==0)
    {
        info->audio = atoi(value);
    }
    else if (strcmp(name, "use-write")==0 || strcmp(name, "U")==0)
    {
        info->use_write = 1;
    }
    else if (strcmp(name, "H-offset")==0 || strcmp(name, "H")==0)
    {
        info->horizontal_offset = atoi(value);
    }
    else if (strcmp(name, "V-offset")==0 || strcmp(name, "V")==0)
    {
        info->vertical_offset = atoi(value);
    }
    else if (strcmp(name, "skip")==0 || strcmp(name, "s")==0)
    {
        skip_seconds = atoi(value);
        if (skip_seconds<0) skip_seconds = 0;
    }
    else if (strcmp(name, "synchronization")==0 || strcmp(name, "c")==0)
    {
        info->sync_correction = atoi(value);
    }
    else if (strcmp(name, "mjpeg-buffers")==0 || strcmp(name, "n")==0)
    {
        info->MJPG_numbufs = atoi(value);
        if (info->MJPG_numbufs<4) info->MJPG_numbufs = 4;
        if (info->MJPG_numbufs>256) info->MJPG_numbufs = 256;
    }
    else if (strcmp(name, "no-quit")==0 || strcmp(name, "q")==0)
    {
        info->continuous = 1;
    }
    else if (strcmp(name, "exchange-fields")==0 || strcmp(name, "x")==0)
    {
        info->exchange_fields = 1;
    }
    else if (strcmp(name, "zoom")==0 || strcmp(name, "z")==0)
    {
        info->zoom_to_fit = 1;
    }
    else if (strcmp(name, "gui-mode")==0 || strcmp(name, "g")==0)
    {
        gui_mode = 1;
    }
    else if (strcmp(name, "full-screen")==0 || strcmp(name, "Z")==0)
    {
        info->soft_full_screen = 1;
    }
    else if( strcmp( name, "preserve-pathnames") == 0 || strcmp(name, "P")==0)
    {
        info->preserve_pathnames = 1;
    }
    else if (strcmp(name, "playback")==0 || strcmp(name, "p")==0)
    {
        switch (value[0])
        {
#ifdef HAVE_V4L
        case 'H':
        case 'C':
#endif
#ifdef HAVE_SDL
        case 'S':
#endif
            info->playback_mode = value[0];
            break;
        default:
            mjpeg_error("Unknown playback mode: \'%c\'", value[0]);
            nerr++;
            break;
        }
    }
    else if (strcmp(name, "size")==0 || strcmp(name, "S")==0)
    {
        if (sscanf(value, "%dx%d", &info->sdl_width, &info->sdl_height)!=2)
        {
            mjpeg_error( "--size parameter requires NxN argument");
            nerr++;
        }
    }
    else if (strcmp(name, "flicker")==0 || strcmp(name, "F")==0)
    {
        info->flicker_reduction = 0;
    }
    else if (strcmp(name, "display")==0)
    {
        info->display = value;
    }
    else if (strcmp(name, "s-x-offset")==0)
    {
        info->vw_x_offset = atoi(value);
    }
    else if (strcmp(name, "s-y-offset")==0)
    {
        info->vw_y_offset = atoi(value);
    }
    else nerr++; /* unknown option - error */

    return nerr;
}
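A hedged sketch of how set_option() is typically driven, assuming a getopt_long()-style loop; the option table below is illustrative, not this program's real table. Note the body above now consistently reads its value parameter rather than the optarg global:

#include <getopt.h>

static const struct option demo_opts[] = {   /* illustrative entries only */
    { "verbose",  required_argument, NULL, 'v' },
    { "playback", required_argument, NULL, 'p' },
    { NULL, 0, NULL, 0 }
};

static int parse_options_sketch(int argc, char **argv)
{
    int c, idx, nerr = 0;
    while ((c = getopt_long(argc, argv, "v:p:", demo_opts, &idx)) != -1) {
        char shortname[2] = { (char)c, '\0' };
        nerr += set_option(shortname, optarg);  /* optarg passed as 'value' */
    }
    return nerr;   /* non-zero means at least one bad option */
}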
Example #5
int
main(int argc, char **argv)
	{
	int	fdin, fdout, err, c, i, verbose = 1;
	y4m_stream_info_t istream, ostream;
	y4m_frame_info_t iframe;

	fdin = fileno(stdin);
	fdout = fileno(stdout);

	y4m_accept_extensions(1);
	y4m_init_stream_info(&istream);
	y4m_init_frame_info(&iframe);

	while	((c = getopt(argc, argv, "L:C:hv:N")) != EOF)
		{
		switch	(c)
			{
			case	'N':
				lowuv = lowy = 0;
				highuv = highy = 255;
				break;
			case	'L':
				i = sscanf(optarg, "%lf,%lf,%d", &y_radius, 
						&y_amount, &y_threshold);
				if	(i != 3)
					{
					mjpeg_error("-L r,a,t");
					usage(argv[0]);
					}
				break;
			case	'C':
				i = sscanf(optarg, "%lf,%lf,%d", &uv_radius,
						&uv_amount, &uv_threshold);
				if	(i != 3)
					{
					mjpeg_error("-C r,a,t");
					usage(argv[0]);
					}
				break;
			case	'v':
				verbose = atoi(optarg);
				if	(verbose < 0 || verbose > 2)
					mjpeg_error_exit1("-v 0|1|2");
				break;
			case	'h':
			default:
				usage(argv[0]);
				break;
			}
		}

	if	(isatty(fdout))
		mjpeg_error_exit1("stdout must not be a terminal");

	mjpeg_default_handler_verbosity(verbose);

	err = y4m_read_stream_header(fdin, &istream);
	if	(err != Y4M_OK)
		mjpeg_error_exit1("Couldn't read input stream header");

	switch	(y4m_si_get_interlace(&istream))
		{
		case	Y4M_ILACE_NONE:
			interlaced = 0;
			break;
		case	Y4M_ILACE_BOTTOM_FIRST:
		case	Y4M_ILACE_TOP_FIRST:
			interlaced = 1;
			break;
		default:
			mjpeg_error_exit1("Unsupported/unknown interlacing");
		}

	if	(y4m_si_get_plane_count(&istream) != 3)
		mjpeg_error_exit1("Only 3 plane formats supported");

	yheight = y4m_si_get_plane_height(&istream, 0);
	uvheight = y4m_si_get_plane_height(&istream, 1);
	ywidth = y4m_si_get_plane_width(&istream, 0);
	uvwidth = y4m_si_get_plane_width(&istream, 1);
	ylen = y4m_si_get_plane_length(&istream, 0);
	uvlen = y4m_si_get_plane_length(&istream, 1);

/* Input and output frame buffers */
	i_yuv[0] = (u_char *)malloc(ylen);
	i_yuv[1] = (u_char *)malloc(uvlen);
	i_yuv[2] = (u_char *)malloc(uvlen);
	o_yuv[0] = (u_char *)malloc(ylen);
	o_yuv[1] = (u_char *)malloc(uvlen);
	o_yuv[2] = (u_char *)malloc(uvlen);

/*
 * general purpose row/column scratch buffers.  Slightly over allocated to
 * simplify life.
*/
	cur_col = (u_char *)malloc(MAX(ywidth, yheight));
	dest_col = (u_char *)malloc(MAX(ywidth, yheight));
	cur_row = (u_char *)malloc(MAX(ywidth, yheight));
	dest_row = (u_char *)malloc(MAX(ywidth, yheight));

/*
 * Generate the convolution matrices.  The generation routine allocates the
 * memory and returns the length.
*/
	cmatrix_y_len = gen_convolve_matrix(y_radius, &cmatrix_y);
	cmatrix_uv_len = gen_convolve_matrix(uv_radius, &cmatrix_uv);
	ctable_y = gen_lookup_table(cmatrix_y, cmatrix_y_len);
	ctable_uv = gen_lookup_table(cmatrix_uv, cmatrix_uv_len);

	y4m_init_stream_info(&ostream);
	y4m_copy_stream_info(&ostream, &istream);
	y4m_write_stream_header(fdout, &ostream);

	mjpeg_info("Luma radius: %f", y_radius);
	mjpeg_info("Luma amount: %f", y_amount);
	mjpeg_info("Luma threshold: %d", y_threshold);
	if	(uv_radius != -1.0)
		{
		mjpeg_info("Chroma radius: %f", uv_radius);
		mjpeg_info("Chroma amount: %f", uv_amount);
		mjpeg_info("Chroma threshold: %d", uv_threshold);
		}

	for	(frameno = 0; y4m_read_frame(fdin, &istream, &iframe, i_yuv) == Y4M_OK; frameno++)
		{
		y4munsharp();
		err = y4m_write_frame(fdout, &ostream, &iframe, o_yuv);
		if	(err != Y4M_OK)
			{
			mjpeg_error("y4m_write_frame err at frame %d", frameno);
			break;
			}
		}
	y4m_fini_frame_info(&iframe);
	y4m_fini_stream_info(&istream);
	y4m_fini_stream_info(&ostream);
	exit(0);
	}
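y4munsharp() itself is not part of this listing. A minimal sketch of the per-pixel unsharp-mask step it presumably applies with the convolution matrices generated above; amount and threshold correspond to the -L/-C a,t values, and low/high to the lowy/highy (or lowuv/highuv) clipping range:

#include <stdlib.h>     /* abs() */
#include <sys/types.h>  /* u_char */

static u_char unsharp_pixel_sketch(u_char orig, u_char blurred,
                                   double amount, int threshold,
                                   int low, int high)
{
    int diff = (int)orig - (int)blurred;   /* the "detail" signal */
    if (abs(diff) < threshold)
        return orig;                       /* leave near-flat areas alone */
    int v = orig + (int)(amount * diff);   /* boost edges by 'amount' */
    if (v < low)  v = low;                 /* clip to the legal range */
    if (v > high) v = high;
    return (u_char)v;
}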
Example #6
int main(int argc, char *argv[])
{
    AVFormatContext *pFormatCtx;
	AVInputFormat *avif = NULL;
    int             i, videoStream;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;
    AVFrame         *pFrame; 
    AVFrame         *pFrame444; 
    AVPacket        packet;
    int             frameFinished;
    int             numBytes;
    uint8_t         *buffer;

	int fdOut = 1 ;
	int yuv_interlacing = Y4M_UNKNOWN;
	int yuv_ss_mode = Y4M_UNKNOWN;
	y4m_ratio_t yuv_frame_rate;
	y4m_ratio_t yuv_aspect;
// need something for chroma subsampling type.
	int write_error_code;
	int header_written = 0;
	int convert = 0;
	int stream = 0;
	enum PixelFormat convert_mode;

        const static char *legal_flags = "chI:F:A:S:o:s:f:";

	int y;
	int                frame_data_size ;
	uint8_t            *yuv_data[3] ;      

	y4m_stream_info_t streaminfo;
        y4m_frame_info_t frameinfo;

        y4m_init_stream_info(&streaminfo);
        y4m_init_frame_info(&frameinfo);

	yuv_frame_rate.d = 0;
	yuv_aspect.d = 0;

    // Register all formats and codecs
    av_register_all();

while ((i = getopt (argc, argv, legal_flags)) != -1) {
    switch (i) {
	    case 'I':
		switch (optarg[0]) {
		      case 'p':  yuv_interlacing = Y4M_ILACE_NONE;  break;
		      case 't':  yuv_interlacing = Y4M_ILACE_TOP_FIRST;  break;
		      case 'b':  yuv_interlacing = Y4M_ILACE_BOTTOM_FIRST;  break;
		      default:
			mjpeg_error("Unknown value for interlace: '%c'", optarg[0]);
			return -1;
			break;
		}

		break;
        case 'F':
          if( Y4M_OK != y4m_parse_ratio(&yuv_frame_rate, optarg) )
              mjpeg_error_exit1 ("Syntax for frame rate should be Numerator:Denominator");

                break;
	case 'A':
          if( Y4M_OK != y4m_parse_ratio(&yuv_aspect, optarg) ) {
			if (!strcmp(optarg,PAL)) {
				y4m_parse_ratio(&yuv_aspect, "128:117");
			} else if (!strcmp(optarg,PAL_WIDE)) {
				y4m_parse_ratio(&yuv_aspect, "640:351");
			} else if (!strcmp(optarg,NTSC)) {
				y4m_parse_ratio(&yuv_aspect, "4320:4739");
			} else if (!strcmp(optarg,NTSC_WIDE)) {
				y4m_parse_ratio(&yuv_aspect, "5760:4739");
			} else {
              mjpeg_error_exit1 ("Syntax for aspect ratio should be Numerator:Denominator");
			}
		}
			break;
	case 'S':
		yuv_ss_mode = y4m_chroma_parse_keyword(optarg);
		if (yuv_ss_mode == Y4M_UNKNOWN) {
			mjpeg_error("Unknown subsampling mode option:  %s", optarg);
			mjpeg_error("Try: 420mpeg2 444 422 411");
			return -1;
		}
		break;
	case 'o':
		fdOut = open (optarg,O_CREAT|O_WRONLY,0644);
		if (fdOut == -1) {
		      mjpeg_error_exit1 ("Cannot open file for writing");
		}
		break;	
	case 'c':
		convert = 1;
		break;
	case 's':
		stream = atoi(optarg);
		break;
	case 'f':
		avif = av_find_input_format	(optarg);
		break;
	case 'h':
	case '?':
          print_usage (argv);
          return 0 ;
          break;
    }
  }

	//fprintf (stderr,"optind: %d\n",optind);
	optind--;
	argc -= optind;
	argv += optind;

	if (argc == 1) {
          print_usage (argv);
          return 0 ;
	}

    // Open video file
    if(av_open_input_file(&pFormatCtx, argv[1], avif, 0, NULL)!=0)
        return -1; // Couldn't open file

    // Retrieve stream information
    if(av_find_stream_info(pFormatCtx)<0)
        return -1; // Couldn't find stream information

    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, argv[1], 0);

    // Find the first video stream
    videoStream=-1;
    for(i=0; i<pFormatCtx->nb_streams; i++)
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO)
        {
	// mark debug
	//fprintf (stderr,"Video Codec ID: %d (%s)\n",pFormatCtx->streams[i]->codec->codec_id ,pFormatCtx->streams[i]->codec->codec_name);
			if (videoStream == -1 && stream == 0) {
			// May still be overridden by the -s option
				videoStream=i;
			}
			if (stream == i) {
				videoStream=i;
				break;
			}
        }
    if(videoStream==-1)
        return -1; // Didn't find a video stream

    // Get a pointer to the codec context for the video stream
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;

    // Find the decoder for the video stream
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL)
        return -1; // Codec not found

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
        return -1; // Could not open codec

// Read framerate, aspect ratio and chroma subsampling from Codec
	if (yuv_frame_rate.d == 0) {
		yuv_frame_rate.n = pFormatCtx->streams[videoStream]->r_frame_rate.num;
		yuv_frame_rate.d = pFormatCtx->streams[videoStream]->r_frame_rate.den;
	}
	if (yuv_aspect.d == 0) {
		yuv_aspect.n = pCodecCtx-> sample_aspect_ratio.num;
		yuv_aspect.d = pCodecCtx-> sample_aspect_ratio.den;
	}

// 0:0 is an invalid aspect ratio default to 1:1
	if (yuv_aspect.d == 0 || yuv_aspect.n == 0 ) {
		yuv_aspect.n=1;
		yuv_aspect.d=1;
	}
	if (convert) {
	        if (yuv_ss_mode == Y4M_UNKNOWN) {
			print_usage(argv);
			return 0;	
		} else {
			y4m_accept_extensions(1);
			switch (yuv_ss_mode) {
			case Y4M_CHROMA_420MPEG2: convert_mode = PIX_FMT_YUV420P; break;
			case Y4M_CHROMA_422: convert_mode = PIX_FMT_YUV422P; break;
			case Y4M_CHROMA_444: convert_mode = PIX_FMT_YUV444P; break;
			case Y4M_CHROMA_411: convert_mode = PIX_FMT_YUV411P; break;
			case Y4M_CHROMA_420JPEG: convert_mode = PIX_FMT_YUVJ420P; break;
			default:
				mjpeg_error_exit1("Cannot convert to this chroma mode");
				break;

			}
		}
	} else if (yuv_ss_mode == Y4M_UNKNOWN) {
		switch (pCodecCtx->pix_fmt) {
		case PIX_FMT_YUV420P: yuv_ss_mode=Y4M_CHROMA_420MPEG2; break;
		case PIX_FMT_YUV422P: yuv_ss_mode=Y4M_CHROMA_422; break;
		case PIX_FMT_YUV444P: yuv_ss_mode=Y4M_CHROMA_444; break;
		case PIX_FMT_YUV411P: yuv_ss_mode=Y4M_CHROMA_411; break;
		case PIX_FMT_YUVJ420P: yuv_ss_mode=Y4M_CHROMA_420JPEG; break;
		default:
			yuv_ss_mode=Y4M_CHROMA_444; 
			convert_mode = PIX_FMT_YUV444P;
			mjpeg_warn("Unsupported chroma mode; upsampling to YUV444");
		// enable advanced yuv stream
			y4m_accept_extensions(1);
			convert = 1;
			break;
		}
	}


    // Allocate video frame
    pFrame=avcodec_alloc_frame();

    // Output YUV format details
	mjpeg_info("YUV Aspect Ratio: %d:%d", yuv_aspect.n, yuv_aspect.d);
	mjpeg_info("YUV frame rate: %d:%d", yuv_frame_rate.n, yuv_frame_rate.d);
	mjpeg_info("YUV Chroma Subsampling: %d", yuv_ss_mode);
	
    // Set the YUV stream details
    // Interlace is handled when the first frame is read.
	y4m_si_set_sampleaspect(&streaminfo, yuv_aspect);
	y4m_si_set_framerate(&streaminfo, yuv_frame_rate);
	y4m_si_set_chroma(&streaminfo, yuv_ss_mode);

	// Loop until nothing read
    while(av_read_frame(pFormatCtx, &packet)>=0)
    {
        // Is this a packet from the video stream?
        if(packet.stream_index==videoStream)
        {
            // Decode video frame
            avcodec_decode_video(pCodecCtx, pFrame, &frameFinished, 
                packet.data, packet.size);

            // Did we get a video frame?
            if(frameFinished)
            {
                // Save the frame to disk

	// As we don't know interlacing until the first frame
	// we wait until the first frame is read before setting the interlace flag
	// and outputting the YUV header
	// It also appears that some codecs don't set width or height until the first frame either
		if (!header_written) {
			if (yuv_interlacing == Y4M_UNKNOWN) {
				if (pFrame->interlaced_frame) {
					if (pFrame->top_field_first) {
						yuv_interlacing = Y4M_ILACE_TOP_FIRST;
					} else {
						yuv_interlacing = Y4M_ILACE_BOTTOM_FIRST;
					}
				} else {
					yuv_interlacing = Y4M_ILACE_NONE;
				}
			}
			if (convert) {
				// initialise conversion to different chroma subsampling
				pFrame444=avcodec_alloc_frame();
				numBytes=avpicture_get_size(convert_mode, pCodecCtx->width, pCodecCtx->height);
				buffer=(uint8_t *)malloc(numBytes);
				avpicture_fill((AVPicture *)pFrame444, buffer, convert_mode, pCodecCtx->width, pCodecCtx->height);
			}

			y4m_si_set_interlace(&streaminfo, yuv_interlacing);
			y4m_si_set_width(&streaminfo, pCodecCtx->width);
			y4m_si_set_height(&streaminfo, pCodecCtx->height);


			chromalloc(yuv_data,&streaminfo);

			fprintf (stderr,"YUV interlace: %d\n",yuv_interlacing);
			fprintf (stderr,"YUV Output Resolution: %dx%d\n",pCodecCtx->width, pCodecCtx->height);

			if ((write_error_code = y4m_write_stream_header(fdOut, &streaminfo)) != Y4M_OK)
			{
				mjpeg_error("Write header failed: %s", y4m_strerr(write_error_code));
			} 
			header_written = 1;
		}

		if (convert) {
			// Convert the decoded frame to the requested chroma
			// subsampling with libswscale.  Note the destination
			// format is convert_mode, matching the buffer that
			// avpicture_fill() attached to pFrame444 above.
			struct SwsContext* img_convert_ctx =
				sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
					pCodecCtx->width, pCodecCtx->height, convert_mode,
					SWS_BICUBIC, NULL, NULL, NULL);

			sws_scale(img_convert_ctx, pFrame->data, pFrame->linesize,
				0, pCodecCtx->height, pFrame444->data,
				pFrame444->linesize);

			sws_freeContext (img_convert_ctx);

			chromacpy(yuv_data,pFrame444,&streaminfo);
		} else {
			chromacpy(yuv_data,pFrame,&streaminfo);
		}
		write_error_code = y4m_write_frame( fdOut, &streaminfo, &frameinfo, yuv_data);
            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
    }

	y4m_fini_stream_info(&streaminfo);
	y4m_fini_frame_info(&frameinfo);

	free(yuv_data[0]);
	free(yuv_data[1]);
	free(yuv_data[2]);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codec
    avcodec_close(pCodecCtx);

    // Close the video file
    av_close_input_file(pFormatCtx);

    return 0;
}
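chromalloc() and chromacpy() are not included in this listing. A plausible sketch, inferred only from how they are called above: allocate the three planes for the negotiated chroma mode, then copy an AVFrame's planes line by line (an AVFrame's linesize may exceed the visible plane width):

#include <stdlib.h>
#include <string.h>

static void chromalloc_sketch(uint8_t *m[3], y4m_stream_info_t *sinfo)
{
    int p;
    for (p = 0; p < 3; p++)
        m[p] = (uint8_t *)malloc(y4m_si_get_plane_length(sinfo, p));
}

static void chromacpy_sketch(uint8_t *m[3], AVFrame *frame,
                             y4m_stream_info_t *sinfo)
{
    int p, y;
    for (p = 0; p < 3; p++) {
        int w = y4m_si_get_plane_width(sinfo, p);
        int h = y4m_si_get_plane_height(sinfo, p);
        for (y = 0; y < h; y++)   /* copy row by row, skipping padding */
            memcpy(m[p] + y * w, frame->data[p] + y * frame->linesize[p], w);
    }
}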
Example #7
void MultiplexJob::SetupInputStreams( std::vector< IBitStream *> &inputs )
{
    IBitStream *bs;
    IBitStreamUndo undo;
    unsigned int i;
    bool bad_file = false;

    for( i = 0; i < inputs.size(); ++i )
    {
        bs = inputs[i];
        // Remember the streams initial state...
        bs->PrepareUndo( undo );
        if( LPCMStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an LPCM Audio stream.",
                        bs->StreamName());
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs,  LPCM_AUDIO) );
            ++audio_tracks;
            ++lpcm_tracks;
            continue;
        }

        bs->UndoChanges( undo );
        if( MPAStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an MPEG Audio stream.",
                        bs->StreamName() );
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs, MPEG_AUDIO) );
            ++audio_tracks;
            continue;
        }

        bs->UndoChanges( undo );
        if( AC3Stream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an AC3 Audio stream.",
                        bs->StreamName());
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs, AC3_AUDIO) );
            ++audio_tracks;
            continue;
        }

        bs->UndoChanges( undo );
        if( DTSStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like a dts Audio stream.",
                        bs->StreamName());
            bs->UndoChanges( undo);
            streams.push_back( new JobStream( bs, DTS_AUDIO) );
            ++audio_tracks;
            continue;
        }

        bs->UndoChanges( undo );
        if( VideoStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an MPEG Video stream.",
                        bs->StreamName());
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs, MPEG_VIDEO) );
            ++video_tracks;
            continue;
        }

        bs->UndoChanges( undo );

        if( SUBPStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an Subpicture stream.",
                        bs->StreamName());
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs, SUBP_STREAM) );
            ++subtitle_tracks;
            continue;
        }


#ifdef ZALPHA
        if( ZAlphaStream::Probe( *bs ) )
        {
            mjpeg_info ("File %s looks like an Z/Alpha Video stream.",
                        bs->StreamName());
            bs->UndoChanges( undo );
            streams.push_back( new JobStream( bs, Z_ALPHA) );
            ++video_tracks;
            ++z_alpha_tracks;
            continue;
        }
#endif
        bad_file = true;
        mjpeg_error ("File %s unrecogniseable!", bs->StreamName());
        delete bs;
    }

    if( bad_file )
    {
        mjpeg_error_exit1( "Unrecogniseable file(s)... exiting.");
    }

    //
    // Where no parameters for streams have been specified
    // simply set the default values (these will depend on the format
    // we're muxing of course...)
    //

    for( i = video_param.size(); i < video_tracks; ++i )
    {
        video_param.push_back(VideoParams::Default( mux_format ));
    }
    for( i = lpcm_param.size(); i < lpcm_tracks; ++i )
    {
        lpcm_param.push_back(LpcmParams::Default(mux_format));
    }
    for( i = subtitle_params.size(); i < subtitle_tracks; ++i )
    {
        subtitle_params.push_back(SubtitleStreamParams::Default(mux_format));
    }


    //
    // Set standard values if the selected profile implies this...
    //
    for( i = 0; i < video_tracks; ++i )
    {
        if( video_param[i]->Force(mux_format) )
        {
            mjpeg_info( "Video stream %d: profile %d selected - ignoring non-standard options!", i, mux_format );
        }
    }

    mjpeg_info( "Found %d audio streams, %d video streams and %d subtitle streams",
                audio_tracks,
                video_tracks,
                subtitle_tracks
              );

}
Example #8
int main(int argc, char *argv[])
{
   int verbosity = 1;
   double time_between_frames = 0.0;
   double frame_rate = 0.0;
   struct timeval time_now;
   int n, frame;
   unsigned char *yuv[3];
   int in_fd = 0;
   int screenwidth=0, screenheight=0;
   y4m_stream_info_t streaminfo;
   y4m_frame_info_t frameinfo;
   int frame_width;
   int frame_height;
   int wait_for_sync = 1;
   char *window_title = NULL;

   while ((n = getopt(argc, argv, "hs:t:f:cv:")) != EOF) {
      switch (n) {
         case 'c':
            wait_for_sync = 0;
            break;
         case 's':
            if (sscanf(optarg, "%dx%d", &screenwidth, &screenheight) != 2) {
               mjpeg_error_exit1( "-s option needs two arguments: -s 10x10");
               exit(1);
            }
            break;
	  case 't':
	    window_title = optarg;
	    break;
	  case 'f':
		  frame_rate = atof(optarg);
		  if( frame_rate <= 0.0 || frame_rate > 200.0 )
			  mjpeg_error_exit1( "-f option needs argument > 0.0 and < 200.0");
		  break;
          case 'v':
	    verbosity = atoi(optarg);
	    if ((verbosity < 0) || (verbosity > 2))
	      mjpeg_error_exit1("-v needs argument from {0, 1, 2} (not %d)",
				verbosity);
	    break;
	  case 'h':
	  case '?':
            usage();
            exit(1);
            break;
         default:
            usage();
            exit(1);
      }
   }

   mjpeg_default_handler_verbosity(verbosity);

   y4m_accept_extensions(1);
   y4m_init_stream_info(&streaminfo);
   y4m_init_frame_info(&frameinfo);
   if ((n = y4m_read_stream_header(in_fd, &streaminfo)) != Y4M_OK) {
      mjpeg_error("Couldn't read YUV4MPEG2 header: %s!",
         y4m_strerr(n));
      exit (1);
   }

   switch (y4m_si_get_chroma(&streaminfo)) {
   case Y4M_CHROMA_420JPEG:
   case Y4M_CHROMA_420MPEG2:
   case Y4M_CHROMA_420PALDV:
     break;
   default:
     mjpeg_error_exit1("Cannot handle non-4:2:0 streams yet!");
   }

   frame_width = y4m_si_get_width(&streaminfo);
   frame_height = y4m_si_get_height(&streaminfo);

   if ((screenwidth <= 0) || (screenheight <= 0)) {
     /* no user supplied screen size, so let's use the stream info */
     y4m_ratio_t aspect = y4m_si_get_sampleaspect(&streaminfo);
       
     if (!(Y4M_RATIO_EQL(aspect, y4m_sar_UNKNOWN))) {
       /* if pixel aspect ratio present, use it */
#if 1
       /* scale width, but maintain height (line count) */
       screenheight = frame_height;
       screenwidth = frame_width * aspect.n / aspect.d;
#else
       if ((frame_width * aspect.d) < (frame_height * aspect.n)) {
	 screenwidth = frame_width;
	 screenheight = frame_width * aspect.d / aspect.n;
       } else {
	 screenheight = frame_height;
	 screenwidth = frame_height * aspect.n / aspect.d;
       }
#endif
     } else {
       /* unknown aspect ratio -- assume square pixels */
       screenwidth = frame_width;
       screenheight = frame_height;
     }
   }

   /* Initialize the SDL library */
   if( SDL_Init(SDL_INIT_VIDEO) < 0 ) {
      mjpeg_error("Couldn't initialize SDL: %s", SDL_GetError());
      exit(1);
   }

   /* set window title */
   SDL_WM_SetCaption(window_title, NULL);

   /* yuv params */
   yuv[0] = malloc(frame_width * frame_height * sizeof(unsigned char));
   yuv[1] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));
   yuv[2] = malloc(frame_width * frame_height / 4 * sizeof(unsigned char));

   screen = SDL_SetVideoMode(screenwidth, screenheight, 0, SDL_SWSURFACE);
   if ( screen == NULL ) {
      mjpeg_error("SDL: Couldn't set %dx%d: %s",
		  screenwidth, screenheight, SDL_GetError());
      exit(1);
   }
   else {
      mjpeg_debug("SDL: Set %dx%d @ %d bpp",
		  screenwidth, screenheight, screen->format->BitsPerPixel);
   }

   /* since IYUV ordering is not supported by Xv accel on maddog's system
    *  (Matrox G400 --- although, the alias I420 is, but this is not
    *  recognized by SDL), we use YV12 instead, which is identical,
    *  except for ordering of Cb and Cr planes...
    * we swap those when we copy the data to the display buffer...
    */
   yuv_overlay = SDL_CreateYUVOverlay(frame_width, frame_height,
				      SDL_YV12_OVERLAY,
				      screen);
   if ( yuv_overlay == NULL ) {
      mjpeg_error("SDL: Couldn't create SDL_yuv_overlay: %s",
		      SDL_GetError());
      exit(1);
   }
   if ( yuv_overlay->hw_overlay ) 
     mjpeg_debug("SDL: Using hardware overlay.");

   rect.x = 0;
   rect.y = 0;
   rect.w = screenwidth;
   rect.h = screenheight;

   SDL_DisplayYUVOverlay(yuv_overlay, &rect);

   signal (SIGINT, sigint_handler);

   frame = 0;
   if ( frame_rate == 0.0 ) 
   {
	   /* frame rate has not been set from command-line... */
	   if (Y4M_RATIO_EQL(y4m_fps_UNKNOWN, y4m_si_get_framerate(&streaminfo))) {
	     mjpeg_info("Frame-rate undefined in stream... assuming 25Hz!" );
	     frame_rate = 25.0;
	   } else {
	     frame_rate = Y4M_RATIO_DBL(y4m_si_get_framerate(&streaminfo));
	   }
   }
   time_between_frames = 1.e6 / frame_rate;

   gettimeofday(&time_now,0);

   while ((n = y4m_read_frame(in_fd, &streaminfo, &frameinfo, yuv)) == Y4M_OK && (!got_sigint)) {

      /* Lock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         if ( SDL_LockSurface(screen) < 0 ) break;
      }
      if (SDL_LockYUVOverlay(yuv_overlay) < 0) break;

      /* let's draw the data (*yuv[3]) on a SDL screen (*screen) */
      memcpy(yuv_overlay->pixels[0], yuv[0], frame_width * frame_height);
      memcpy(yuv_overlay->pixels[1], yuv[2], frame_width * frame_height / 4);
      memcpy(yuv_overlay->pixels[2], yuv[1], frame_width * frame_height / 4);

      /* Unlock SDL_yuv_overlay */
      if ( SDL_MUSTLOCK(screen) ) {
         SDL_UnlockSurface(screen);
      }
      SDL_UnlockYUVOverlay(yuv_overlay);

      /* Show, baby, show! */
      SDL_DisplayYUVOverlay(yuv_overlay, &rect);
      mjpeg_info("Playing frame %4.4d - %s",
		 frame, print_status(frame, frame_rate));

      if (wait_for_sync)
         while(get_time_diff(time_now) < time_between_frames) {
            usleep(1000);
         }
      frame++;

      gettimeofday(&time_now,0);
   }

   if ((n != Y4M_OK) && (n != Y4M_ERR_EOF))
      mjpeg_error("Couldn't read frame: %s", y4m_strerr(n));

   for (n=0; n<3; n++) {
      free(yuv[n]);
   }

   mjpeg_info("Played %4.4d frames (%s)",
	      frame, print_status(frame, frame_rate));

   SDL_FreeYUVOverlay(yuv_overlay);
   SDL_Quit();

   y4m_fini_frame_info(&frameinfo);
   y4m_fini_stream_info(&streaminfo);
   return 0;
}
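get_time_diff() and print_status() come from elsewhere in this tool. A minimal sketch of the former, assuming it returns microseconds elapsed since the passed timestamp, which is what the pacing loop above expects given that time_between_frames is in microseconds:

#include <sys/time.h>

static double get_time_diff_sketch(struct timeval since)
{
    struct timeval now;
    gettimeofday(&now, NULL);
    return (now.tv_sec - since.tv_sec) * 1.e6
         + (now.tv_usec - since.tv_usec);
}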
Example #9
void ysSource::check_parameters()
{
  int cause_to_exit = 0;

  /* init interlacing */
  if (_stream.interlace() == Y4M_UNKNOWN) {
    mjpeg_error("Source interlacing is unknown!");
    cause_to_exit = 1;
  }
  /* init/constrain SAR */
  if (!_stream.sar().is_known()) {
    mjpeg_error("Source sample aspect ratio unknown!");
    cause_to_exit = 1;
  }

  /* init/constrain chroma subsampling */
  if (!_stream.subsampling().is_known()) {
    mjpeg_error("Source chroma subsampling is unknown!");
    cause_to_exit = 1;
  }

  /* init/clip matte region */
  /* default is entire source frame --- so convolution can extend beyond
     the active region */
  if (!_matte_region.is_known()) {
    if (_matte_region.offset().is_known()) {
      mjpeg_info("Source matte region defaulting to source frame size.");
      _matte_region.dim(_stream.dim());
    } else {
      mjpeg_info("Source matte region defaulting to full source frame.");
      _matte_region = ysRegion(_stream.dim());
      _matte_region.origin_mode(ANC_TL);
    }
  }
  _matte_region.fixate(_stream.dim());

  /* check alignment */
  /* frame size and matte region must conform to alignment */
  {
    int xal = _stream.x_alignment();
    int yal = _stream.y_alignment();

    if (_stream.x_size() % xal) {
      mjpeg_error("Source x size (%d) is not multiple of %d!",
		  _stream.x_size(), xal);
      cause_to_exit = 1;
    }
    if (_stream.y_size() % yal) {
      mjpeg_error("Source y size (%d) is not multiple of %d!",
		  _stream.y_size(), yal);
      cause_to_exit = 1;
    }

    if (_matte_region.dim().x() % xal) {
      mjpeg_error("Source matte region x size (%d) is not multiple of %d!",
		  _matte_region.dim().x(), xal);
      cause_to_exit = 1;
    }
    if (_matte_region.offset().x() % xal) {
      mjpeg_error("Source matte region x offset (%d) is not multiple of %d!",
		  _matte_region.offset().x(), xal);
      cause_to_exit = 1;
    }
    if (_matte_region.dim().y() % yal) {
      mjpeg_error("Source matte region y size (%d) is not multiple of %d!",
		  _matte_region.dim().y(), yal);
      cause_to_exit = 1;
    }
    if (_matte_region.offset().y() % yal) {
      mjpeg_error("Source matte region y offset (%d) is not multiple of %d!",
		  _matte_region.offset().y(), yal);
      cause_to_exit = 1;
    }
  }

  if (cause_to_exit) 
    exit(1);

  /* init/clip active region */
  if (!_active_region.is_known()) {
    if (_active_region.offset().is_known()) {
      mjpeg_info("Source active region defaulting to source frame size.");
      _active_region.dim(_stream.dim());
    } else {
      mjpeg_info("Source active region defaulting to full source frame.");
      _active_region = ysRegion(_stream.dim());
      _active_region.origin_mode(ANC_TL);
    }
  }
  _active_region.fixate(_stream.dim());


#if 0  /* do clipping later, after ratios are established */
  if (_active_region.clip(ysRatioPoint(_stream.dim()))) {
    mjpeg_warn("Source active region clipped by frame size.");
  }
#endif

}
Example #10
int
main(int argc, char **argv)
	{
	int	sts, c, width = 640, height = 480, noheader = 0;
	int	Y = 16, U = 128, V = 128, chroma_mode = Y4M_CHROMA_420MPEG2;
	int	numframes = 1, force = 0;
	y4m_ratio_t	rate_ratio = y4m_fps_NTSC;
	y4m_ratio_t	aspect_ratio = y4m_sar_SQUARE;
	int	plane_length[3];
	u_char	*yuv[3];
	y4m_stream_info_t ostream;
	y4m_frame_info_t oframe;
	char	interlace = Y4M_ILACE_NONE;

	opterr = 0;
	y4m_accept_extensions(1);

	while	((c = getopt(argc, argv, "Hfx:w:h:r:i:a:Y:U:V:n:")) != EOF)
		{
		switch	(c)
			{
			case	'H':
				noheader = 1;
				break;
			case	'a':
				sts = y4m_parse_ratio(&aspect_ratio, optarg);
				if	(sts != Y4M_OK)
					mjpeg_error_exit1("Invalid aspect: %s",
						optarg);
				break;
			case	'w':
				width = atoi(optarg);
				break;
			case	'h':
				height = atoi(optarg);
				break;
			case	'r':
				sts = y4m_parse_ratio(&rate_ratio, optarg);
				if	(sts != Y4M_OK)
					mjpeg_error_exit1("Invalid rate: %s", optarg);
				break;
			case	'Y':
				Y = atoi(optarg);
				break;
			case	'U':
				U = atoi(optarg);
				break;
			case	'V':
				V = atoi(optarg);
				break;
			case	'i':
				switch	(optarg[0])
					{
					case	'p':
						interlace = Y4M_ILACE_NONE;
						break;
					case	't':
						interlace = Y4M_ILACE_TOP_FIRST;
						break;
					case	'b':
						interlace = Y4M_ILACE_BOTTOM_FIRST;
						break;
					default:
						usage();
					}
				break;
			case	'x':
				chroma_mode = y4m_chroma_parse_keyword(optarg);
				if	(chroma_mode == Y4M_UNKNOWN)
					{
					if	(strcmp(optarg, "help") != 0)
						mjpeg_error("Invalid -x arg '%s'", optarg);
					chroma_usage();
					}

				break;
			case	'f':
				force = 1;
				break;
			case	'n':
				numframes = atoi(optarg);
				break;
			case	'?':
			default:
				usage();
			}
		}

	if	(width <= 0)
		mjpeg_error_exit1("Invalid Width: %d", width);

	if	(height <= 0)
		mjpeg_error_exit1("Invalid Height: %d", height);

	if	(!force && (Y < 16 || Y > 235))
		mjpeg_error_exit1("16 <= Y <= 235");

	if	(!force && (U < 16 || U > 240))
		mjpeg_error_exit1("16 <= U <= 240");

	if	(!force && (V < 16 || V > 240))
		mjpeg_error_exit1("16 <= V <= 240");

	y4m_init_stream_info(&ostream);
	y4m_init_frame_info(&oframe);
	y4m_si_set_width(&ostream, width);
	y4m_si_set_height(&ostream, height);
	y4m_si_set_interlace(&ostream, interlace);
	y4m_si_set_framerate(&ostream, rate_ratio);
	y4m_si_set_sampleaspect(&ostream, aspect_ratio);
	y4m_si_set_chroma(&ostream, chroma_mode);

	if	(y4m_si_get_plane_count(&ostream) != 3)
		mjpeg_error_exit1("Only the 3 plane formats supported");

	plane_length[0] = y4m_si_get_plane_length(&ostream, 0);
	plane_length[1] = y4m_si_get_plane_length(&ostream, 1);
	plane_length[2] = y4m_si_get_plane_length(&ostream, 2);

	yuv[0] = malloc(plane_length[0]);
	yuv[1] = malloc(plane_length[1]);
	yuv[2] = malloc(plane_length[2]);

/*
 * Now fill the array once with black but use the provided Y, U and V values
*/
	memset(yuv[0], Y, plane_length[0]);
	memset(yuv[1], U, plane_length[1]);
	memset(yuv[2], V, plane_length[2]);

	if	(noheader == 0)
		y4m_write_stream_header(fileno(stdout), &ostream);
	while	(numframes--)
		y4m_write_frame(fileno(stdout), &ostream, &oframe, yuv);

	free(yuv[0]);
	free(yuv[1]);
	free(yuv[2]);
	y4m_fini_stream_info(&ostream);
	y4m_fini_frame_info(&oframe);
	exit(0);
	}
Example #11
void MPAStream::Init ( const int stream_num )

{
	int padding_bit;

	MuxStream::Init( AUDIO_STR_0 + stream_num, 
					 0,  // Buffer scale
					 muxinto.audio_buffer_size,
					 muxinto.vcd_zero_stuffing,
					 muxinto.buffers_in_audio,
					 muxinto.always_buffers_in_audio
		);
    mjpeg_info ("Scanning for header info: Audio stream %02x (%s)",
                AUDIO_STR_0 + stream_num,
                bs.StreamName()
                );

	/* A.Stevens 2000 - update to be compatible up to  MPEG2.5
	 */
    AU_start = bs.bitcount();
    if (bs.GetBits(11)==AUDIO_SYNCWORD)
    {
		num_syncword++;
		version_id = bs.GetBits( 2);
		layer 		= 3-bs.GetBits( 2); /* 0..2 not 1..3!! */
		protection 		= bs.Get1Bit();
		bit_rate_code	= bs.GetBits( 4);
		frequency 		= bs.GetBits( 2);
		padding_bit     = bs.Get1Bit();
		bs.Get1Bit();
		mode 		= bs.GetBits( 2);
		mode_extension 	= bs.GetBits( 2);
		copyright 		= bs.Get1Bit();
		original_copy 	= bs.Get1Bit ();
		emphasis		= bs.GetBits( 2);

		framesize =
			mpa_bitrates_kbps[version_id][layer][bit_rate_code]  * 
			mpa_slots[layer] *1000 /
			mpa_freq_table[version_id][frequency];

		size_frames[0] = framesize * ( layer == 0 ? 4 : 1);
		size_frames[1] = (framesize+1) * ( layer == 0 ? 4 : 1);
		num_frames[padding_bit]++;
        access_unit.start  = AU_start;
		access_unit.length = size_frames[padding_bit];
	  
		samples_per_second = mpa_freq_table[version_id][frequency];

		/* Presentation time-stamping  */
		access_unit.PTS = static_cast<clockticks>(decoding_order) * 
			static_cast<clockticks>(mpa_samples [layer]) * 
			static_cast<clockticks>(CLOCKS)	/ samples_per_second;
		access_unit.DTS = access_unit.PTS;
		access_unit.dorder = decoding_order;
		++decoding_order;
		aunits.Append( access_unit );

    } else
    {
		mjpeg_error ( "Invalid MPEG Audio stream header.");
		exit (1);
    }


	OutputHdrInfo();
}
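A worked instance of the framesize formula above, with illustrative values: for a Layer II stream (layer index 1) at 224 kbps and 44.1 kHz, mpa_slots[1] is 144, so framesize = 224 * 144 * 1000 / 44100 = 731 bytes, and the padded variant is 732. Layer I (index 0) uses 12 four-byte slots, hence the "* 4" in size_frames:

/* kbps * slots * 1000 / hz, exactly as computed above. */
static unsigned mpa_frame_bytes_sketch(unsigned kbps, unsigned slots,
                                       unsigned hz)
{
    return kbps * slots * 1000 / hz;
}
/* mpa_frame_bytes_sketch(224, 144, 44100) == 731 */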
Example #12
void writeoutYUV4MPEGheader(int out_fd,
			    LavParam *param,
			    EditList el,
			    y4m_stream_info_t *streaminfo)
{
   int n;

   y4m_si_set_width(streaminfo, param->output_width);
   y4m_si_set_height(streaminfo, param->output_height);
   y4m_si_set_interlace(streaminfo, param->interlace);
   y4m_si_set_framerate(streaminfo, mpeg_conform_framerate(el.video_fps));
   if (!Y4M_RATIO_EQL(param->sar, y4m_sar_UNKNOWN)) {
     y4m_si_set_sampleaspect(streaminfo, param->sar);
   } else if ((el.video_sar_width != 0) || (el.video_sar_height != 0)) {
     y4m_ratio_t sar;
     sar.n = el.video_sar_width;
     sar.d = el.video_sar_height;
     y4m_si_set_sampleaspect(streaminfo, sar);
   } else {
     /* no idea! ...eh, just guess. */
     mjpeg_warn("unspecified sample-aspect-ratio --- taking a guess...");
     y4m_si_set_sampleaspect(streaminfo,
			     y4m_guess_sar(param->output_width, 
					   param->output_height,
					   param->dar));
   }

   switch (el_video_frame_data_format(0, &el)) { /* FIXME: checking only 0-th frame. */
   case DATAFORMAT_YUV420:
     switch (param->chroma) {
     case Y4M_UNKNOWN:
     case Y4M_CHROMA_420JPEG:
       break;
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     default:
       mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420jpeg') with this input");
       break;
     }
     break;

   case DATAFORMAT_YUV422:
     switch (param->chroma) {
     case Y4M_CHROMA_422:
       break;
     default:
       mjpeg_error_exit1("must specify chroma '422' with this input");
       break;
     }
     break;

   case DATAFORMAT_DV2:
#ifndef HAVE_LIBDV
     mjpeg_error_exit1("DV input was not configured at compile time");
#else
     el_get_video_frame(jpeg_data, 0, &el); /* FIXME: checking only 0-th frame. */
     dv_parse_header(decoder, jpeg_data);
     switch(decoder->sampling) {
     case e_dv_sample_420:
       switch (param->chroma) {
       case Y4M_UNKNOWN:
	 mjpeg_info("set chroma '420paldv' from input");
	 param->chroma = Y4M_CHROMA_420PALDV;
	 break;
       case Y4M_CHROMA_420PALDV:
	 break;
       case Y4M_CHROMA_420JPEG:
       case Y4M_CHROMA_420MPEG2:
	 mjpeg_warn("4:2:0 chroma should be '420paldv' with this input");
	 break;
       case Y4M_CHROMA_422:
         if(libdv_pal_yv12 == 1 )
	   mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       default:
	 mjpeg_error_exit1("must specify 4:2:0 chroma (should be '420paldv') with this input");
	 break;
       }
       break;
     case e_dv_sample_411:
       if (param->chroma != Y4M_CHROMA_411)
	 mjpeg_info("chroma '411' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     case e_dv_sample_422:
       if (param->chroma != Y4M_CHROMA_422)
	 mjpeg_info("chroma '422' recommended with this input");
       switch (param->chroma) {
       case Y4M_CHROMA_420MPEG2:
       case Y4M_CHROMA_420PALDV:
	 mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
	 break;
       }
       break;
     default:
       break;
     }
#endif
     break;

   case DATAFORMAT_MJPG:
     if (param->chroma != Y4M_CHROMA_422 && el.chroma == Y4M_CHROMA_422)
       mjpeg_info("chroma '422' recommended with this input");
     switch (param->chroma) {
     case Y4M_CHROMA_420MPEG2:
     case Y4M_CHROMA_420PALDV:
       mjpeg_warn("4:2:0 chroma should be '420jpeg' with this input");
       break;
     }
     break;
   }
   if (param->chroma == Y4M_UNKNOWN) {
     mjpeg_info("set default chroma '420jpeg'");
     param->chroma = Y4M_CHROMA_420JPEG;
   }
   y4m_si_set_chroma(streaminfo, param->chroma);

   n = y4m_write_stream_header(out_fd, streaminfo);
   if (n != Y4M_OK)
      mjpeg_error("Failed to write stream header: %s", y4m_strerr(n));
}
Example #13
/*
 * readframe - read jpeg or dv frame into yuv buffer
 *
 * returns:
 *	0   success
 *	1   fatal error
 *	2   corrupt data encountered; 
 *		decoding can continue, but this frame may be damaged 
 */
int readframe(int numframe, 
	      uint8_t *frame[],
	      LavParam *param,
	      EditList el)
{
  int len, i, res, data_format;
  uint8_t *frame_tmp;
  int warn;
  warn = 0;

  if (MAX_JPEG_LEN < el.max_frame_size) {
    mjpeg_error_exit1( "Max size of JPEG frame = %ld: too big",
		       el.max_frame_size);
  }
  
  len = el_get_video_frame(jpeg_data, numframe, &el);
  data_format = el_video_frame_data_format(numframe, &el);
  
  switch(data_format) {

  case DATAFORMAT_DV2 :
#ifndef HAVE_LIBDV
    mjpeg_error("DV input was not configured at compile time");
    res = 1;
#else
    mjpeg_debug("DV frame %d   len %d",numframe,len);
    res = 0;
    dv_parse_header(decoder, jpeg_data);
    switch(decoder->sampling) {
    case e_dv_sample_420:
      /* libdv decodes PAL DV directly as planar YUV 420
       * (YV12 or 4CC 0x32315659) if configured with the flag
       * --with-pal-yuv=YV12 which is not (!) the default
       */
      if (libdv_pal_yv12 == 1) {
	pitches[0] = decoder->width;
	pitches[1] = decoder->width / 2;
	pitches[2] = decoder->width / 2;
	if (pitches[0] != param->output_width ||
	    pitches[1] != param->chroma_width) {
	  mjpeg_error("for DV 4:2:0 only full width output is supported");
	  res = 1;
	} else {
	  dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			       frame, (int *)pitches);
	  /* swap the U and V components */
	  frame_tmp = frame[2];
	  frame[2] = frame[1];
	  frame[1] = frame_tmp;
	}
	break;
      }
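      /* no break: if not configured for YV12, PAL DV falls through and
       * is decoded as packed 4:2:2, like NTSC DV below */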
    case e_dv_sample_411:
    case e_dv_sample_422:
      /* libdv decodes NTSC DV (native 411) and by default also PAL
       * DV (native 420) as packed YUV 422 (YUY2 or 4CC 0x32595559)
       * where the U and V information is repeated.  This can be
       * transformed to planar 420 (YV12 or 4CC 0x32315659).
       * For NTSC DV this transformation is lossy.
       */
      pitches[0] = decoder->width * 2;
      pitches[1] = 0;
      pitches[2] = 0;
      if (decoder->width != param->output_width) {
	mjpeg_error("for DV only full width output is supported");
	res = 1;
      } else {
	dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			     dv_frame, (int *)pitches);
	frame_YUV422_to_planar(frame, dv_frame[0],
			       decoder->width,	decoder->height,
			       param->chroma);
      }
      break;
    default:
      res = 1;
      break;
    }
#endif /* HAVE_LIBDV */
    break;

  case DATAFORMAT_YUV420 :
  case DATAFORMAT_YUV422 :
    mjpeg_debug("raw YUV frame %d   len %d",numframe,len);
    frame_tmp = jpeg_data;
    memcpy(frame[0], frame_tmp, param->luma_size);
    frame_tmp += param->luma_size;
    memcpy(frame[1], frame_tmp, param->chroma_size);
    frame_tmp += param->chroma_size;
    memcpy(frame[2], frame_tmp, param->chroma_size);
    res = 0;
    break;

  default:
    mjpeg_debug("MJPEG frame %d   len %d",numframe,len);
    res = decode_jpeg_raw(jpeg_data, len, el.video_inter,
			  param->chroma,
			  param->output_width, param->output_height,
			  frame[0], frame[1], frame[2]);
  }
  
  if (res < 0) {
    mjpeg_warn( "Fatal Error Decoding Frame %d", numframe);
    return 1;
  } else if (res == 1) {
    mjpeg_warn( "Decoding of Frame %d failed", numframe);
    warn = 1;
    res = 0;
  }
  
  
  if (param->mono) {
    for (i = 0;
	 i < param->chroma_size;
	 ++i) {
      frame[1][i] = 0x80;
      frame[2][i] = 0x80;
    }
  }

  if(warn)
	  return 2;
  else
	  return 0;
}
/** 
Reads one PNG file. 
@param process Process the image data (0 for initial parameter determination)
@returns -1 on failure, 1 on success
*/
int decode_png(const char *pngname, int process, parameters_t *param)
{
  int num_pass = 1;
  int bit_depth, color_type;
  FILE *pngfile;
  //png_byte hdptr[8];

  /* Now open this PNG file, and examine its header to retrieve the 
     YUV4MPEG info that shall be written */
  pngfile = fopen(pngname, "rb");
  if (!pngfile)
    {
      perror("PNG file open failed:");
      return -1;
    }

  //fread(hdptr, 1, 8, pngfile);

#if 0 
  bool is_png = !png_sig_cmp(hdptr, 0, 8);
  if (!is_png)
    {
      mjpeg_error("%s is _no_ PNG file !\n");
      return -1;
    }
#endif
  
  png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
  if (!png_ptr)
    mjpeg_error_exit1("%s: Could not allocate PNG read struct !", pngname);
  
  png_init_io(png_ptr, pngfile);
  //png_set_sig_bytes(png_ptr, 8);
  
  info_ptr = png_create_info_struct(png_ptr);
  if (!info_ptr)
    {
      png_destroy_read_struct(&png_ptr,
			      (png_infopp)NULL, (png_infopp)NULL);
      mjpeg_error_exit1("%s: Could not allocate PNG info struct !", pngname);
    }
  
  end_info = png_create_info_struct(png_ptr);
  if (!end_info)
    {
      png_destroy_read_struct(&png_ptr, &info_ptr,
			      (png_infopp)NULL);
      mjpeg_error_exit1("%s: Could not allocate PNG end info struct !", pngname);
    }
  
  if (setjmp(png_jmpbuf(png_ptr)))
    {
      png_destroy_read_struct(&png_ptr, &info_ptr,
			      &end_info);
      mjpeg_error("%s: Corrupted PNG file !", pngname);
      return -1;
    }
  
  if (process)
    png_set_read_user_transform_fn(png_ptr, png_separation);
  png_read_png(png_ptr, info_ptr, PNG_TRANSFORM_STRIP_16 | PNG_TRANSFORM_STRIP_ALPHA, NULL);
  
  if (png_get_IHDR(png_ptr, info_ptr, &param->width, &param->height, &bit_depth,
		       //   &color_type, &interlace_type, &compression_type, &filter_type))
		   &color_type, NULL, NULL, NULL))	
    num_pass = png_set_interlace_handling(png_ptr);
  else
    mjpeg_error_exit1("PNG header reading failed !!\n");
#if 0 
  mjpeg_info("Reading info struct...\n");
  png_read_info(png_ptr, info_ptr);
  mjpeg_info("Done...\n");
  
  if (png_get_IHDR(png_ptr, info_ptr, &param->width, &param->height, &bit_depth,
		       //   &color_type, &interlace_type, &compression_type, &filter_type))
		   &color_type, NULL, NULL, NULL))	
    num_pass = png_set_interlace_handling(png_ptr);
  else
    mjpeg_error_exit1("PNG header reading failed !!\n");
  
  if (process)
    {
      printf("%d passes needed\n\n", num_pass);
      
      if (bit_depth != 8 && bit_depth != 16)
	{
	  mjpeg_error_exit1("Invalid bit_depth %d, only 8 and 16 bit allowed !!\n", bit_depth);
	}
      
      png_set_strip_16(png_ptr); // always has to strip the 16bit input, MPEG can't handle it	  
      png_set_strip_alpha(png_ptr); // Alpha can't be processed until Z/Alpha is integrated
      
      printf("\nAllocating row buffer...");
      png_set_read_user_transform_fn(png_ptr, png_separation);
      png_bytep row_buf = (png_bytep)png_malloc(png_ptr,
						png_get_rowbytes(png_ptr, info_ptr));
      
      for (int n=0; n < num_pass; n++)
	for (int y=0; y < sh_param->height; y++)
	  {
	    printf("Writing row data for pass %d\n", n);
	    png_read_rows(png_ptr, (png_bytepp)&row_buf, NULL, 1);
	  }
      
      png_free(png_ptr, row_buf);	      
    }
  png_read_end(png_ptr, info_ptr);
#endif  
  if (setjmp(png_jmpbuf(png_ptr))) {
    png_destroy_read_struct(&png_ptr, &info_ptr, &end_info);
    return -1;
    }

  fclose(pngfile);

  return 1;
}
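png_separation() is registered as a read-user-transform above but is not part of this listing. A minimal sketch of what such a callback looks like; libpng calls it once per decoded row, and store_rgb_pixel() here is a hypothetical sink for the separated planes:

static void png_separation_sketch(png_structp png_ptr, png_row_infop row_info,
                                  png_bytep row)
{
    png_uint_32 x;
    (void)png_ptr;   /* unused in this sketch */
    for (x = 0; x < row_info->width; x++)
    {
        /* rows arrive as packed RGB after the STRIP_16/STRIP_ALPHA
           transforms requested above */
        png_byte r = row[x * row_info->channels + 0];
        png_byte g = row[x * row_info->channels + 1];
        png_byte b = row[x * row_info->channels + 2];
        store_rgb_pixel(r, g, b);   /* hypothetical: route to Y/U/V planes */
    }
}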
/** parse_commandline
 * Parses the commandline for the supplied parameters.
 * in: argc, argv: the classic commandline parameters
 */
static void parse_commandline(int argc, char ** argv, parameters_t *param)
{
  int c;
  
  param->pngformatstr = NULL;
  param->begin = 0;
  param->numframes = -1;
  param->framerate = y4m_fps_UNKNOWN;
  param->interlace = Y4M_UNKNOWN;
  param->interleave = -1;
  param->verbose = 1;
  param->ss_mode = DEFAULT_CHROMA_MODE;
  //param->mza_filename = NULL;
  //param->make_z_alpha = 0;

  /* parse options */
  for (;;) {
    if (-1 == (c = getopt(argc, argv, "I:hv:L:b:j:n:f:z:S:")))
      break;
    switch (c) {
    case 'j':
      param->pngformatstr = strdup(optarg);
      break;
#if 0 
    case 'z':
      param->mza_filename = strdup(optarg);
      param->make_z_alpha = 1;
      break;
#else
    case 'z':
      mjpeg_error("Z encoding currently unsupported !\n");
      exit(-1);
      break;
#endif
    case 'S':
      param->ss_mode = y4m_chroma_parse_keyword(optarg);
      if (param->ss_mode == Y4M_UNKNOWN) {
	mjpeg_error_exit1("Unknown subsampling mode option:  %s", optarg);
      } else if (!chroma_sub_implemented(param->ss_mode)) {
	mjpeg_error_exit1("Unsupported subsampling mode option:  %s", optarg);
      }
      break;
    case 'b':
      param->begin = atol(optarg);
      break;
    case 'n':
      param->numframes = atol(optarg);
      break;
    case 'f':
      param->framerate = mpeg_conform_framerate(atof(optarg));
      break;
    case 'I':
      switch (optarg[0]) {
      case 'p':
	param->interlace = Y4M_ILACE_NONE;
	break;
      case 't':
	param->interlace = Y4M_ILACE_TOP_FIRST;
	break;
      case 'b':
	param->interlace = Y4M_ILACE_BOTTOM_FIRST;
	break;
      default:
	mjpeg_error_exit1 ("-I option requires arg p, t, or b");
      }
      break;
    case 'L':
      param->interleave = atoi(optarg);
      if ((param->interleave != 0) &&
	  (param->interleave != 1)) 
	mjpeg_error_exit1 ("-L option requires arg 0 or 1");
      break;
    case 'v':
      param->verbose = atoi(optarg);
      if (param->verbose < 0 || param->verbose > 2) 
	mjpeg_error_exit1( "-v option requires arg 0, 1, or 2");    
      break;     
    case 'h':
    default:
      usage(argv[0]);
      exit(1);
    }
  }
  if (param->pngformatstr == NULL) 
    { 
      mjpeg_error("%s:  input format string not specified. (Use -j option.)",
		  argv[0]); 
      usage(argv[0]); 
      exit(1);
    }

  if (Y4M_RATIO_EQL(param->framerate, y4m_fps_UNKNOWN)) 
    {
      mjpeg_error("%s:  framerate not specified.  (Use -f option)",
		  argv[0]); 
      usage(argv[0]); 
      exit(1);
    }
}
/** Here we process the command line options */
void process_commandline(int argc, char *argv[], struct area_s *inarea, 
  int *darker, int *copy_pixel, struct color_yuv *coloryuv, int *average_pixel)
{
int c;
char area [20];

while ((c = getopt(argc, argv, "Hhv:i:s:d:c:a:")) != -1)
  {
  switch (c)
    {
       case 'v':
         verbose = atoi(optarg);
         if ( verbose < 0 || verbose > 2)
           print_usage(argv[0]);
         if (verbose == 2)
           mjpeg_info("Set Verbose = %i", verbose);
         break;
       case 'i':
         /* This part shows how to process command line options */
         strncpy(area, optarg, sizeof(area) - 1);
         area[sizeof(area) - 1] = '\0'; /* strncpy need not NUL-terminate */
         fillarea(area, inarea);
         break;
       case 's':
         strncpy(area, optarg, sizeof(area) - 1);
         area[sizeof(area) - 1] = '\0';
         set_yuvcolor(area, coloryuv);
         break;
       case 'd':
         *darker = atoi(optarg);
         break;
       case 'c':
         *copy_pixel = atoi(optarg);
         break;
       case 'a':
         *average_pixel = atoi(optarg);
         break;
       case 'H':
       case 'h':
         print_usage(argv[0]);
    }
  }

/* Checking if we have used the -i option */
if ( ((*inarea).height == 0) && ((*inarea).width == 0) )
  mjpeg_error_exit1("You have to use the -i option");

/* Checking the range of the darker -d option */
if ( (*darker < 0) || (*darker > 100) )
  mjpeg_error_exit1("You can only make the area 1-100 percent darker");
else
  mjpeg_info("Setting the area %i percent darker", *darker);

/* Checking the copy pixel option */
if (*copy_pixel != 0)
  {
  if ( ((*inarea).height/2) < *copy_pixel)
    {
       mjpeg_error("You can only copy half of the height into the area"); 
       mjpeg_error_exit1("lower the copy pixel value below: %i ", 
                   ((*inarea).height/2));
    }

  if ( (*copy_pixel % 2) != 0)
    mjpeg_error_exit1("you have to use a even number of lines to copy into the field");

  if ( ((*inarea).height % *copy_pixel) != 0)  
    mjpeg_error_exit1("the height has to be a multiply of the copy pixel value"); 

  mjpeg_info("Number of rows using for coping into the area %i", *copy_pixel);
  }

/* Checking the average pixel option */
if (*average_pixel != 0)
  {
    if ( (*average_pixel > (*inarea).height) || 
         (*average_pixel > (*inarea).width) )
      mjpeg_error_exit1("The pixles used for the average must be less the the inactive area");

  if ( (*average_pixel % 2) != 0)  
   mjpeg_error_exit1("you have to use a even number for the average pixels"); 

  mjpeg_info("Number of pixels used for averaging %i", *average_pixel);
  }
}
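fillarea() and set_yuvcolor() are defined elsewhere in this tool. A hedged sketch of fillarea(), assuming a WxH+X+Y geometry syntax for -i; both the syntax and the offset field names are assumptions, not verified against the real tool:

static void fillarea_sketch(char area[20], struct area_s *inarea)
{
    unsigned int w, h, x, y;

    if (sscanf(area, "%ux%u+%u+%u", &w, &h, &x, &y) != 4)
        mjpeg_error_exit1("Area should look like WxH+X+Y: '%s'", area);

    inarea->width   = w;
    inarea->height  = h;
    inarea->hoffset = x;   /* hypothetical offset field names */
    inarea->voffset = y;
}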