Example #1
SCM mjpeg_to_yuv420p(SCM scm_source_ptr, SCM scm_shape, SCM scm_dest_ptr, SCM scm_offsets)
{
  /* Guile binding: decode one MJPEG frame into a planar YUV 4:2:0 buffer. */
  unsigned char *source_ptr = scm_to_pointer(scm_source_ptr);
  unsigned char *dest_ptr = scm_to_pointer(scm_dest_ptr);
  int width = scm_to_int(scm_cadr(scm_shape));   /* shape is the list (height width) */
  int height = scm_to_int(scm_car(scm_shape));
  int64_t offsets[3];                            /* byte offsets of the three planes in dest_ptr */
  memset(offsets, 0, sizeof(offsets));
  scm_to_long_array(scm_offsets, offsets);
  /* Note: the second and third destination planes are taken from
   * offsets[2] and offsets[1] respectively. */
  decode_jpeg_raw(source_ptr, width * height * 2, Y4M_ILACE_NONE, 0, width, height,
                  dest_ptr + offsets[0], dest_ptr + offsets[2], dest_ptr + offsets[1]);
  return SCM_UNSPECIFIED;
}
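
A minimal sketch (not part of the example above) of how a caller might lay out the contiguous YUV 4:2:0 destination buffer and the plane offsets this binding expects; note that the example above additionally swaps the second and third plane offsets to match its caller's ordering. The buffer layout, the helper name and the mjpegtools headers (jpegutils.h, yuv4mpeg.h) are assumptions here, not code from the project.

#include <stdlib.h>
#include "jpegutils.h"   /* decode_jpeg_raw() from mjpegtools (assumed include path) */
#include "yuv4mpeg.h"    /* Y4M_ILACE_NONE */

/* Hypothetical helper: decode one MJPEG frame into a freshly allocated I420 buffer. */
static int decode_one_frame(unsigned char *jpeg, int jpeg_len, int width, int height)
{
  size_t luma_size = (size_t)width * height;
  size_t chroma_size = luma_size / 4;                  /* 4:2:0 subsampling */
  unsigned char *dest = malloc(luma_size + 2 * chroma_size);
  if (dest == NULL)
    return -1;
  unsigned char *y = dest;                             /* offsets[0] = 0               */
  unsigned char *u = dest + luma_size;                 /* offsets[1] = w * h           */
  unsigned char *v = dest + luma_size + chroma_size;   /* offsets[2] = w * h + w * h/4 */
  int res = decode_jpeg_raw(jpeg, jpeg_len, Y4M_ILACE_NONE, 0, width, height, y, u, v);
  /* ... consume the planes here ... */
  free(dest);
  return res;
}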
Example #2
/*
 * readframe - read jpeg or dv frame into yuv buffer
 *
 * returns:
 *	0   success
 *	1   fatal error
 *	2   corrupt data encountered; 
 *		decoding can continue, but this frame may be damaged 
 */
int readframe(int numframe, 
	      uint8_t *frame[],
	      LavParam *param,
	      EditList el)
{
  int len, i, res, data_format;
  uint8_t *frame_tmp;
  int warn;
  warn = 0;

  if (MAX_JPEG_LEN < el.max_frame_size) {
    mjpeg_error_exit1( "Max size of JPEG frame = %ld: too big",
		       el.max_frame_size);
  }
  
  len = el_get_video_frame(jpeg_data, numframe, &el);
  data_format = el_video_frame_data_format(numframe, &el);
  
  switch(data_format) {

  case DATAFORMAT_DV2 :
#ifndef HAVE_LIBDV
    mjpeg_error("DV input was not configured at compile time");
    res = 1;
#else
    mjpeg_debug("DV frame %d   len %d",numframe,len);
    res = 0;
    dv_parse_header(decoder, jpeg_data);
    switch(decoder->sampling) {
    case e_dv_sample_420:
      /* libdv decodes PAL DV directly as planar YUV 420
       * (YV12 or 4CC 0x32315659) if configured with the flag
       * --with-pal-yuv=YV12 which is not (!) the default
       */
      if (libdv_pal_yv12 == 1) {
	pitches[0] = decoder->width;
	pitches[1] = decoder->width / 2;
	pitches[2] = decoder->width / 2;
	if (pitches[0] != param->output_width ||
	    pitches[1] != param->chroma_width) {
	  mjpeg_error("for DV 4:2:0 only full width output is supported");
	  res = 1;
	} else {
	  dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			       frame, (int *)pitches);
	  /* swap the U and V components */
	  frame_tmp = frame[2];
	  frame[2] = frame[1];
	  frame[1] = frame_tmp;
	}
	break;
      }
      /* fall through: without YV12-style PAL output, PAL DV is decoded
       * as packed 4:2:2 just like NTSC below */
    case e_dv_sample_411:
    case e_dv_sample_422:
      /* libdv decodes NTSC DV (native 411) and by default also PAL
       * DV (native 420) as packed YUV 422 (YUY2 or 4CC 0x32595559)
       * where the U and V information is repeated.  This can be
       * transformed to planar 420 (YV12 or 4CC 0x32315659).
       * For NTSC DV this transformation is lossy.
       */
      pitches[0] = decoder->width * 2;
      pitches[1] = 0;
      pitches[2] = 0;
      if (decoder->width != param->output_width) {
	mjpeg_error("for DV only full width output is supported");
	res = 1;
      } else {
	dv_decode_full_frame(decoder, jpeg_data, e_dv_color_yuv,
			     dv_frame, (int *)pitches);
	frame_YUV422_to_planar(frame, dv_frame[0],
			       decoder->width,	decoder->height,
			       param->chroma);
      }
      break;
    default:
      res = 1;
      break;
    }
#endif /* HAVE_LIBDV */
    break;

  case DATAFORMAT_YUV420 :
  case DATAFORMAT_YUV422 :
    mjpeg_debug("raw YUV frame %d   len %d",numframe,len);
    frame_tmp = jpeg_data;
    memcpy(frame[0], frame_tmp, param->luma_size);
    frame_tmp += param->luma_size;
    memcpy(frame[1], frame_tmp, param->chroma_size);
    frame_tmp += param->chroma_size;
    memcpy(frame[2], frame_tmp, param->chroma_size);
    res = 0;
    break;

  default:
    mjpeg_debug("MJPEG frame %d   len %d",numframe,len);
    res = decode_jpeg_raw(jpeg_data, len, el.video_inter,
			  param->chroma,
			  param->output_width, param->output_height,
			  frame[0], frame[1], frame[2]);
  }
  
  if (res < 0) {
    mjpeg_warn( "Fatal Error Decoding Frame %d", numframe);
    return 1;
  } else if (res == 1) {
    mjpeg_warn( "Decoding of Frame %d failed", numframe);
    warn = 1;
    res = 0;
  }
  
  
  if (param->mono) {
    for (i = 0;
	 i < param->chroma_size;
	 ++i) {
      frame[1][i] = 0x80;
      frame[2][i] = 0x80;
    }
  }

  if(warn)
	  return 2;
  else
	  return 0;
}
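
A hedged usage sketch of the return-code contract documented above (0 success, 1 fatal error, 2 corrupt data but decoding can continue). The surrounding loop, the el.video_frames field and the buffer setup are assumptions, not code from the tool.

/* Hypothetical driver: decode every frame of an edit list and honour
 * readframe()'s return codes. */
static int decode_all_frames(uint8_t *frame[3], LavParam *param, EditList el)
{
  int n, damaged = 0;

  for (n = 0; n < el.video_frames; n++) {
    int rc = readframe(n, frame, param, el);
    if (rc == 1)
      return 1;        /* fatal error: stop decoding          */
    if (rc == 2)
      damaged++;       /* corrupt data: this frame may be bad */
    /* ... hand frame[0..2] to the next processing stage ... */
  }
  return damaged ? 2 : 0;
}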
static int generate_YUV4MPEG(parameters_t *param)
{
  uint32_t frame;
  //size_t pngsize;
  char pngname[FILENAME_MAX];
  uint8_t *yuv[3];  /* buffer for Y/U/V planes of decoded PNG */
  y4m_stream_info_t streaminfo;
  y4m_frame_info_t frameinfo;

  if ((param->width % 2) == 0)
    param->new_width = param->width;
  else
    {
      param->new_width = ((param->width >> 1) + 1) << 1;
      printf("Setting new, even image width %d", param->new_width);
    }

  mjpeg_info("Now generating YUV4MPEG stream.");
  y4m_init_stream_info(&streaminfo);
  y4m_init_frame_info(&frameinfo);

  y4m_si_set_width(&streaminfo, param->new_width);
  y4m_si_set_height(&streaminfo, param->height);
  y4m_si_set_interlace(&streaminfo, param->interlace);
  y4m_si_set_framerate(&streaminfo, param->framerate);
  y4m_si_set_chroma(&streaminfo, param->ss_mode);

  yuv[0] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[0][0]));
  yuv[1] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[1][0]));
  yuv[2] = (uint8_t *)malloc(param->new_width * param->height * sizeof(yuv[2][0]));

  y4m_write_stream_header(STDOUT_FILENO, &streaminfo);

  for (frame = param->begin;
       (frame < param->numframes + param->begin) || (param->numframes == -1);
       frame++) 
    {
      //      if (frame < 25)
      //      else      
      //snprintf(pngname, sizeof(pngname), param->pngformatstr, frame - 25);
      snprintf(pngname, sizeof(pngname), param->pngformatstr, frame);
            
      raw0 = yuv[0];
      raw1 = yuv[1];
      raw2 = yuv[2];
      if (decode_png(pngname, 1, param) == -1)
	{
	  mjpeg_info("Read from '%s' failed:  %s", pngname, strerror(errno));
	  if (param->numframes == -1) 
	    {
	      mjpeg_info("No more frames.  Stopping.");
	      break;  /* we are done; leave 'while' loop */
	    } 
	  else 
	    {
	      mjpeg_info("Rewriting latest frame instead.");
	    }
	} 
      else 
	{
#if 0 
	  mjpeg_debug("Preparing frame");
	  
	  /* Now open this PNG file, and examine its header to retrieve the 
	     YUV4MPEG info that shall be written */

	  if ((param->interlace == Y4M_ILACE_NONE) || (param->interleave == 1)) 
	    {
	      mjpeg_info("Processing non-interlaced/interleaved %s.", 
			 pngname, pngsize);

	      decode_png(imagedata, 0, 420, yuv[0], yuv[1], yuv[2], 
			 param->width, param->height, param->new_width);
	      
#if 0 
	      if (param->make_z_alpha)
		{
		  mjpeg_info("Writing Z/Alpha data.\n");
		  za_write(real_z_imagemap, param->width, param->height,z_alpha_fp,frame);
		}
#endif
	    } 
	  else 
	    {
	      mjpeg_error_exit1("Can't handle interlaced PNG information (yet) since there is no standard for it.\n"
				"Use interleaved mode (-L option) to create interlaced material.");

	      switch (param->interlace) 
		{		  
		case Y4M_ILACE_TOP_FIRST:
		  mjpeg_info("Processing interlaced, top-first %s", pngname);
#if 0 
		  decode_jpeg_raw(jpegdata, jpegsize,
				  Y4M_ILACE_TOP_FIRST,
				  420, param->width, param->height,
				  yuv[0], yuv[1], yuv[2]);
#endif
		  break;
		case Y4M_ILACE_BOTTOM_FIRST:
		  mjpeg_info("Processing interlaced, bottom-first %s", pngname);
#if 0 
		  decode_jpeg_raw(jpegdata, jpegsize,
				  Y4M_ILACE_BOTTOM_FIRST,
				  420, param->width, param->height,
				  yuv[0], yuv[1], yuv[2]);
#endif
		  break;
		default:
		  mjpeg_error_exit1("FATAL logic error?!?");
		  break;
		}
	    }
#endif
	  mjpeg_debug("Converting frame to YUV format.");
	  /* Transform colorspace, then subsample (in place) */
	  convert_RGB_to_YCbCr(yuv, param->height *  param->new_width);
	  chroma_subsample(param->ss_mode, yuv, param->new_width, param->height);

	  mjpeg_debug("Frame decoded, now writing to output stream.");
	}
      
      mjpeg_debug("Frame decoded, now writing to output stream.");
      y4m_write_frame(STDOUT_FILENO, &streaminfo, &frameinfo, yuv);
    }

#if 0 
  if (param->make_z_alpha)
    {
      za_write_end(z_alpha_fp);
      fclose(z_alpha_fp);
    }
#endif

  y4m_fini_stream_info(&streaminfo);
  y4m_fini_frame_info(&frameinfo);
  free(yuv[0]);
  free(yuv[1]);
  free(yuv[2]);

  return 0;
}
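
For reference, a small sketch (assumptions, not project code) of the even-width rounding used above together with the plane sizes a 4:2:0 output frame needs. The function above appears to keep all three planes at full luma size because the chroma is only subsampled in place after the full-resolution RGB-to-YCbCr conversion.

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical helper: round an odd width up to the next even value and
 * allocate Y/U/V planes sized for 4:2:0 output. */
static void alloc_yuv420_planes(uint32_t width, uint32_t height, uint8_t *yuv[3])
{
  uint32_t even_width = (width + 1) & ~1u;  /* for odd widths this equals ((w >> 1) + 1) << 1 */
  size_t luma   = (size_t)even_width * height;
  size_t chroma = luma / 4;                 /* 4:2:0: chroma is half width and half height */

  yuv[0] = malloc(luma);
  yuv[1] = malloc(chroma);
  yuv[2] = malloc(chroma);
}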