Example #1
/**
 * @brief Receive decoded video packet.
 *
 * @param av Handler.
 * @param call_index Call index.
 * @param output Storage.
 * @return int
 * @retval 0 Success.
 * @retval ToxAvError On Error.
 */
inline__ int toxav_recv_video ( ToxAv *av, int32_t call_index, vpx_image_t **output)
{
    if ( !output ) return ErrorInternal;

    uint8_t packet [RTP_PAYLOAD_SIZE];
    int recved_size = 0;
    int rc;
    CallSpecific *call = &av->calls[call_index];

    do {
        recved_size = toxav_recv_rtp_payload(av, call_index, TypeVideo, packet);

        if (recved_size > 0 && ( rc = vpx_codec_decode(&call->cs->v_decoder, packet, recved_size, NULL, 0) ) != VPX_CODEC_OK) {
            /*fprintf(stderr, "Error decoding video: %s\n", vpx_codec_err_to_string(rc));*/
            return ErrorInternal;
        }

    } while (recved_size > 0);

    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;
    img = vpx_codec_get_frame(&call->cs->v_decoder, &iter);

    *output = img;
    return 0;
}
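A minimal caller sketch for the function above, following its documented convention of returning 0 on success and leaving *output NULL when no frame is available; render_yuv is a hypothetical placeholder, not part of ToxAv or libvpx:

/* Hypothetical caller: fetch the most recent decoded frame and hand it to a
 * renderer. render_yuv() stands in for whatever display path the application uses. */
static void poll_video(ToxAv *av, int32_t call_index)
{
    vpx_image_t *frame = NULL;

    if (toxav_recv_video(av, call_index, &frame) == 0 && frame != NULL) {
        /* vpx_image_t exposes the decoded planes, strides and display size. */
        render_yuv(frame->planes[0], frame->planes[1], frame->planes[2],
                   frame->stride[0], frame->stride[1], frame->stride[2],
                   frame->d_w, frame->d_h);
    }
}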
Example #2
int decompress_image(struct vpx_context *ctx, const uint8_t *in, int size, uint8_t *out[3], int outstride[3])
{
	vpx_image_t *img;
	int frame_sz = size;
	vpx_codec_iter_t iter = NULL;
	const uint8_t *frame = in;
	int i = 0;

	if (vpx_codec_decode(&ctx->codec, frame, frame_sz, NULL, 0)) {
		codec_error(&ctx->codec, "vpx_codec_decode");
		return -1;
	}
	img = vpx_codec_get_frame(&ctx->codec, &iter);
	if (img == NULL) {
		codec_error(&ctx->codec, "vpx_codec_get_frame");
		return -1;
	}

	for (i = 0; i < 3; i++) {
		out[i] = img->planes[i];
		outstride[i] = img->stride[i];
	}

	ctx->pixfmt = img->fmt;
	return 0;
}
Example #3
void krad_vpx_decoder_decode (krad_vpx_decoder_t *vpx,
                              void *buffer,
                              int len) {

  if (vpx_codec_decode (&vpx->decoder, buffer, len, 0, 0)) {
    printf ("Failed to decode %d byte frame: %s\n",
              len, vpx_codec_error (&vpx->decoder));
  }

  if (vpx->width == 0) {

    vpx_codec_get_stream_info (&vpx->decoder, &vpx->stream_info);

    printf ("VPX Stream Info: W:%d H:%d KF:%d\n",
            vpx->stream_info.w,
            vpx->stream_info.h, 
            vpx->stream_info.is_kf);

    vpx->width = vpx->stream_info.w;
    vpx->height = vpx->stream_info.h;
  }

  vpx->iter = NULL;
  vpx->img = vpx_codec_get_frame (&vpx->decoder, &vpx->iter);
}
Example #4
void krad_vpx_decoder_decode (krad_vpx_decoder_t *kradvpx, void *buffer, int len) {


	if (vpx_codec_decode(&kradvpx->decoder, buffer, len, 0, 0))
	{
		failfast ("Failed to decode %d byte frame: %s\n", len, vpx_codec_error(&kradvpx->decoder));
		//exit(1);
	}

	vpx_codec_get_stream_info (&kradvpx->decoder, &kradvpx->stream_info);
	//printf("VPX Stream Info: W:%d H:%d KF:%d\n", kradvpx->stream_info.w, kradvpx->stream_info.h, 
	//		  kradvpx->stream_info.is_kf);
	
	if (kradvpx->width == 0) {
		kradvpx->width = kradvpx->stream_info.w;
		kradvpx->height = kradvpx->stream_info.h;
	}
	/*
	if (kradvpx->img == NULL) {
	
		kradvpx->width = kradvpx->stream_info.w;
		kradvpx->height = kradvpx->stream_info.h;
		
		if ((kradvpx->img = vpx_img_alloc(NULL, VPX_IMG_FMT_YV12, kradvpx->stream_info.w, 
										  kradvpx->stream_info.h, 1)) == NULL) {
			failfast ("Failed to allocate vpx image\n");
		}
	
	}
	*/
	kradvpx->iter = NULL;
	kradvpx->img = vpx_codec_get_frame (&kradvpx->decoder, &kradvpx->iter);

}
Example #5
/**
 * @brief Receive decoded video packet.
 *
 * @param av Handler.
 * @param output Storage.
 * @return int
 * @retval 0 Success.
 * @retval ToxAvError On Error.
 */
inline__ int toxav_recv_video ( ToxAv *av, vpx_image_t **output)
{
    if ( !output ) return ErrorInternal;

    uint8_t packet [RTP_PAYLOAD_SIZE];
    int recved_size = 0;
    int error;
    
    do {
        recved_size = toxav_recv_rtp_payload(av, TypeVideo, packet);

        if (recved_size > 0 && ( error = vpx_codec_decode(&av->cs->v_decoder, packet, recved_size, NULL, 0) ) != VPX_CODEC_OK) 
            fprintf(stderr, "Error decoding: %s\n", vpx_codec_err_to_string(error));
        
    } while (recved_size > 0);

    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;
    img = vpx_codec_get_frame(&av->cs->v_decoder, &iter);

    /* Note: *output is set to NULL if nothing was received. */
    *output = img;
    return 0;
}
Example #6
	EVideoDecoderReadState VideoDecoderVPX::readNextFrame( float & _pts )
	{	
		const mkvparser::Block * block  = m_blockEntry->GetBlock();

		const int frameCount = block->GetFrameCount();

		for (int i = 0; i < frameCount; ++i)
		{
			const mkvparser::Block::Frame & frame = block->GetFrame( i );

			m_frameSize = frame.len;
			//const long long offset = frame.pos;

			m_frame = (unsigned char *)realloc( m_frame, m_frameSize );

			frame.Read( &m_reader, m_frame );

			vpx_codec_err_t err_decode = vpx_codec_decode( &m_codec, m_frame, m_frameSize, NULL, 0);

			if( err_decode != VPX_CODEC_OK )
			{
				return VDRS_FAILURE;
			}
		}

		m_track->GetNext( m_blockEntry, m_blockEntry );
        
		m_pts += m_dataInfo.getFrameTiming();

		return VDRS_SUCCESS;
	}
Example #7
int main(int argc, char **argv) {
  int frame_cnt = 0;
  FILE *outfile = NULL;
  vpx_codec_ctx_t codec;
  VpxVideoReader *reader = NULL;
  const VpxVideoInfo *info = NULL;
  const VpxInterface *decoder = NULL;

  exec_name = argv[0];

  if (argc != 3)
    die("Invalid number of arguments.");

  reader = vpx_video_reader_open(argv[1]);
  if (!reader)
    die("Failed to open %s for reading.", argv[1]);

  if (!(outfile = fopen(argv[2], "wb")))
    die("Failed to open %s for writing.", argv[2]);

  info = vpx_video_reader_get_info(reader);

  decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
  if (!decoder)
    die("Unknown input codec.");

  printf("Using %s\n", vpx_codec_iface_name(decoder->codec_interface()));

  if (vpx_codec_dec_init(&codec, decoder->codec_interface(), NULL, 0))
    die_codec(&codec, "Failed to initialize decoder");

  while (vpx_video_reader_read_frame(reader)) {
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img = NULL;
    size_t frame_size = 0;
    const unsigned char *frame = vpx_video_reader_get_frame(reader,
                                                            &frame_size);
    if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
      die_codec(&codec, "Failed to decode frame");

    while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
      unsigned char digest[16];

      get_image_md5(img, digest);
      print_md5(outfile, digest);
      fprintf(outfile, "  img-%dx%d-%04d.i420\n",
              img->d_w, img->d_h, ++frame_cnt);
    }
  }

  printf("Processed %d frames.\n", frame_cnt);
  if (vpx_codec_destroy(&codec))
    die_codec(&codec, "Failed to destroy codec.");

  vpx_video_reader_close(reader);

  fclose(outfile);
  return EXIT_SUCCESS;
}
Example #8
int main(int argc, char **argv) {
  int frame_cnt = 0;
  FILE *outfile = NULL;
  vpx_codec_ctx_t codec;
  VpxVideoReader *reader = NULL;
  const VpxInterface *decoder = NULL;
  const VpxVideoInfo *info = NULL;

  exec_name = argv[0];

  if (argc != 3)
    die("Invalid number of arguments.");

  reader = vpx_video_reader_open(argv[1]);
  if (!reader)
    die("Failed to open %s for reading.", argv[1]);

  if (!(outfile = fopen(argv[2], "wb")))
    die("Failed to open %s for writing.", argv[2]);

  info = vpx_video_reader_get_info(reader);

  decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
  if (!decoder)
    die("Unknown input codec.");

  printf("Using %s\n", vpx_codec_iface_name(decoder->interface()));

  if (vpx_codec_dec_init(&codec, decoder->interface(), NULL, 0))
    die_codec(&codec, "Failed to initialize decoder.");

  while (vpx_video_reader_read_frame(reader)) {
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img = NULL;
    size_t frame_size = 0;
    const unsigned char *frame = vpx_video_reader_get_frame(reader,
                                                            &frame_size);
    if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
      die_codec(&codec, "Failed to decode frame.");

    while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
      vpx_img_write(img, outfile);
      ++frame_cnt;
    }
  }

  printf("Processed %d frames.\n", frame_cnt);
  if (vpx_codec_destroy(&codec))
    die_codec(&codec, "Failed to destroy codec");

  printf("Play: ffplay -f rawvideo -pix_fmt yuv420p -s %dx%d %s\n",
         info->frame_width, info->frame_height, argv[2]);

  vpx_video_reader_close(reader);

  fclose(outfile);

  return EXIT_SUCCESS;
}
Example #9
int32_t kr_vpx_decode (krad_vpx_decoder_t *vpx,
                       kr_medium_t *medium,
                       kr_codeme_t *codeme) {

  if (vpx_codec_decode (&vpx->decoder, codeme->data, codeme->sz, 0, 0)) {
    printf ("Failed to decode %zu byte frame: %s\n",
              codeme->sz, vpx_codec_error (&vpx->decoder));
  }

  if (vpx->width == 0) {

    vpx_codec_get_stream_info (&vpx->decoder, &vpx->stream_info);

    printk ("VPX Stream Info: W:%d H:%d KF:%d\n",
            vpx->stream_info.w,
            vpx->stream_info.h, 
            vpx->stream_info.is_kf);

    vpx->width = vpx->stream_info.w;
    vpx->height = vpx->stream_info.h;
  }

  vpx->iter = NULL;
  vpx->img = vpx_codec_get_frame (&vpx->decoder, &vpx->iter);

  if (vpx->img != NULL) {
  
    medium->v.pps[0] = vpx->width;
    medium->v.pps[1] = vpx->width/2;  
    medium->v.pps[2] = vpx->width/2;
      
  //printf ("VPXDEC: s %u %u %u", vpx->img->stride[0], vpx->img->stride[1], vpx->img->stride[2]);
  //memcpy (medium->data, vpx->img->img_data, vpx->img->stride[0] * vpx->height + (vpx->img->stride[1] * vpx->height * 2));
    /* Repack the decoded planes into a contiguous I420 layout in medium->data. */
    int r;

    for (r = 0; r < vpx->height; r++) {
      memcpy (medium->data + (r * vpx->width),
              vpx->img->planes[0] + (r * vpx->img->stride[0]), vpx->width);
    }
    for (r = 0; r < vpx->height / 2; r++) {
      memcpy (medium->data + (vpx->width * vpx->height) + (r * vpx->width / 2),
              vpx->img->planes[1] + (r * vpx->img->stride[1]), vpx->width / 2);
    }
    for (r = 0; r < vpx->height / 2; r++) {
      memcpy (medium->data + (vpx->width * vpx->height) + ((vpx->width * vpx->height) / 4) + (r * vpx->width / 2),
              vpx->img->planes[2] + (r * vpx->img->stride[2]), vpx->width / 2);
    }

      //memcpy (medium->data + vpx->img->stride[0] * vpx->height, vpx->img->planes[1], vpx->img->stride[1] * vpx->height / 2);
      //memcpy (medium->data + (vpx->img->stride[0] * vpx->height) + (vpx->img->stride[1] * (vpx->height /2)), vpx->img->planes[2], vpx->img->stride[2] * vpx->height / 2);    

    medium->v.ppx[0] = medium->data;
    medium->v.ppx[1] = medium->data + vpx->width * (vpx->height);  
    medium->v.ppx[2] = medium->data + vpx->width * (vpx->height) + ((vpx->width * (vpx->height)) /4);


    return 1;
  }

  return 0;
}
Example #10
RefPtr<MediaDataDecoder::DecodePromise>
VPXDecoder::ProcessDecode(MediaRawData* aSample)
{
  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());

  if (vpx_codec_err_t r = vpx_codec_decode(&mVPX, aSample->Data(), aSample->Size(), nullptr, 0)) {
    LOG("VPX Decode error: %s", vpx_codec_err_to_string(r));
    return DecodePromise::CreateAndReject(
      MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                  RESULT_DETAIL("VPX error: %s", vpx_codec_err_to_string(r))),
      __func__);
  }

  vpx_codec_iter_t iter = nullptr;
  vpx_image_t *img;
  vpx_image_t *img_alpha = nullptr;
  bool alpha_decoded = false;
  DecodedData results;

  while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
    NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420 ||
                 img->fmt == VPX_IMG_FMT_I444,
                 "WebM image format not I420 or I444");
    NS_ASSERTION(!alpha_decoded,
                 "Multiple frames per packet that contains alpha");

    if (aSample->AlphaSize() > 0) {
      if (!alpha_decoded){
        MediaResult rv = DecodeAlpha(&img_alpha, aSample);
        if (NS_FAILED(rv)) {
          return DecodePromise::CreateAndReject(rv, __func__);
        }
        alpha_decoded = true;
      }
    }
    // Chroma shifts are rounded down as per the decoding examples in the SDK
    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = img->planes[0];
    b.mPlanes[0].mStride = img->stride[0];
    b.mPlanes[0].mHeight = img->d_h;
    b.mPlanes[0].mWidth = img->d_w;
    b.mPlanes[0].mOffset = b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = img->planes[1];
    b.mPlanes[1].mStride = img->stride[1];
    b.mPlanes[1].mOffset = b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = img->planes[2];
    b.mPlanes[2].mStride = img->stride[2];
    b.mPlanes[2].mOffset = b.mPlanes[2].mSkip = 0;

    if (img->fmt == VPX_IMG_FMT_I420) {
      b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;

      b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
    } else if (img->fmt == VPX_IMG_FMT_I444) {
Example #11
static int encode_frame(vpx_codec_ctx_t *ecodec,
                        vpx_codec_enc_cfg_t *cfg,
                        vpx_image_t *img,
                        unsigned int frame_in,
                        VpxVideoWriter *writer,
                        int test_decode,
                        vpx_codec_ctx_t *dcodec,
                        unsigned int *frame_out,
                        int *mismatch_seen) {
  int got_pkts = 0;
  vpx_codec_iter_t iter = NULL;
  const vpx_codec_cx_pkt_t *pkt = NULL;
  int got_data;
  const vpx_codec_err_t res = vpx_codec_encode(ecodec, img, frame_in, 1,
                                               0, VPX_DL_GOOD_QUALITY);
  if (res != VPX_CODEC_OK)
    die_codec(ecodec, "Failed to encode frame");

  got_data = 0;

  while ((pkt = vpx_codec_get_cx_data(ecodec, &iter)) != NULL) {
    got_pkts = 1;

    if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
      const int keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;

      if (!(pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT)) {
        *frame_out += 1;
      }

      if (!vpx_video_writer_write_frame(writer,
                                        pkt->data.frame.buf,
                                        pkt->data.frame.sz,
                                        pkt->data.frame.pts)) {
        die_codec(ecodec, "Failed to write compressed frame");
      }
      printf(keyframe ? "K" : ".");
      fflush(stdout);
      got_data = 1;

      // Decode 1 frame.
      if (test_decode) {
        if (vpx_codec_decode(dcodec, pkt->data.frame.buf,
                             (unsigned int)pkt->data.frame.sz, NULL, 0))
          die_codec(dcodec, "Failed to decode frame.");
      }
    }
  }

  // Mismatch checking
  if (got_data && test_decode) {
    testing_decode(ecodec, dcodec, cfg, *frame_out, mismatch_seen);
  }

  return got_pkts;
}
Example #12
void vc_iterate(VCSession *vc)
{
    if (!vc) {
        return;
    }

    pthread_mutex_lock(vc->queue_mutex);

    struct RTPMessage *p;

    if (!rb_read(vc->vbuf_raw, (void **)&p)) {
        LOGGER_TRACE(vc->log, "no Video frame data available");
        pthread_mutex_unlock(vc->queue_mutex);
        return;
    }

    pthread_mutex_unlock(vc->queue_mutex);
    const struct RTPHeader *const header = &p->header;

    uint32_t full_data_len;

    if (header->flags & RTP_LARGE_FRAME) {
        full_data_len = header->data_length_full;
        LOGGER_DEBUG(vc->log, "vc_iterate:001:full_data_len=%d", (int)full_data_len);
    } else {
        full_data_len = p->len;
        LOGGER_DEBUG(vc->log, "vc_iterate:002");
    }

    LOGGER_DEBUG(vc->log, "vc_iterate: rb_read p->len=%d p->header.xe=%d", (int)full_data_len, p->header.xe);
    LOGGER_DEBUG(vc->log, "vc_iterate: rb_read rb size=%d", (int)rb_size(vc->vbuf_raw));
    const vpx_codec_err_t rc = vpx_codec_decode(vc->decoder, p->data, full_data_len, nullptr, MAX_DECODE_TIME_US);
    free(p);

    if (rc != VPX_CODEC_OK) {
        LOGGER_ERROR(vc->log, "Error decoding video: %d %s", (int)rc, vpx_codec_err_to_string(rc));
        return;
    }

    /* Play decoded images */
    vpx_codec_iter_t iter = nullptr;

    for (vpx_image_t *dest = vpx_codec_get_frame(vc->decoder, &iter);
            dest != nullptr;
            dest = vpx_codec_get_frame(vc->decoder, &iter)) {
        if (vc->vcb) {
            vc->vcb(vc->av, vc->friend_number, dest->d_w, dest->d_h,
                    (const uint8_t *)dest->planes[0], (const uint8_t *)dest->planes[1], (const uint8_t *)dest->planes[2],
                    dest->stride[0], dest->stride[1], dest->stride[2], vc->vcb_user_data);
        }

        vpx_img_free(dest); // is this needed? none of the VPx examples show that
    }
}
Example #13
JNIEXPORT jint Java_ryulib_VideoZip_VPX_DecodeBitmap(JNIEnv* env,
		jclass clazz, jint handle, jobject bitmap, jbyteArray buffer, jint bufferSize)
{
	int result = 0;

	RyuVPX *pHandle = (RyuVPX *) handle;

	jbyte *pByteBuffer = (*env)->GetByteArrayElements(env, buffer, 0);

	unsigned char *pFrame = (unsigned char *) pByteBuffer;
	int *pFrameSize;
	int frameSize = 0;
	int count = 0;
	vpx_image_t *img;
	vpx_codec_iter_t iter;

	/* The buffer holds one or more frames, each prefixed by an int frame size. */
	while (count < bufferSize) {
		pFrameSize = (int *) pFrame;
		frameSize = *pFrameSize;
		pFrame = pFrame + sizeof(int);

		if (vpx_codec_decode(&pHandle->codec, (unsigned char*) pFrame, frameSize, NULL, 0)) {
			goto EXIT;
		}
		pFrame = pFrame + frameSize;

		count = count + sizeof(int) + frameSize;

		iter = NULL;
		while((img = vpx_codec_get_frame(&pHandle->codec, &iter))) {
			void *pixelBitmap;
			if (AndroidBitmap_lockPixels(env, bitmap, &pixelBitmap) >= 0) {
				I420ToARGB(
					(unsigned char *) img->planes[0], img->stride[0],
					(unsigned char *) img->planes[1], img->stride[1],
					(unsigned char *) img->planes[2], img->stride[2],
					(unsigned char *) pixelBitmap,
					img->d_w * _PixelSize,
					img->d_w, img->d_h
				);

				AndroidBitmap_unlockPixels(env, bitmap);
			}

			result = 1;
		}
	}

EXIT:
	(*env)->ReleaseByteArrayElements(env, buffer, pByteBuffer, 0);

	return result;
}
Example #14
// Decodes any frame that was parsed by FFmpeg
static int
vpx_decode(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *pkt)
{
    Decoder * const decoder = avctx->opaque;
    AVFrame * const frame = data;
    const void *iter;
    struct vpx_image *img;
    vpx_codec_err_t err;
    int i;

    err = vpx_codec_decode(&decoder->decoder, pkt->data, pkt->size, NULL, 0);
    if (err != VPX_CODEC_OK)
        goto error_decode;

    iter = NULL;
    img = vpx_codec_get_frame(&decoder->decoder, &iter);
    if (img) {
        frame->format = ff_get_pix_fmt(img->fmt);
        if (frame->format == AV_PIX_FMT_NONE || frame->format != avctx->pix_fmt)
            goto error_unsupported_format;

        frame->width = img->d_w;
        frame->height = img->d_h;
        if (frame->width != avctx->width || frame->height != avctx->height) {
            avcodec_set_dimensions(avctx, frame->width, frame->height);
            if (frame->width != avctx->width || frame->height != avctx->height)
                goto error_size_change;
        }

        frame->interlaced_frame = 0;
        for (i = 0; i < 4; i++) {
            frame->data[i] = img->planes[i];
            frame->linesize[i] = img->stride[i];
        }
        *got_frame = 1;
    }
    return pkt->size;

    /* ERRORS */
error_decode:
    mvt_error("failed to decode frame: %s", vpx_codec_error(&decoder->decoder));
    return AVERROR_INVALIDDATA;
error_unsupported_format:
    mvt_error("unsupported image format (%d)", img->fmt);
    return AVERROR_INVALIDDATA;
error_size_change:
    mvt_error("failed to change dimensions (%dx%d)",
        frame->width, frame->height);
    return AVERROR_INVALIDDATA;
}
Example #15
void VPXDecoder::decodeFrame(Graphics::Surface &surface, Common::SeekableReadStream &dataStream) {
	if (!_initialized)
		return;

	// Read all the data from the stream
	Common::ScopedArray<byte> data(new byte[dataStream.size()]);
	dataStream.read(data.get(), dataStream.size());

	// Perform the actual decode
	vpx_codec_err_t result = vpx_codec_decode(&_context, data.get(), dataStream.size(), 0, 0);
	if (result != VPX_CODEC_OK)
		return;

	// Try to get the image
	vpx_codec_iter_t iter = 0;
	vpx_image_t *image = vpx_codec_get_frame(&_context, &iter);
	if (!image)
		return;

	// Figure out the color range
	Graphics::YUVToRGBManager::LuminanceScale scale;
	switch (image->range) {
	case VPX_CR_STUDIO_RANGE:
		scale = Graphics::YUVToRGBManager::kScaleITU;
		break;
	case VPX_CR_FULL_RANGE:
		scale = Graphics::YUVToRGBManager::kScaleFull;
		break;
	default:
		return;
	}

	// If we don't have it already, create our local surface
	if (!_surface)
		_surface.reset(new Graphics::Surface(image->w, image->h));

	// Do the conversion based on the color space
	switch (image->fmt) {
	case VPX_IMG_FMT_I420:
		YUVToRGBMan.convert420(scale, _surface->getData(), _surface->getPitch(), image->planes[0], image->planes[1], image->planes[2], image->w, image->h, image->stride[0], image->stride[1]);
		break;
	default:
		return;
	}

	// Copy the subarea into the surface
	for (int y = 0; y < surface.getHeight(); y++)
		memcpy(surface.getData() + y * surface.getPitch(), _surface->getData() + (y * image->d_h) * image->d_w * 4, image->d_w * 4);
}
Example #16
static int
x_vpx_decoder_decode(vpx_dec_ctx_t *_decoder, const char *buffer, size_t bufsiz)
  {
    vpx_image_t *img;
    unsigned char *buf;
    vpx_codec_iter_t iter = NULL;

    vpx_codec_decode(_decoder, buffer, bufsiz, 0, VPX_DL_REALTIME);
    while ((img = vpx_codec_get_frame(_decoder, &iter)))
      {
        printf("Decoded frame(fmt(%d)) bps=%d,%dx%d\n", img->fmt, img->bps,
            img->d_w, img->d_h);
        // img->img_data;
      }
    return 0;
  }
Example #17
int
__decode_vp8_pkt(vpx_codec_ctx_t *_decoder, const uint8_t *buffer,
    unsigned int bufsiz, int outfd)
{
  vpx_image_t *img;
  vpx_codec_iter_t iter = NULL;

  vpx_codec_decode(_decoder, buffer, bufsiz, 0, VPX_DL_REALTIME);
  while ((img = vpx_codec_get_frame(_decoder, &iter)))
    {
      // printf("Decoded frame(fmt(0x%x)) bps=%d,%dx%d\n", img->fmt, img->bps,
      //  img->d_w, img->d_h);
      x_vp8_write_frame(outfd, img);
    }
  return 0;
}
Example #18
static void decode_video(ToxAv *av, CallSpecific *call, DECODE_PACKET *p)
{
    int32_t call_index = call - av->calls;

    int rc = vpx_codec_decode(&call->cs->v_decoder, p->data, p->size, NULL, MAX_DECODE_TIME_US);

    if (rc != VPX_CODEC_OK) {
        LOGGER_ERROR("Error decoding video: %s\n", vpx_codec_err_to_string(rc));
    }

    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;
    img = vpx_codec_get_frame(&call->cs->v_decoder, &iter);

    if (img && av->video_callback) {
        av->video_callback(av, call_index, img, av->video_callback_userdata);
    } else {
        LOGGER_WARNING("Video packet dropped due to missing callback or no image!");
    }

    free(p);
}
Example #19
/*
 * Method:    codec_decode
 */
JNIEXPORT jint JNICALL
Java_org_jitsi_impl_neomedia_codec_video_VPX_codec_1decode
    (JNIEnv *env,
     jclass clazz,
     jlong context,
     jbyteArray buf,
     jint buf_offset,
     jint buf_size,
     jlong user_priv,
     jlong deadline)
{
    jbyte *buf_ptr = (*env)->GetByteArrayElements(env, buf, NULL);

    vpx_codec_err_t ret;
    ret = vpx_codec_decode((vpx_codec_ctx_t *) (intptr_t) context,
                           (uint8_t *) (buf_ptr + buf_offset),
                           (unsigned int) buf_size,
                           (void *) (intptr_t) user_priv,
                           (long) deadline);
    (*env)->ReleaseByteArrayElements(env, buf, buf_ptr, JNI_ABORT);
    return (jint) ret;
}
Example #20
/*
  Decode the given data. Note that on Linux, libvpx will crash
  if the first partition passed into this function is not a key
  frame (see the key-frame check sketched after this function).
 */
int rxs_decoder_decode(rxs_decoder* dec, uint8_t* buffer, uint32_t nbytes) {

  vpx_codec_iter_t iter = NULL;
  vpx_codec_err_t err;

  if (!dec) { return -1; } 
  if (!buffer) { return -2; } 
  if (!nbytes) { return -3; } 

  dec->img = NULL;

  err = vpx_codec_decode(&dec->ctx, buffer, nbytes, NULL, 0);
  if (err) {
    printf("Error: cannot decode buffer: %s\n", vpx_codec_err_to_string(err));
    return -4;
  }

  while ( (dec->img = vpx_codec_get_frame(&dec->ctx, &iter)) ) {
    dec->on_image(dec, dec->img);
  }

  return 0;
}
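A minimal sketch, not part of the example above, of how a caller might guard against the crash described in the comment. Assuming the payload is a raw VP8 frame, RFC 6386 defines bit 0 of the first byte of the frame tag as the frame type, where 0 means key frame; the helper name below is hypothetical:

#include <stdint.h>

/* Returns 1 if the buffer starts with a VP8 key frame, 0 otherwise. */
static int buffer_is_vp8_keyframe(const uint8_t* buffer, uint32_t nbytes) {
  if (!buffer || nbytes == 0) {
    return 0;
  }
  /* Bit 0 of the first frame-tag byte: 0 = key frame, 1 = inter frame. */
  return (buffer[0] & 0x01) == 0;
}

A caller could run this check before the first call to rxs_decoder_decode() and drop inter frames until a key frame has been seen.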
Example #21
/**
    \fn uncompress
*/
bool    decoderVPX::uncompress (ADMCompressedImage * in, ADMImage * out)
{
    if (vpx_codec_decode(VPX, in->data, in->dataLength, NULL, 0) != VPX_CODEC_OK) 
    {
        ADM_warning("Error decoding VPX\n");
        return false;
    }
    struct vpx_image *img;
    const void *iter = NULL;
    img = vpx_codec_get_frame(VPX, &iter);
    if(img)
    {
            if (img->fmt != VPX_IMG_FMT_I420) 
            {
                ADM_warning("Wrong Colorspace\n");
                return false;
            }
            ADMImageRef    *r=out->castToRef();
            if(r)
            {
                    r->_planes[0]=img->planes[0];
                    r->_planes[1]=img->planes[1];
                    r->_planes[2]=img->planes[2];
                    r->_planeStride[0]=img->stride[0];
                    r->_planeStride[1]=img->stride[1];
                    r->_planeStride[2]=img->stride[2];
                    r->_colorspace=ADM_COLOR_YV12;
                    r->Pts=in->demuxerPts;
                    r->flags=in->flags;
                    return true;
            }
                
            ADM_warning("Only ref for VPX decoder\n");

    }
    return false;
}
Example #22
uint8_t stream_parse_yuv(Stream *stream, uint8_t *gl_luma_buf, uint8_t *gl_chromaB_buf, uint8_t *gl_chromaR_buf, size_t *net_bytes_read) {
  *net_bytes_read = 0;

  if (vpx_video_stream_reader_read_frame(stream->net_buf, stream->net_buf_fill, stream->reader)) {
    size_t frame_size = 0;
    const unsigned char *frame = vpx_video_stream_reader_get_frame(stream->reader, &frame_size);
    *net_bytes_read = frame_size + 12;
    if (vpx_codec_decode(&stream->codec, frame, (unsigned int)frame_size, NULL, 16000))
      proc_warn("Failed to decode frame");

    vpx_image_t *img = NULL;
    stream->iter = NULL;
    while ((img = vpx_codec_get_frame(&stream->codec, &stream->iter)) != NULL) {
      size_t y;
      for (y = 0; y < img->d_h; y++) {
          memcpy(gl_luma_buf + y * img->d_w, img->planes[0] + y * img->stride[0], img->d_w);
          memcpy(gl_chromaB_buf + y/2 * img->d_w/2, img->planes[1] + y/2 * img->stride[1], img->d_w/2);
          memcpy(gl_chromaR_buf + y/2 * img->d_w/2, img->planes[2] + y/2 * img->stride[2], img->d_w/2);
      }
      return 0;
    }
  }
  return 1;
}
Example #23
uint8_t stream_parse(Stream *stream, uint8_t *gl_rgb_buf, size_t *net_bytes_read) {
  *net_bytes_read = 0;
  if (vpx_video_stream_reader_read_frame(stream->net_buf, stream->net_buf_fill, stream->reader)) {
    size_t frame_size = 0;
    const unsigned char *frame = vpx_video_stream_reader_get_frame(stream->reader, &frame_size);
    *net_bytes_read = frame_size + 12;
    if (vpx_codec_decode(&stream->codec, frame, (unsigned int)frame_size, NULL, 0))
      proc_warn("Failed to decode frame");

    vpx_image_t *img = NULL;
    stream->iter = NULL;
    while ((img = vpx_codec_get_frame(&stream->codec, &stream->iter)) != NULL) {
      int status = ConvertFromI420(img->planes[0], img->stride[0],
        img->planes[2], img->stride[2],
        img->planes[1], img->stride[1],
        gl_rgb_buf, img->d_w * 3,
        img->d_w, img->d_h,
        FOURCC_24BG);
      // proc_info("I420ToRGB24 to status - %i", status);
      return 0;
    }
  }
  return 1;
}
Example #24
static void dec_process(MSFilter *f) {
	mblk_t *im;
	DecState *s=(DecState*)f->data;

	while( (im=ms_queue_get(f->inputs[0]))!=0) {
		mblk_t *m;

		dec_unpacketize(f, s, im, &s->q);

		while((m=ms_queue_get(&s->q))!=NULL){
			vpx_codec_err_t err;
			vpx_codec_iter_t  iter = NULL;
			vpx_image_t *img;

			err = vpx_codec_decode(&s->codec, m->b_rptr, m->b_wptr - m->b_rptr, NULL, 0);
			if (err) {
				ms_warning("vpx_codec_decode failed : %d %s (%s)\n", err, vpx_codec_err_to_string(err), vpx_codec_error_detail(&s->codec));

				if ((f->ticker->time - s->last_error_reported_time)>5000 || s->last_error_reported_time==0) {
					s->last_error_reported_time=f->ticker->time;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
				}
				if (s->first_image_decoded == FALSE) {
					/* if no frames have been decoded yet, do not try to browse decoded frames */
					freemsg(m);
					continue;
				}
			}


			/* browse decoded frames */
			while((img = vpx_codec_get_frame(&s->codec, &iter))) {
				int i,j;

				if (s->yuv_width != img->d_w || s->yuv_height != img->d_h) {
					if (s->yuv_msg)
						freemsg(s->yuv_msg);
					s->yuv_msg = ms_yuv_buf_alloc(&s->outbuf, img->d_w, img->d_h);
					s->yuv_width = img->d_w;
					s->yuv_height = img->d_h;
				}

				/* scale/copy frame to destination mblk_t */
				for(i=0; i<3; i++) {
					uint8_t* dest = s->outbuf.planes[i];
					uint8_t* src = img->planes[i];
					int h = img->d_h >> ((i>0)?1:0);

					for(j=0; j<h; j++) {
						memcpy(dest, src, s->outbuf.strides[i]);

						dest += s->outbuf.strides[i];
						src += img->stride[i];
					}
				}
				ms_queue_put(f->outputs[0], dupmsg(s->yuv_msg));

				if (ms_video_update_average_fps(&s->fps, f->ticker->time)) {
					ms_message("VP8 decoder: Frame size: %dx%d", s->yuv_width, s->yuv_height);
				}
				if (!s->first_image_decoded) {
					s->first_image_decoded = TRUE;
					ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
			}
			freemsg(m);
		}
	}
}
Example #25
      Dynamic read_frame(struct DecInputContext *input) {

        vpx_codec_iter_t iter = NULL;
        vpx_image_t *img;

        uint8_t *buf = NULL;
        size_t bytes_in_buffer = 0;
        size_t buffer_size = 0;
        int status = webm_read_frame(input->webm_ctx, &buf, &bytes_in_buffer, &buffer_size);
        if(status == 0) {
          if(vpx_codec_decode(input->decoder, buf, (unsigned int)bytes_in_buffer, NULL, 0)) {
            warn("Failed to decode frame: %s", vpx_codec_error(input->decoder));
          }
        } else {
          warn("EOF/Error %d", status);
        }

        if(status == 0) {
          img = vpx_codec_get_frame(input->decoder, &iter);
        } else {
          img = NULL;
        }

        /*
        {
          status:Int, (0 = success, 1 = EOF, -1 = Error)
          data:BytesData, // image bytes
          width:Int,
          height:Int,
          comp:Int, // RGBA = 4
        }
        */
        int width = 0;
        int height = 0;
        int comp = 4;
        unsigned char* rgba;


        hx::Anon result = hx::Anon_obj::Create();
        result->Add(HX_CSTRING("status"), status);

        if(img != NULL) {
          width = img->d_w;
          height = img->d_h;
          rgba = (unsigned char*)malloc(sizeof(unsigned char) * width * height * comp);
          if(img->fmt == VPX_IMG_FMT_I420) {
            ::libyuv::I420ToABGR(
              img->planes[VPX_PLANE_Y],img->stride[VPX_PLANE_Y],
              img->planes[VPX_PLANE_U],img->stride[VPX_PLANE_U],
              img->planes[VPX_PLANE_V],img->stride[VPX_PLANE_V],
              rgba, width * comp, width, height
            );

            ImgBytesData data = to_haxe_bytes(rgba, width * height * comp);
            result->Add(HX_CSTRING("data"), data);

          } else {
            warn("Img format unknown");
          }
        } else {
          warn("No image decoded!");
        }

        result->Add(HX_CSTRING("width"), width);
        result->Add(HX_CSTRING("height"), height);
        result->Add(HX_CSTRING("comp"), comp);


        return result;

      }
Example #26
int main(int argc, char **argv) {
  int frame_cnt = 0;
  FILE *outfile = NULL;
  vpx_codec_ctx_t codec;
  const VpxInterface *decoder = NULL;
  VpxVideoReader *reader = NULL;
  const VpxVideoInfo *info = NULL;
  int n = 0;
  int m = 0;
  int is_range = 0;
  char *nptr = NULL;

  exec_name = argv[0];

  if (argc != 4)
    die("Invalid number of arguments.");

  reader = vpx_video_reader_open(argv[1]);
  if (!reader)
    die("Failed to open %s for reading.", argv[1]);

  if (!(outfile = fopen(argv[2], "wb")))
    die("Failed to open %s for writing.", argv[2]);

  n = strtol(argv[3], &nptr, 0);
  m = strtol(nptr + 1, NULL, 0);
  is_range = (*nptr == '-');
  if (!n || !m || (*nptr != '-' && *nptr != '/'))
    die("Couldn't parse pattern %s.\n", argv[3]);

  info = vpx_video_reader_get_info(reader);

  decoder = get_vpx_decoder_by_fourcc(info->codec_fourcc);
  if (!decoder)
    die("Unknown input codec.");

  printf("Using %s\n", vpx_codec_iface_name(decoder->interface()));

  if (vpx_codec_dec_init(&codec, decoder->interface(), NULL, 0))
    die_codec(&codec, "Failed to initialize decoder.");

  while (vpx_video_reader_read_frame(reader)) {
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img = NULL;
    size_t frame_size = 0;
    int skip;
    const unsigned char *frame = vpx_video_reader_get_frame(reader,
                                                            &frame_size);
    if (vpx_codec_decode(&codec, frame, (unsigned int)frame_size, NULL, 0))
      die_codec(&codec, "Failed to decode frame.");

    ++frame_cnt;

    skip = (is_range && frame_cnt >= n && frame_cnt <= m) ||
           (!is_range && m - (frame_cnt - 1) % m <= n);

    if (!skip) {
      putc('.', stdout);

      while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL)
        vpx_img_write(img, outfile);
    } else {
      putc('X', stdout);
    }

    fflush(stdout);
  }

  printf("Processed %d frames.\n", frame_cnt);
  if (vpx_codec_destroy(&codec))
    die_codec(&codec, "Failed to destroy codec.");

  printf("Play: ffplay -f rawvideo -pix_fmt yuv420p -s %dx%d %s\n",
         info->frame_width, info->frame_height, argv[2]);

  vpx_video_reader_close(reader);
  fclose(outfile);

  return EXIT_SUCCESS;
}
Example #27
/****************************************************************************
 * Decode: the whole thing
 ****************************************************************************/
static picture_t *Decode(decoder_t *dec, block_t **pp_block)
{
    struct vpx_codec_ctx *ctx = &dec->p_sys->ctx;

    block_t *block = *pp_block;
    if (!block)
        return NULL;

    if (block->i_flags & (BLOCK_FLAG_DISCONTINUITY|BLOCK_FLAG_CORRUPTED))
        return NULL;

    /* Associate packet PTS with decoded frame */
    mtime_t *pkt_pts = malloc(sizeof(*pkt_pts));
    if (!pkt_pts) {
        block_Release(block);
        *pp_block = NULL;
        return NULL;
    }

    *pkt_pts = block->i_pts;

    vpx_codec_err_t err;
    err = vpx_codec_decode(ctx, block->p_buffer, block->i_buffer, pkt_pts, 0);

    block_Release(block);
    *pp_block = NULL;

    if (err != VPX_CODEC_OK) {
        free(pkt_pts);
        const char *error  = vpx_codec_error(ctx);
        const char *detail = vpx_codec_error_detail(ctx);
        if (!detail)
            detail = "no specific information";
        msg_Err(dec, "Failed to decode frame: %s (%s)", error, detail);
        return NULL;
    }

    const void *iter = NULL;
    struct vpx_image *img = vpx_codec_get_frame(ctx, &iter);
    if (!img) {
        free(pkt_pts);
        return NULL;
    }

    /* fetches back the PTS */
    pkt_pts = img->user_priv;
    mtime_t pts = *pkt_pts;
    free(pkt_pts);

    if (img->fmt != VPX_IMG_FMT_I420) {
        msg_Err(dec, "Unsupported output colorspace %d", img->fmt);
        return NULL;
    }

    video_format_t *v = &dec->fmt_out.video;

    if (img->d_w != v->i_visible_width || img->d_h != v->i_visible_height) {
        v->i_visible_width = img->d_w;
        v->i_visible_height = img->d_h;
    }

    picture_t *pic = decoder_NewPicture(dec);
    if (!pic)
        return NULL;

    for (int plane = 0; plane < pic->i_planes; plane++ ) {
        uint8_t *src = img->planes[plane];
        uint8_t *dst = pic->p[plane].p_pixels;
        int src_stride = img->stride[plane];
        int dst_stride = pic->p[plane].i_pitch;

        int size = __MIN( src_stride, dst_stride );
        for( int line = 0; line < pic->p[plane].i_visible_lines; line++ ) {
            memcpy( dst, src, size );
            src += src_stride;
            dst += dst_stride;
        }
    }

    pic->b_progressive = true; /* codec does not support interlacing */
    pic->date = pts;

    return pic;
}
Example #28
int main_loop(int argc, const char **argv_) {
  vpx_codec_ctx_t       decoder;
  char                  *fn = NULL;
  int                    i;
  uint8_t               *buf = NULL;
  size_t                 bytes_in_buffer = 0, buffer_size = 0;
  FILE                  *infile;
  int                    frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
  int                    do_md5 = 0, progress = 0;
  int                    stop_after = 0, postproc = 0, summary = 0, quiet = 1;
  int                    arg_skip = 0;
  int                    ec_enabled = 0;
  const VpxInterface *interface = NULL;
  const VpxInterface *fourcc_interface = NULL;
  uint64_t dx_time = 0;
  struct arg               arg;
  char                   **argv, **argi, **argj;

  int                     single_file;
  int                     use_y4m = 1;
  vpx_codec_dec_cfg_t     cfg = {0};
#if CONFIG_VP8_DECODER
  vp8_postproc_cfg_t      vp8_pp_cfg = {0};
  int                     vp8_dbg_color_ref_frame = 0;
  int                     vp8_dbg_color_mb_modes = 0;
  int                     vp8_dbg_color_b_modes = 0;
  int                     vp8_dbg_display_mv = 0;
#endif
  int                     frames_corrupted = 0;
  int                     dec_flags = 0;
  int                     do_scale = 0;
  vpx_image_t             *scaled_img = NULL;
  int                     frame_avail, got_data;
  int                     num_external_frame_buffers = 0;
  struct ExternalFrameBufferList ext_fb_list = {0};

  const char *outfile_pattern = NULL;
  char outfile_name[PATH_MAX] = {0};
  FILE *outfile = NULL;

  MD5Context md5_ctx;
  unsigned char md5_digest[16];

  struct VpxDecInputContext input = {0};
  struct VpxInputContext vpx_input_ctx = {0};
  struct WebmInputContext webm_ctx = {0};
  input.vpx_input_ctx = &vpx_input_ctx;
  input.webm_ctx = &webm_ctx;

  /* Parse command line */
  exec_name = argv_[0];
  argv = argv_dup(argc - 1, argv_ + 1);

  for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
    memset(&arg, 0, sizeof(arg));
    arg.argv_step = 1;

    if (arg_match(&arg, &codecarg, argi)) {
      interface = get_vpx_decoder_by_name(arg.val);
      if (!interface)
        die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
    } else if (arg_match(&arg, &looparg, argi)) {
      // no-op
    } else if (arg_match(&arg, &outputfile, argi))
      outfile_pattern = arg.val;
    else if (arg_match(&arg, &use_yv12, argi)) {
      use_y4m = 0;
      flipuv = 1;
    } else if (arg_match(&arg, &use_i420, argi)) {
      use_y4m = 0;
      flipuv = 0;
    } else if (arg_match(&arg, &flipuvarg, argi))
      flipuv = 1;
    else if (arg_match(&arg, &noblitarg, argi))
      noblit = 1;
    else if (arg_match(&arg, &progressarg, argi))
      progress = 1;
    else if (arg_match(&arg, &limitarg, argi))
      stop_after = arg_parse_uint(&arg);
    else if (arg_match(&arg, &skiparg, argi))
      arg_skip = arg_parse_uint(&arg);
    else if (arg_match(&arg, &postprocarg, argi))
      postproc = 1;
    else if (arg_match(&arg, &md5arg, argi))
      do_md5 = 1;
    else if (arg_match(&arg, &summaryarg, argi))
      summary = 1;
    else if (arg_match(&arg, &threadsarg, argi))
      cfg.threads = arg_parse_uint(&arg);
    else if (arg_match(&arg, &verbosearg, argi))
      quiet = 0;
    else if (arg_match(&arg, &scalearg, argi))
      do_scale = 1;
    else if (arg_match(&arg, &fb_arg, argi))
      num_external_frame_buffers = arg_parse_uint(&arg);

#if CONFIG_VP8_DECODER
    else if (arg_match(&arg, &addnoise_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_ADDNOISE;
      vp8_pp_cfg.noise_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &demacroblock_level, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEMACROBLOCK;
      vp8_pp_cfg.deblocking_level = arg_parse_uint(&arg);
    } else if (arg_match(&arg, &deblock, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_DEBLOCK;
    } else if (arg_match(&arg, &mfqe, argi)) {
      postproc = 1;
      vp8_pp_cfg.post_proc_flag |= VP8_MFQE;
    } else if (arg_match(&arg, &pp_debug_info, argi)) {
      unsigned int level = arg_parse_uint(&arg);

      postproc = 1;
      vp8_pp_cfg.post_proc_flag &= ~0x7;

      if (level)
        vp8_pp_cfg.post_proc_flag |= level;
    } else if (arg_match(&arg, &pp_disp_ref_frame, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_ref_frame = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mb_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_mb_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_b_modes, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_color_b_modes = flags;
      }
    } else if (arg_match(&arg, &pp_disp_mvs, argi)) {
      unsigned int flags = arg_parse_int(&arg);
      if (flags) {
        postproc = 1;
        vp8_dbg_display_mv = flags;
      }
    } else if (arg_match(&arg, &error_concealment, argi)) {
      ec_enabled = 1;
    }

#endif
    else
      argj++;
  }

  /* Check for unrecognized options */
  for (argi = argv; *argi; argi++)
    if (argi[0][0] == '-' && strlen(argi[0]) > 1)
      die("Error: Unrecognized option %s\n", *argi);

  /* Handle non-option arguments */
  fn = argv[0];

  if (!fn)
    usage_exit();

  /* Open file */
  infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);

  if (!infile) {
    fprintf(stderr, "Failed to open file '%s'", strcmp(fn, "-") ? fn : "stdin");
    return EXIT_FAILURE;
  }
#if CONFIG_OS_SUPPORT
  /* Make sure we don't dump to the terminal, unless forced to with -o - */
  if (!outfile_pattern && isatty(fileno(stdout)) && !do_md5 && !noblit) {
    fprintf(stderr,
            "Not dumping raw video to your terminal. Use '-o -' to "
            "override.\n");
    return EXIT_FAILURE;
  }
#endif
  input.vpx_input_ctx->file = infile;
  if (file_is_ivf(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_IVF;
#if CONFIG_WEBM_IO
  else if (file_is_webm(input.webm_ctx, input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_WEBM;
#endif
  else if (file_is_raw(input.vpx_input_ctx))
    input.vpx_input_ctx->file_type = FILE_TYPE_RAW;
  else {
    fprintf(stderr, "Unrecognized input file type.\n");
#if !CONFIG_WEBM_IO
    fprintf(stderr, "vpxdec was built without WebM container support.\n");
#endif
    return EXIT_FAILURE;
  }

  outfile_pattern = outfile_pattern ? outfile_pattern : "-";
  single_file = is_single_file(outfile_pattern);

  if (!noblit && single_file) {
    generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                      vpx_input_ctx.width, vpx_input_ctx.height, 0);
    if (do_md5)
      MD5Init(&md5_ctx);
    else
      outfile = open_outfile(outfile_name);
  }

  if (use_y4m && !noblit) {
    if (!single_file) {
      fprintf(stderr, "YUV4MPEG2 not supported with output patterns,"
              " try --i420 or --yv12.\n");
      return EXIT_FAILURE;
    }

#if CONFIG_WEBM_IO
    if (vpx_input_ctx.file_type == FILE_TYPE_WEBM) {
      if (webm_guess_framerate(input.webm_ctx, input.vpx_input_ctx)) {
        fprintf(stderr, "Failed to guess framerate -- error parsing "
                "webm file?\n");
        return EXIT_FAILURE;
      }
    }
#endif
  }

  fourcc_interface = get_vpx_decoder_by_fourcc(vpx_input_ctx.fourcc);
  if (interface && fourcc_interface && interface != fourcc_interface)
    warn("Header indicates codec: %s\n", fourcc_interface->name);
  else
    interface = fourcc_interface;

  if (!interface)
    interface = get_vpx_decoder_by_index(0);

  dec_flags = (postproc ? VPX_CODEC_USE_POSTPROC : 0) |
              (ec_enabled ? VPX_CODEC_USE_ERROR_CONCEALMENT : 0);
  if (vpx_codec_dec_init(&decoder, interface->interface(), &cfg, dec_flags)) {
    fprintf(stderr, "Failed to initialize decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!quiet)
    fprintf(stderr, "%s\n", decoder.name);

#if CONFIG_VP8_DECODER

  if (vp8_pp_cfg.post_proc_flag
      && vpx_codec_control(&decoder, VP8_SET_POSTPROC, &vp8_pp_cfg)) {
    fprintf(stderr, "Failed to configure postproc: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_ref_frame
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_REF_FRAME, vp8_dbg_color_ref_frame)) {
    fprintf(stderr, "Failed to configure reference block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_mb_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_MB_MODES, vp8_dbg_color_mb_modes)) {
    fprintf(stderr, "Failed to configure macro block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_color_b_modes
      && vpx_codec_control(&decoder, VP8_SET_DBG_COLOR_B_MODES, vp8_dbg_color_b_modes)) {
    fprintf(stderr, "Failed to configure block visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (vp8_dbg_display_mv
      && vpx_codec_control(&decoder, VP8_SET_DBG_DISPLAY_MV, vp8_dbg_display_mv)) {
    fprintf(stderr, "Failed to configure motion vector visualizer: %s\n", vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }
#endif


  if (arg_skip)
    fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
  while (arg_skip) {
    if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size))
      break;
    arg_skip--;
  }

  if (num_external_frame_buffers > 0) {
    ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
    ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
        num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
    if (vpx_codec_set_frame_buffer_functions(
            &decoder, get_vp9_frame_buffer, release_vp9_frame_buffer,
            &ext_fb_list)) {
      fprintf(stderr, "Failed to configure external frame buffers: %s\n",
              vpx_codec_error(&decoder));
      return EXIT_FAILURE;
    }
  }

  frame_avail = 1;
  got_data = 0;

  /* Decode file */
  while (frame_avail || got_data) {
    vpx_codec_iter_t  iter = NULL;
    vpx_image_t    *img;
    struct vpx_usec_timer timer;
    int                   corrupted;

    frame_avail = 0;
    if (!stop_after || frame_in < stop_after) {
      if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
        frame_avail = 1;
        frame_in++;

        vpx_usec_timer_start(&timer);

        if (vpx_codec_decode(&decoder, buf, (unsigned int)bytes_in_buffer,
                             NULL, 0)) {
          const char *detail = vpx_codec_error_detail(&decoder);
          warn("Failed to decode frame %d: %s",
               frame_in, vpx_codec_error(&decoder));

          if (detail)
            warn("Additional information: %s", detail);
          goto fail;
        }

        vpx_usec_timer_mark(&timer);
        dx_time += vpx_usec_timer_elapsed(&timer);
      }
    }

    vpx_usec_timer_start(&timer);

    got_data = 0;
    if ((img = vpx_codec_get_frame(&decoder, &iter))) {
      ++frame_out;
      got_data = 1;
    }

    vpx_usec_timer_mark(&timer);
    dx_time += (unsigned int)vpx_usec_timer_elapsed(&timer);

    if (vpx_codec_control(&decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
      warn("Failed VP8_GET_FRAME_CORRUPTED: %s", vpx_codec_error(&decoder));
      goto fail;
    }
    frames_corrupted += corrupted;

    if (progress)
      show_progress(frame_in, frame_out, dx_time);

    if (!noblit && img) {
      const int PLANES_YUV[] = {VPX_PLANE_Y, VPX_PLANE_U, VPX_PLANE_V};
      const int PLANES_YVU[] = {VPX_PLANE_Y, VPX_PLANE_V, VPX_PLANE_U};
      const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;

      if (do_scale) {
        if (frame_out == 1) {
          // If the output frames are to be scaled to a fixed display size then
          // use the width and height specified in the container. If either of
          // these is set to 0, use the display size set in the first frame
          // header. If that is unavailable, use the raw decoded size of the
          // first decoded frame.
          int display_width = vpx_input_ctx.width;
          int display_height = vpx_input_ctx.height;
          if (!display_width || !display_height) {
            int display_size[2];
            if (vpx_codec_control(&decoder, VP9D_GET_DISPLAY_SIZE,
                                  display_size)) {
              // As last resort use size of first frame as display size.
              display_width = img->d_w;
              display_height = img->d_h;
            } else {
              display_width = display_size[0];
              display_height = display_size[1];
            }
          }
          scaled_img = vpx_img_alloc(NULL, VPX_IMG_FMT_I420, display_width,
                                     display_height, 16);
        }

        if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
          vpx_image_scale(img, scaled_img, kFilterBox);
          img = scaled_img;
        }
      }

      if (single_file) {
        if (use_y4m) {
          char buf[Y4M_BUFFER_SIZE] = {0};
          size_t len = 0;
          if (frame_out == 1) {
            // Y4M file header
            len = y4m_write_file_header(buf, sizeof(buf),
                                        vpx_input_ctx.width,
                                        vpx_input_ctx.height,
                                        &vpx_input_ctx.framerate, img->fmt);
            if (do_md5) {
              MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
            } else {
              fputs(buf, outfile);
            }
          }

          // Y4M frame header
          len = y4m_write_frame_header(buf, sizeof(buf));
          if (do_md5) {
            MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
          } else {
            fputs(buf, outfile);
          }
        }

        if (do_md5) {
          update_image_md5(img, planes, &md5_ctx);
        } else {
          write_image_file(img, planes, outfile);
        }
      } else {
        generate_filename(outfile_pattern, outfile_name, PATH_MAX,
                          img->d_w, img->d_h, frame_in);
        if (do_md5) {
          MD5Init(&md5_ctx);
          update_image_md5(img, planes, &md5_ctx);
          MD5Final(md5_digest, &md5_ctx);
          print_md5(md5_digest, outfile_name);
        } else {
          outfile = open_outfile(outfile_name);
          write_image_file(img, planes, outfile);
          fclose(outfile);
        }
      }
    }

    if (stop_after && frame_in >= stop_after)
      break;
  }

  if (summary || progress) {
    show_progress(frame_in, frame_out, dx_time);
    fprintf(stderr, "\n");
  }

  if (frames_corrupted)
    fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);

fail:

  if (vpx_codec_destroy(&decoder)) {
    fprintf(stderr, "Failed to destroy decoder: %s\n",
            vpx_codec_error(&decoder));
    return EXIT_FAILURE;
  }

  if (!noblit && single_file) {
    if (do_md5) {
      MD5Final(md5_digest, &md5_ctx);
      print_md5(md5_digest, outfile_name);
    } else {
      fclose(outfile);
    }
  }

#if CONFIG_WEBM_IO
  if (input.vpx_input_ctx->file_type == FILE_TYPE_WEBM)
    webm_free(input.webm_ctx);
#endif

  if (input.vpx_input_ctx->file_type != FILE_TYPE_WEBM)
    free(buf);

  if (scaled_img) vpx_img_free(scaled_img);

  for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
    free(ext_fb_list.ext_fb[i].data);
  }
  free(ext_fb_list.ext_fb);

  fclose(infile);
  free(argv);

  return frames_corrupted ? EXIT_FAILURE : EXIT_SUCCESS;
}
Example #29
int main(int argc, char **argv) {
  FILE *infile, *outfile;
  vpx_codec_ctx_t codec;
  vpx_codec_iface_t *iface;
  int flags = 0, frame_cnt = 0;
  unsigned char file_hdr[IVF_FILE_HDR_SZ];
  unsigned char frame_hdr[IVF_FRAME_HDR_SZ];
  unsigned char frame[256 * 1024];

  if (argc != 3)
    die("Usage: %s <infile> <outfile>\n", argv[0]);

  if (!(infile = fopen(argv[1], "rb")))
    die("Failed to open %s for reading", argv[1]);

  if (!(outfile = fopen(argv[2], "wb")))
    die("Failed to open %s for writing", argv[2]);

  if (!(fread(file_hdr, 1, IVF_FILE_HDR_SZ, infile) == IVF_FILE_HDR_SZ &&
     file_hdr[0] == 'D' && file_hdr[1] == 'K' &&
     file_hdr[2] == 'I' && file_hdr[3] == 'F'))
    die("%s is not an IVF file.", argv[1]);

  iface = get_codec_interface(mem_get_le32(file_hdr + 8));
  if (!iface)
    die("Unknown FOURCC code.");


  printf("Using %s\n", vpx_codec_iface_name(iface));

  if (vpx_codec_dec_init(&codec, iface, NULL, flags))
    die_codec(&codec, "Failed to initialize decoder");

  while (fread(frame_hdr, 1, IVF_FRAME_HDR_SZ, infile) == IVF_FRAME_HDR_SZ) {
    const int frame_size = mem_get_le32(frame_hdr);
    vpx_codec_iter_t iter = NULL;
    vpx_image_t *img;

    if (frame_size > sizeof(frame))
      die("Frame %d data too big for example code buffer", frame_size);

    if (fread(frame, 1, frame_size, infile) != frame_size)
      die("Failed to read complete frame");

    if (vpx_codec_decode(&codec, frame, frame_size, NULL, 0))
      die_codec(&codec, "Failed to decode frame");

    while ((img = vpx_codec_get_frame(&codec, &iter)) != NULL) {
      unsigned char digest[16];

      get_image_md5(img, digest);
      print_md5(outfile, digest);
      fprintf(outfile, "  img-%dx%d-%04d.i420\n",
              img->d_w, img->d_h, ++frame_cnt);
    }
  }

  printf("Processed %d frames.\n", frame_cnt);
  if (vpx_codec_destroy(&codec))
    die_codec(&codec, "Failed to destroy codec");

  fclose(outfile);
  fclose(infile);
  return EXIT_SUCCESS;
}
Example #30
void cs_do(CSSession *cs)
{
    /* Codec session should always be protected by call mutex so no need to check for cs validity
     */

    if (!cs) return;

    Payload *p;
    int rc;

    int success = 0;

    pthread_mutex_lock(cs->queue_mutex);
    RTPMessage *msg;

    while ((msg = jbuf_read(cs->j_buf, &success)) || success == 2) {
        pthread_mutex_unlock(cs->queue_mutex);

        uint16_t fsize = ((cs->audio_decoder_sample_rate * cs->audio_decoder_frame_duration) / 1000);
        int16_t tmp[fsize * cs->audio_decoder_channels];

        if (success == 2) {
            rc = opus_decode(cs->audio_decoder, 0, 0, tmp, fsize, 1);
        } else {
            rc = opus_decode(cs->audio_decoder, msg->data, msg->length, tmp, fsize, 0);
            rtp_free_msg(NULL, msg);
        }

        if (rc < 0) {
            LOGGER_WARNING("Decoding error: %s", opus_strerror(rc));
        } else if (cs->acb.first) {
            /* Play */
            cs->acb.first(cs->agent, cs->call_idx, tmp, rc, cs->acb.second);
        }

        pthread_mutex_lock(cs->queue_mutex);
    }

    if (cs->vbuf_raw && !buffer_empty(cs->vbuf_raw)) {
        /* Decode video */
        buffer_read(cs->vbuf_raw, &p);

        /* Leave space for (possibly) other thread to queue more data after we read it here */
        pthread_mutex_unlock(cs->queue_mutex);

        rc = vpx_codec_decode(&cs->v_decoder, p->data, p->size, NULL, MAX_DECODE_TIME_US);
        free(p);

        if (rc != VPX_CODEC_OK) {
            LOGGER_ERROR("Error decoding video: %s", vpx_codec_err_to_string(rc));
        } else {
            vpx_codec_iter_t iter = NULL;
            vpx_image_t *dest = vpx_codec_get_frame(&cs->v_decoder, &iter);

            /* Play decoded images */
            for (; dest; dest = vpx_codec_get_frame(&cs->v_decoder, &iter)) {
                if (cs->vcb.first)
                    cs->vcb.first(cs->agent, cs->call_idx, dest, cs->vcb.second);

                vpx_img_free(dest);
            }
        }

        return;
    }

    pthread_mutex_unlock(cs->queue_mutex);
}