Example 1
static void h264_dec_output_cb(VTH264DecCtx *ctx, void *sourceFrameRefCon,
							   OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef imageBuffer,
							   CMTime presentationTimeStamp, CMTime presentationDuration ) {

	CGSize vsize;
	MSPicture pixbuf_desc;
	mblk_t *pixbuf = NULL;
	uint8_t *src_planes[4] = { NULL };
	int src_strides[4] = { 0 };
	size_t i;

	if(status != noErr || imageBuffer == NULL) {
		ms_error("VideoToolboxDecoder: fail to decode one frame: error %d", status);
		ms_filter_notify_no_arg(ctx->f, MS_VIDEO_DECODER_DECODING_ERRORS);
		ms_filter_lock(ctx->f);
		if(ctx->enable_avpf) {
			ms_error("VideoToolboxDecoder: sending PLI");
			ms_filter_notify_no_arg(ctx->f, MS_VIDEO_DECODER_SEND_PLI);
		}
		ms_filter_unlock(ctx->f);
		return;
	}

	vsize = CVImageBufferGetEncodedSize(imageBuffer);
	ctx->vsize.width = (int)vsize.width;
	ctx->vsize.height = (int)vsize.height;
	pixbuf = ms_yuv_buf_allocator_get(ctx->pixbuf_allocator, &pixbuf_desc, (int)vsize.width, (int)vsize.height);

	CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
	for(i=0; i<3; i++) {
		src_planes[i] = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
		src_strides[i] = (int)CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
	}
	ms_yuv_buf_copy(src_planes, src_strides, pixbuf_desc.planes, pixbuf_desc.strides, ctx->vsize);
	CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

	ms_mutex_lock(&ctx->mutex);
	ms_queue_put(&ctx->queue, pixbuf);
	ms_mutex_unlock(&ctx->mutex);
}
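For context, here is a minimal sketch of how an output callback such as h264_dec_output_cb is typically installed on a VTDecompressionSession. The helper name h264_dec_create_session and the assumption that VTH264DecCtx stores the session in ctx->session are illustrative only and not part of the original example; the format description is assumed to have been created beforehand from the SPS/PPS.

static OSStatus h264_dec_create_session(VTH264DecCtx *ctx, CMVideoFormatDescriptionRef format_desc) {
	/* Hypothetical helper: assumes VTH264DecCtx has a 'session' field of type VTDecompressionSessionRef. */
	VTDecompressionOutputCallbackRecord cb_record = {0};
	int pixel_format = kCVPixelFormatType_420YpCbCr8Planar; /* planar 4:2:0, matching the three-plane copy above */
	CFNumberRef pixel_format_num = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &pixel_format);
	CFMutableDictionaryRef attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1,
		&kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
	OSStatus status;

	CFDictionarySetValue(attrs, kCVPixelBufferPixelFormatTypeKey, pixel_format_num);
	cb_record.decompressionOutputCallback = (VTDecompressionOutputCallback)h264_dec_output_cb;
	cb_record.decompressionOutputRefCon = ctx;
	status = VTDecompressionSessionCreate(kCFAllocatorDefault, format_desc, NULL, attrs,
		&cb_record, &ctx->session);
	CFRelease(pixel_format_num);
	CFRelease(attrs);
	return status;
}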
Example 2
/* Destination and source images may have their dimensions inverted.*/
mblk_t *copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(MSYuvBufAllocator *allocator, const uint8_t* y, const uint8_t * cbcr,
		int rotation, int w, int h, int y_byte_per_row, int cbcr_byte_per_row, bool_t uFirstvSecond, bool_t down_scale) {
	MSPicture pict;
	int uv_w;
	int uv_h;
	const uint8_t* ysrc;
	uint8_t* ydst;
	const uint8_t* uvsrc;
	const uint8_t* srcu;
	uint8_t* dstu;
	const uint8_t* srcv;
	uint8_t* dstv;

	mblk_t *yuv_block = ms_yuv_buf_allocator_get(allocator, &pict, w, h);

#ifdef ANDROID
	if (hasNeon == -1) {
		hasNeon = (android_getCpuFamily() == ANDROID_CPU_FAMILY_ARM && (android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_NEON) != 0);
	#ifdef __arm64__
		ms_warning("Warning: ARM64 NEON routines for video rotation are not yes implemented for Android: using SOFT version!");
	#endif
	}
#endif


#if MS_HAS_ARM
	if (down_scale && !hasNeon) {
		ms_error("down scaling by two requires NEON, returning empty block");
		return yuv_block;
	}
#endif

	if (!uFirstvSecond) {
		unsigned char* tmp = pict.planes[1];
		pict.planes[1] = pict.planes[2];
		pict.planes[2] = tmp;
	}

	uv_w = w/2;
	uv_h = h/2;

	if (rotation % 180 == 0) {
		int i,j;
		uint8_t* u_dest=pict.planes[1], *v_dest=pict.planes[2];

		if (rotation == 0) {
#if MS_HAS_ARM
			if (hasNeon) {
				deinterlace_down_scale_neon(y, cbcr, pict.planes[0], u_dest, v_dest, w, h, y_byte_per_row, cbcr_byte_per_row,down_scale);
			} else
#endif
			{
				// plain copy
				for(i=0; i<h; i++) {
					memcpy(&pict.planes[0][i*w], &y[i*y_byte_per_row], w);
				}
				// de-interlace u/v
				for (i=0; i<uv_h; i++) {
					for(j=0; j<uv_w; j++) {
						*u_dest++ = cbcr[cbcr_byte_per_row*i + 2*j];
						*v_dest++ = cbcr[cbcr_byte_per_row*i + 2*j + 1];
					}
				}
			}
		} else {
#if defined(__arm__)
			if (hasNeon) {
				deinterlace_down_scale_and_rotate_180_neon(y, cbcr, pict.planes[0], u_dest, v_dest, w, h, y_byte_per_row, cbcr_byte_per_row,down_scale);
			} else
#endif
			{
				// 180° y rotation
				ysrc=y;
				ydst=&pict.planes[0][h*w-1];
				for(i=0; i<h*w; i++) {
					*ydst-- = *ysrc++;
				}
				// 180° rotation + de-interlace u/v
				uvsrc=&cbcr[uv_h*uv_w*2-2];
				for (i=0; i<uv_h*uv_w; i++) {
					*u_dest++ = *uvsrc--;
					*v_dest++ = *uvsrc--;
				}
			}
		}
	} else {
		bool_t clockwise = rotation == 90 ? TRUE : FALSE;
		// Rotate Y
#if defined(__arm__)
		if (hasNeon) {
			if (clockwise) {
				rotate_down_scale_plane_neon_clockwise(w,h,y_byte_per_row,(uint8_t*)y,pict.planes[0],down_scale);
			} else {
				rotate_down_scale_plane_neon_anticlockwise(w,h,y_byte_per_row,(uint8_t*)y,pict.planes[0], down_scale);
			}
		} else
#endif
		{
			uint8_t* dsty = pict.planes[0];
			uint8_t* srcy = (uint8_t*) y;
			rotate_plane(w,h,y_byte_per_row,srcy,dsty,1, clockwise);
		}

#if defined(__arm__)
		if (hasNeon) {
			rotate_down_scale_cbcr_to_cr_cb(uv_w,uv_h, cbcr_byte_per_row/2, (uint8_t*)cbcr, pict.planes[2], pict.planes[1],clockwise,down_scale);
		} else
#endif
		{
			// Copying U
			srcu = cbcr;
			dstu = pict.planes[1];
			rotate_plane(uv_w,uv_h,cbcr_byte_per_row/2,srcu,dstu, 2, clockwise);
			// Copying V
			srcv = srcu + 1;
			dstv = pict.planes[2];
			rotate_plane(uv_w,uv_h,cbcr_byte_per_row/2,srcv,dstv, 2, clockwise);
		}
	}

	return yuv_block;
}
static void dec_process(MSFilter *f){
	DecData *d=(DecData*)f->data;
	MSPicture pic = {0};
	mblk_t *im,*om = NULL;
	ssize_t oBufidx = -1;
	size_t bufsize;
	bool_t need_reinit=FALSE;
	bool_t request_pli=FALSE;
	MSQueue nalus;
	AMediaCodecBufferInfo info;
	
	ms_queue_init(&nalus);

	while((im=ms_queue_get(f->inputs[0]))!=NULL){
		if (d->packet_num==0 && d->sps && d->pps){
			mblk_set_timestamp_info(d->sps,mblk_get_timestamp_info(im));
			mblk_set_timestamp_info(d->pps,mblk_get_timestamp_info(im));
			rfc3984_unpack(&d->unpacker, d->sps, &nalus);
            rfc3984_unpack(&d->unpacker, d->pps, &nalus);
			d->sps=NULL;
			d->pps=NULL;
		}

		if(rfc3984_unpack(&d->unpacker,im,&nalus) <0){
			request_pli=TRUE;
		}
		if (!ms_queue_empty(&nalus)){
			int size;
			uint8_t *buf=NULL;
			ssize_t iBufidx;

			size=nalusToFrame(d,&nalus,&need_reinit);

			if (need_reinit) {
				//In case of rotation, the decoder needs to be flushed in order to restart with the new video size
				AMediaCodec_flush(d->codec);
				d->first_buffer_queued = FALSE;
			}

			/*First put our H264 bitstream into the decoder*/
			iBufidx = AMediaCodec_dequeueInputBuffer(d->codec, TIMEOUT_US);
			if (iBufidx >= 0) {
				buf = AMediaCodec_getInputBuffer(d->codec, iBufidx, &bufsize);
				if(buf == NULL) {
					ms_error("MSMediaCodecH264Dec: AMediaCodec_getInputBuffer() returned NULL");
					break;
				}
				if((size_t)size > bufsize) {
					ms_error("Cannot copy the bitstream into the input buffer size : %i and bufsize %i",size,(int) bufsize);
					break;
				} else {
					struct timespec ts;
					clock_gettime(CLOCK_MONOTONIC, &ts);
					memcpy(buf,d->bitstream,(size_t)size);
					AMediaCodec_queueInputBuffer(d->codec, iBufidx, 0, (size_t)size, (ts.tv_nsec/1000) + 10000LL, 0);
					d->first_buffer_queued = TRUE;
				}
			}else if (iBufidx == AMEDIA_ERROR_UNKNOWN){
				ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueInputBuffer() had an exception");
			}
		}
		d->packet_num++;
		if (d->sps && d->pps) request_pli = FALSE;
		else request_pli = TRUE;
	}
	
	/*Secondly, try to get decoded frames from the decoder; this is performed at every tick*/
	while (d->first_buffer_queued && (oBufidx = AMediaCodec_dequeueOutputBuffer(d->codec, &info, TIMEOUT_US)) >= 0){
		AMediaFormat *format;
		int width = 0, height = 0, color = 0;
		uint8_t *buf = AMediaCodec_getOutputBuffer(d->codec, oBufidx, &bufsize);
		
		if(buf == NULL){
			ms_filter_notify_no_arg(f,MS_VIDEO_DECODER_DECODING_ERRORS);
			ms_error("MSMediaCodecH264Dec: AMediaCodec_getOutputBuffer() returned NULL");
		}

		format = AMediaCodec_getOutputFormat(d->codec);
		if(format != NULL){
			AMediaFormat_getInt32(format, "width", &width);
			AMediaFormat_getInt32(format, "height", &height);
			AMediaFormat_getInt32(format, "color-format", &color);

			d->vsize.width=width;
			d->vsize.height=height;
			AMediaFormat_delete(format);
		}

		if(buf != NULL && d->sps && d->pps){ /*some decoders output garbage while no sps or pps have been received yet !*/
			if(width != 0 && height != 0 ){
				if(color == 19) {
					//color-format 19 is COLOR_FormatYUV420Planar: the three planes can be copied directly
					int ysize = width*height;
					int usize = ysize/4;
					om = ms_yuv_buf_allocator_get(d->buf_allocator,&pic,width,height);
					memcpy(pic.planes[0],buf,ysize);
					memcpy(pic.planes[1],buf+ysize,usize);
					memcpy(pic.planes[2],buf+ysize+usize,usize);
				} else {
					//any other color-format is treated as semi-planar (e.g. COLOR_FormatYUV420SemiPlanar): de-interleave CbCr
					uint8_t* cbcr_src = (uint8_t*) (buf + width * height);
					om = copy_ycbcrbiplanar_to_true_yuv_with_rotation_and_down_scale_by_2(d->buf_allocator, buf, cbcr_src, 0, width, height, width, width, TRUE, FALSE);
				}

				if (!d->first_image_decoded) {
					ms_message("First frame decoded %ix%i",width,height);
					d->first_image_decoded = TRUE;
					ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_FIRST_IMAGE_DECODED);
				}
				ms_queue_put(f->outputs[0], om);
			}else{
				ms_error("MSMediaCodecH264Dec: width and height are not known !");
			}
		}
		AMediaCodec_releaseOutputBuffer(d->codec, oBufidx, FALSE);
	}
	if (oBufidx == AMEDIA_ERROR_UNKNOWN){
		ms_error("MSMediaCodecH264Dec: AMediaCodec_dequeueOutputBuffer() had an exception");
	}

	if (d->avpf_enabled && request_pli) {
		ms_filter_notify_no_arg(f, MS_VIDEO_DECODER_SEND_PLI);
	}
	ms_queue_flush(f->inputs[0]);
}
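As a complement, the sketch below shows the kind of decoder setup that dec_process() relies on: an AVC decoder configured without an output surface so that frames can be read back with AMediaCodec_getOutputBuffer(). The helper name dec_init_codec and the assumption that DecData stores the codec in d->codec are illustrative only, not taken from the original example.

static bool_t dec_init_codec(DecData *d, int width, int height) {
	/* Hypothetical helper: create and start an AVC decoder whose output buffers
	   are read back by dec_process() (no output surface, no crypto). */
	AMediaFormat *format;

	d->codec = AMediaCodec_createDecoderByType("video/avc");
	if (d->codec == NULL) return FALSE;

	format = AMediaFormat_new();
	AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
	AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
	AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);

	if (AMediaCodec_configure(d->codec, format, NULL, NULL, 0) != AMEDIA_OK
		|| AMediaCodec_start(d->codec) != AMEDIA_OK) {
		ms_error("MSMediaCodecH264Dec: failed to configure or start the decoder");
		AMediaFormat_delete(format);
		return FALSE;
	}
	AMediaFormat_delete(format);
	return TRUE;
}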
Example 4
static mblk_t * pixconv_alloc_mblk(PixConvState *s){
	return ms_yuv_buf_allocator_get(s->allocator, &s->outbuf, s->size.width,s->size.height);
}