Example #1: dx_copy_pixels()
void dx_copy_pixels(GF_VideoSurface *dst_s, const GF_VideoSurface *src_s, const GF_Window *src_wnd)
{
	/*handle YUV input*/
	if (get_yuv_base(src_s->pixel_format)==GF_PIXEL_YV12) {
		if (format_is_yuv(dst_s->pixel_format)) {
			/*generic YV planar to YUV (planar or not) */
			write_yv12_to_yuv(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->pixel_format, src_s->width, src_s->height, src_wnd, src_s->u_ptr, src_s->v_ptr);
			return;
		}
	} else if (get_yuv_base(src_s->pixel_format)==GF_PIXEL_YV12_10) {
		if (format_is_yuv(dst_s->pixel_format)) {
			/*generic YV planar to YUV (planar or not) */
			gf_color_write_yv12_10_to_yuv(dst_s, src_s->video_buffer, src_s->u_ptr, src_s->v_ptr, src_s->pitch_y, src_s->width, src_s->height, src_wnd);
			return;
		}
	} else if (format_is_yuv(src_s->pixel_format)) {
		if (format_is_yuv(dst_s->pixel_format)) {
			write_yvyu_to_yuv(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->pixel_format, src_s->width, src_s->height, src_wnd);
			return;
		}
	} else {
		/*non-YUV (RGB) source: pick the converter for the destination pixel format*/
		switch (dst_s->pixel_format) {
		case GF_PIXEL_RGB_555:
			rgb_to_555(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->width, src_s->height, src_s->pixel_format, src_wnd);
			return;
		case GF_PIXEL_RGB_565:
			rgb_to_565(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->width, src_s->height, src_s->pixel_format, src_wnd);
			return;
		case GF_PIXEL_RGB_24:
		case GF_PIXEL_RGBS:
		case GF_PIXEL_BGR_24:
			rgb_to_24(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->width, src_s->height, src_s->pixel_format, src_wnd);
			return;
		case GF_PIXEL_RGB_32:
		case GF_PIXEL_RGBD:
		case GF_PIXEL_RGBDS:
		case GF_PIXEL_BGR_32:
			rgb_to_32(dst_s, src_s->video_buffer, src_s->pitch_y, src_s->width, src_s->height, src_s->pixel_format, src_wnd);
			return;
		}
	}

	/*no dedicated converter matched: fall back to the generic stretch/blit*/
	gf_stretch_bits(dst_s, (GF_VideoSurface*) src_s, NULL, (GF_Window *)src_wnd, 0xFF, 0, NULL, NULL);
}
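
The listing above only shows the converter itself. As a usage sketch (not taken from the GPAC sources): blit_frame(), the chosen formats and the plane-offset arithmetic are illustrative assumptions; only the GF_VideoSurface/GF_Window field names come from the call sites in dx_copy_pixels().

/* Hypothetical caller: copy one YV12 frame into an already-locked destination surface.
   Assumes the GPAC headers declaring GF_VideoSurface, GF_Window and the pixel format enums. */
#include <string.h>   /* memset */

static void blit_frame(GF_VideoSurface *dst, char *yv12_data, u32 w, u32 h)
{
	GF_VideoSurface src;
	GF_Window wnd;

	memset(&src, 0, sizeof(GF_VideoSurface));
	src.width = w;
	src.height = h;
	src.pitch_y = w;                                   /* tightly packed luma rows (assumption) */
	src.pixel_format = GF_PIXEL_YV12;
	src.video_buffer = yv12_data;                      /* Y plane */
	src.v_ptr = yv12_data + (size_t)w * h;             /* YV12 stores V before U (assumed tight packing) */
	src.u_ptr = src.v_ptr + (size_t)(w / 2) * (h / 2);

	wnd.x = wnd.y = 0;                                 /* copy the full frame */
	wnd.w = w;
	wnd.h = h;

	dx_copy_pixels(dst, &src, &wnd);
}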
Example #2: HEVC_flush_picture()
static GF_Err HEVC_flush_picture(HEVCDec *ctx, char *outBuffer, u32 *outBufferLength, u32 *CTS)
{
	unsigned int a_w, a_h, a_stride, bit_depth;
	OpenHevc_Frame_cpy openHevcFrame;
	int chromat_format;

	if (ctx->direct_output)
		libOpenHevcGetPictureInfo(ctx->openHevcHandle, &openHevcFrame.frameInfo);
	else
		libOpenHevcGetPictureInfoCpy(ctx->openHevcHandle, &openHevcFrame.frameInfo);

	a_w      = openHevcFrame.frameInfo.nWidth;
	a_h      = openHevcFrame.frameInfo.nHeight;
	a_stride = openHevcFrame.frameInfo.nYPitch;
	bit_depth = openHevcFrame.frameInfo.nBitDepth;
	chromat_format = openHevcFrame.frameInfo.chromat_format;
	*CTS = (u32) openHevcFrame.frameInfo.nTimeStamp;
	ctx->conv_to_8bit = GF_FALSE;
	if (!ctx->output_as_8bit) {
		if ((ctx->luma_bpp>8) || (ctx->chroma_bpp>8)) {
			ctx->pack_mode = GF_FALSE;
		}
	} else {
		if (bit_depth>8) {
			bit_depth=8;
			ctx->conv_to_8bit = GF_TRUE;
			a_stride /= 2;
			ctx->pack_mode = GF_FALSE;
		}
	}

	if ((ctx->width != a_w) || (ctx->height != a_h) || (ctx->stride != a_stride) || (ctx->luma_bpp != bit_depth) || (ctx->chroma_bpp != bit_depth) || (ctx->chroma_format_idc != (chromat_format + 1))) {
		ctx->width = a_w;
		ctx->stride = a_stride;
		ctx->height = a_h;
		if (chromat_format == YUV420) {
			ctx->out_size = ctx->stride * ctx->height * 3 / 2;
		} else if (chromat_format == YUV422) {
			ctx->out_size = ctx->stride * ctx->height * 2;
		} else if (chromat_format == YUV444) {
			ctx->out_size = ctx->stride * ctx->height * 3;
		}
		ctx->had_pic = GF_TRUE;
		ctx->luma_bpp = ctx->chroma_bpp = bit_depth;
		ctx->chroma_format_idc = chromat_format + 1;
		/*always force layer resize*/
		*outBufferLength = ctx->out_size;

		if (ctx->conv_to_8bit && ctx->direct_output) {
			ctx->conv_buffer = (char*)gf_realloc(ctx->conv_buffer, sizeof(char)*ctx->out_size);
		}
		return GF_BUFFER_TOO_SMALL;
	}
	if (!ctx->conv_to_8bit && ctx->direct_output) {
		*outBufferLength = ctx->out_size;
		ctx->has_pic = GF_TRUE;
		return GF_OK;
	}

	if (ctx->conv_to_8bit) {
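		/*10-bit decoder output requested as 8-bit: fetch the planes and down-convert them with the gf_color_write_*_10_to_* helpers below*/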
		OpenHevc_Frame openHevcFramePtr;
		if (libOpenHevcGetOutput(ctx->openHevcHandle, 1, &openHevcFramePtr)) {
			GF_VideoSurface dst;
			memset(&dst, 0, sizeof(GF_VideoSurface));
			dst.width = ctx->width;
			dst.height = ctx->height;
			dst.pitch_y = ctx->width;
			dst.video_buffer = ctx->direct_output ? ctx->conv_buffer : outBuffer;
			if (chromat_format == YUV444) {
				gf_color_write_yuv444_10_to_yuv444(&dst, (u8 *) openHevcFramePtr.pvY, (u8 *) openHevcFramePtr.pvU, (u8 *) openHevcFramePtr.pvV, openHevcFramePtr.frameInfo.nYPitch, ctx->width, ctx->height, NULL, GF_FALSE);
			} else if (chromat_format == YUV420) {
				gf_color_write_yv12_10_to_yuv(&dst, (u8 *) openHevcFramePtr.pvY, (u8 *) openHevcFramePtr.pvU, (u8 *) openHevcFramePtr.pvV, openHevcFramePtr.frameInfo.nYPitch, ctx->width, ctx->height, NULL, GF_FALSE);
			} else if (chromat_format == YUV422) {
				gf_color_write_yuv422_10_to_yuv422(&dst, (u8 *) openHevcFramePtr.pvY, (u8 *) openHevcFramePtr.pvU, (u8 *) openHevcFramePtr.pvV, openHevcFramePtr.frameInfo.nYPitch, ctx->width, ctx->height, NULL, GF_FALSE);
			} else {
				return GF_NOT_SUPPORTED;
			}
			*outBufferLength = ctx->out_size;

			if (ctx->direct_output)
				ctx->has_pic = GF_TRUE;
		}
		return GF_OK;
	}

	if (ctx->pack_mode) {
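		/*pack_mode tiles four consecutive frames into one 2x2 output grid: idx_w picks the left/right half, idx_h the top/bottom half, and outBufferLength is only set once the 4th frame completes the grid*/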
		OpenHevc_Frame openHFrame;
		u8 *pY, *pU, *pV;

		u32 idx_w, idx_h;
		idx_w = ((ctx->frame_idx==0) || (ctx->frame_idx==2)) ? 0 : ctx->width;
		idx_h = ((ctx->frame_idx==0) || (ctx->frame_idx==1)) ? 0 : ctx->height*2*ctx->stride;

		pY = (u8*) (outBuffer + idx_h + idx_w );
		pU = (u8*) (outBuffer + 2*ctx->stride*2*ctx->height + idx_w/2 +  idx_h/4);
		pV = (u8*) (outBuffer + 2*ctx->stride*2*ctx->height + ctx->stride*ctx->height + idx_w/2 + idx_h/4);


		*outBufferLength = 0;
		if (libOpenHevcGetOutput(ctx->openHevcHandle, 1, &openHFrame)) {
			u32 i, s_stride, qs_stride, d_stride, dd_stride, hd_stride;

			s_stride = openHFrame.frameInfo.nYPitch;
			qs_stride = s_stride / 4;

			d_stride = ctx->stride;
			dd_stride = 2*ctx->stride;
			hd_stride = ctx->stride/2;

			for (i=0; i<ctx->height; i++) {
				memcpy(pY,  (u8 *) openHFrame.pvY + i*s_stride, d_stride);
				pY += dd_stride;

				if (! (i%2) ) {
					memcpy(pU,  (u8 *) openHFrame.pvU + i*qs_stride, hd_stride);
					pU += d_stride;

					memcpy(pV,  (u8 *) openHFrame.pvV + i*qs_stride, hd_stride);
					pV += d_stride;
				}
			}

			ctx->frame_idx++;
			if (ctx->frame_idx==4) {
				*outBufferLength = 4 * ctx->out_size;
				ctx->frame_idx = 0;
			}
		}
		return GF_OK;
	}


	/*no conversion, no packing, no direct output: have the decoder copy its planes straight into outBuffer*/
	openHevcFrame.pvY = (void*) outBuffer;
	openHevcFrame.pvU = (void*) (outBuffer + ctx->stride * ctx->height);
	if (chromat_format == YUV420) {
		openHevcFrame.pvV = (void*) (outBuffer + 5*ctx->stride * ctx->height/4);
	} else if (chromat_format == YUV422) {
		openHevcFrame.pvV = (void*) (outBuffer + 3*ctx->stride * ctx->height/2);
	} else if (chromat_format == YUV444) {
		openHevcFrame.pvV = (void*) (outBuffer + 2*ctx->stride * ctx->height);
	}
	*outBufferLength = 0;
	if (libOpenHevcGetOutputCpy(ctx->openHevcHandle, 1, &openHevcFrame)) {
		*outBufferLength = ctx->out_size;
	}
	return GF_OK;
}
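
For reference, the out_size branches above are the usual planar YUV frame sizes. A standalone sketch of that arithmetic (hevc_out_size() is a hypothetical helper; the 420/422/444 constants stand in for the YUV420/YUV422/YUV444 values used above):

#include <stdio.h>
#include <stddef.h>

/* Mirrors the out_size arithmetic in HEVC_flush_picture():
   4:2:0 -> stride*height*3/2, 4:2:2 -> stride*height*2, 4:4:4 -> stride*height*3 */
static size_t hevc_out_size(size_t stride, size_t height, int chroma_format)
{
	switch (chroma_format) {
	case 420: return stride * height * 3 / 2;
	case 422: return stride * height * 2;
	case 444: return stride * height * 3;
	default:  return 0; /* unsupported chroma format */
	}
}

int main(void)
{
	/* e.g. an 8-bit 1920x1080 4:2:0 frame with a tightly packed stride */
	printf("%zu bytes\n", hevc_out_size(1920, 1080, 420)); /* prints 3110400 bytes */
	return 0;
}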