Example #1
static void display_exit()
{
#ifdef CSC_USE_IPU
	close(fd_ipu);
#else
	g2d_close(ghandler);
#endif
	close(fd_out);
}
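A teardown like this mirrors an init path elsewhere in the same program. Below is a minimal sketch of what that counterpart might look like, assuming the example's own globals (fd_ipu, ghandler, fd_out) and treating the device paths as placeholders (open() needs <fcntl.h>):

static int display_init(void)
{
#ifdef CSC_USE_IPU
	/* IPU path: open the IPU device node (path is a placeholder) */
	fd_ipu = open("/dev/mxc_ipu", O_RDWR);
	if (fd_ipu < 0)
		return -1;
#else
	/* G2D path: g2d_open() fills in the opaque handle that display_exit() closes */
	if (g2d_open(&ghandler) == -1 || ghandler == NULL)
		return -1;
#endif
	/* output device, e.g. a framebuffer node (path is a placeholder) */
	fd_out = open("/dev/fb0", O_RDWR);
	if (fd_out < 0)
		return -1;
	return 0;
}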
Example #2
static void exynos_drm_postclose(struct drm_device *dev, struct drm_file *file)
{
	g2d_close(dev, file);
	kfree(file->driver_priv);
	file->driver_priv = NULL;
}
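Unlike the other examples, this g2d_close() is the Exynos DRM kernel driver's own per-file cleanup, called from the driver's postclose hook. A minimal sketch of how such a hook is typically registered in the driver's struct drm_driver (other fields elided; the exact contents vary by kernel version):

static struct drm_driver exynos_drm_driver = {
	/* ... feature flags, open/ioctl tables, etc. elided ... */
	.postclose = exynos_drm_postclose,
};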
Example #3
static void draw_image_to_framebuffer(struct g2d_buf *buf, int img_width, int img_height, int img_format, 
		 struct fb_var_screeninfo *screen_info, int left, int top, int to_width, int to_height, int set_alpha, int rotation)
{
	int i;
	struct g2d_surface src, dst;
	void *g2dHandle;

	if ( ( (left+to_width) > (int)screen_info->xres ) || ( (top+to_height) > (int)screen_info->yres ) )  {
		printf("Bad display image dimensions!\n");
		return;
	}

#if CACHEABLE
	g2d_cache_op(buf, G2D_CACHE_FLUSH);
#endif

	if (g2d_open(&g2dHandle) == -1 || g2dHandle == NULL) {
		printf("Failed to open g2d device!\n");
		g2d_free(buf);
		return;
	}

/*
	NOTE: in this example, all the test image data already meets the alignment
	requirements below. In your own code, make sure your buffers do as well.

	Pixel buffer address alignment requirements:
	RGB/BGR:   pixel data in planes[0], 16-byte alignment,
	NV12/NV16: Y in planes[0], UV in planes[1], 64-byte alignment,
	I420:      Y in planes[0], U in planes[1], V in planes[2], 64-byte alignment,
	YV12:      Y in planes[0], V in planes[1], U in planes[2], 64-byte alignment,
	NV21/NV61: Y in planes[0], VU in planes[1], 64-byte alignment,
	YUYV/YVYU/UYVY/VYUY: packed data in planes[0], 16-byte alignment.
*/

	src.format = img_format;
	switch (src.format) {
	case G2D_RGB565:
	case G2D_RGBA8888:
	case G2D_RGBX8888:
	case G2D_BGRA8888:
	case G2D_BGRX8888:
	case G2D_BGR565:
	case G2D_YUYV:
	case G2D_UYVY:
		src.planes[0] = buf->buf_paddr;
		break;
	case G2D_NV12:
		src.planes[0] = buf->buf_paddr;
		src.planes[1] = buf->buf_paddr + img_width * img_height;
		break;
	case G2D_I420:
		src.planes[0] = buf->buf_paddr;
		src.planes[1] = buf->buf_paddr + img_width * img_height;
		src.planes[2] = src.planes[1] + img_width * img_height / 4;
		break;
	case G2D_NV16:
		src.planes[0] = buf->buf_paddr;
		src.planes[1] = buf->buf_paddr + img_width * img_height;
		break;
	default:
		printf("Unsupported image format in the example code\n");
		g2d_close(g2dHandle);
		return;
	}

	src.left = 0;
	src.top = 0;
	src.right = img_width;
	src.bottom = img_height;
	src.stride = img_width;
	src.width  = img_width;
	src.height = img_height;
	src.rot  = G2D_ROTATION_0;

	dst.planes[0] = g_fb_phys;
	dst.left = left;
	dst.top = top;
	dst.right = left + to_width;
	dst.bottom = top + to_height;
	dst.stride = screen_info->xres;
	dst.width  = screen_info->xres;
	dst.height = screen_info->yres;
	dst.rot    = rotation;
	dst.format = screen_info->bits_per_pixel == 16 ? G2D_RGB565 :
		(screen_info->red.offset == 0 ? G2D_RGBA8888 : G2D_BGRA8888);

	if (set_alpha)
	{
		src.blendfunc = G2D_ONE;
		dst.blendfunc = G2D_ONE_MINUS_SRC_ALPHA;
	
		src.global_alpha = 0x80;
		dst.global_alpha = 0xff;
	
		g2d_enable(g2dHandle, G2D_BLEND);
		g2d_enable(g2dHandle, G2D_GLOBAL_ALPHA);
	}

	g2d_blit(g2dHandle, &src, &dst);
	g2d_finish(g2dHandle);

	if (set_alpha)
	{
		g2d_disable(g2dHandle, G2D_GLOBAL_ALPHA);
		g2d_disable(g2dHandle, G2D_BLEND);
	}

	g2d_close(g2dHandle);
}
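A minimal usage sketch for the function above, assuming an NV12 test frame and the g2d_alloc() allocator from the same G2D API (buffers it returns are physically contiguous and satisfy the alignment rules noted in the code); the dimensions, the way the pixel data is filled in, and screen_info (an fb_var_screeninfo queried via FBIOGET_VSCREENINFO) are assumptions:

	struct g2d_buf *buf;
	int img_width = 1024, img_height = 768;      /* placeholder source size */
	int size = img_width * img_height * 3 / 2;   /* NV12: Y plane + interleaved UV plane */

	buf = g2d_alloc(size, 0);                    /* non-cacheable allocation */
	if (buf == NULL)
		return;
	/* fill buf->buf_vaddr with NV12 pixel data here, e.g. read from a file */
	draw_image_to_framebuffer(buf, img_width, img_height, G2D_NV12, &screen_info,
			0, 0, screen_info.xres, screen_info.yres, 0, G2D_ROTATION_0);
	g2d_free(buf);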
Example #4
JNIEXPORT void JNICALL Java_com_example_enzocamtest_CamView_loadNextFrame(JNIEnv* env,
		jobject thiz, jobject bitmap)
{
	AndroidBitmapInfo info;
	int result;

	if((result = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
		err_msg("AndroidBitmap_getInfo() failed, error=%d", result);
		return;
	}

	if(info.format != ANDROID_BITMAP_FORMAT_RGB_565) {
		err_msg("Bitmap format is not RGBA_565 !");
		return;
	}

	char *colors;
	if((result = AndroidBitmap_lockPixels(env, bitmap, (void*)&colors)) < 0) {
		err_msg("AndroidBitmap_lockPixels() failed, error=%d", result);
	}

	//info_msg("Getting camera frame...\n");
	result = cameraGetFrame(usbCam, camData);
	if (result < 0) {
		err_msg("Could not get camera frame\n");
	}

	//info_msg("Decoding camera frame...\n");
	result = decoderDecodeFrame(mjpgDec, camData, yuvData);
	if (result < 0) {
		err_msg("Could not decode MJPG frame\n");
	}

	if(g2d_open(&g2d_handle)) {
		err_msg("Encoder: g2d_open fail.\n");
		return;
	}

	int y_size = info.width * info.height;

	y422_buf->buf_paddr = (unsigned char *)yuvData->pBufOut;
	y422_buf->buf_vaddr = (unsigned char *)yuvData->vBufOut;

	g2d_copy(g2d_handle, y420_buf, y422_buf, y_size);
	g2d_finish(g2d_handle);

	unsigned char *u_src;
	unsigned char *u_dst;
	int i = 0;

	u_src = y422_buf->buf_vaddr + y_size;
	u_dst = y420_buf->buf_vaddr + y_size;
	while (i < info.height) {
		memcpy(u_dst, u_src, info.width/2);
		u_dst += info.width/2;
		u_src += info.width;
		i++;
	}

	//info_msg("Converting frame to RGB565...\n");
	g2d_blit(g2d_handle, &y420_surf, &rgb_surf);
	g2d_finish(g2d_handle);
	g2d_close(g2d_handle);

	//info_msg("Copy RGB frame to bitmap...\n");
	memcpy(colors, rgb_buf->buf_vaddr, info.width * info.height * 2);

	AndroidBitmap_unlockPixels(env, bitmap);
}
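The final blit relies on surfaces configured elsewhere in this program. Below is a minimal sketch of how the RGB565 destination surface rgb_surf might be set up for an info.width x info.height frame; the field names follow struct g2d_surface as used in Example #3, and rgb_buf is assumed to be a G2D buffer of width * height * 2 bytes:

	rgb_surf.format = G2D_RGB565;            /* matches ANDROID_BITMAP_FORMAT_RGB_565 and the 2 bytes/pixel copy above */
	rgb_surf.planes[0] = rgb_buf->buf_paddr;
	rgb_surf.left   = 0;
	rgb_surf.top    = 0;
	rgb_surf.right  = info.width;
	rgb_surf.bottom = info.height;
	rgb_surf.stride = info.width;
	rgb_surf.width  = info.width;
	rgb_surf.height = info.height;
	rgb_surf.rot    = G2D_ROTATION_0;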