Example #1
int GP_ReadJP2Ex(GP_IO *io, GP_Context **rimg, GP_DataStorage *storage,
                 GP_ProgressCallback *callback)
{
	opj_dparameters_t params;
	opj_codec_t *codec;
	opj_stream_t *stream;
	opj_image_t *img;

	GP_PixelType pixel_type;
	GP_Context *res = NULL;
	unsigned int i, x, y;
	int err = 0, ret = 1;

	opj_set_default_decoder_parameters(&params);

	codec = opj_create_decompress(OPJ_CODEC_JP2);

	if (!codec) {
		GP_DEBUG(1, "opj_create_decompress failed");
		err = ENOMEM;
		goto err0;
	}

	opj_set_error_handler(codec, jp2_err_callback, NULL);
	opj_set_warning_handler(codec, jp2_warn_callback, NULL);
	opj_set_info_handler(codec, jp2_info_callback, callback);

	if (!opj_setup_decoder(codec, &params)) {
		GP_DEBUG(1, "opj_setup_decoder failed");
		err = ENOMEM;
		goto err1;
	}

	stream = opj_stream_default_create(OPJ_TRUE);

	if (!stream) {
		GP_DEBUG(1, "opj_stream_create_default_file_stream faled");
		err = ENOMEM;
		goto err1;
	}

	//TODO: Do we need seek and skip?
	opj_stream_set_read_function(stream, jp2_io_read);
	opj_stream_set_user_data(stream, io);

	if (!opj_read_header(stream, codec, &img)) {
		GP_DEBUG(1, "opj_read_header failed");
		err = EINVAL;
		goto err2;
	}

	if (storage)
		fill_metadata(img, storage);

	GP_DEBUG(1, "Have image %ux%u-%ux%u colorspace=%s numcomps=%u",
	         img->x0, img->y0, img->x1, img->y1,
	         color_space_name(img->color_space), img->numcomps);

	if (!rimg) {
		ret = 0;
		goto err3;
	}

	/*
	 * Try to map the image information to a pixel type.
	 *
	 * Unfortunately the images I had had color_space set
	 * to unspecified, yet they were RGB888.
	 */
	for (i = 0; i < img->numcomps; i++) {
		opj_image_comp_t *comp = &img->comps[i];

		GP_DEBUG(2, "Component %u %ux%u bpp=%u",
		         i, comp->w, comp->h, comp->prec);

		if (comp->w != img->comps[0].w ||
		    comp->h != img->comps[0].h) {
			GP_DEBUG(1, "Component %u has different size", 1);
			err = ENOSYS;
			goto err3;
		}

		if (comp->prec != 8) {
			GP_DEBUG(1, "Component %u has different bpp", 1);
			err = ENOSYS;
			goto err3;
		}
	}

	switch (img->color_space) {
	case OPJ_CLRSPC_UNSPECIFIED:
		if (img->numcomps != 3) {
			GP_DEBUG(1, "Unexpected number of components");
			err = ENOSYS;
			goto err3;
		}
		pixel_type = GP_PIXEL_RGB888;
	break;
	default:
		GP_DEBUG(1, "Unsupported colorspace");
		err = ENOSYS;
		goto err3;
	}

	GP_ProgressCallbackReport(callback, 0, 100, 100);

	if (!opj_decode(codec, stream, img)) {
		GP_DEBUG(1, "opj_decode failed");
		err = EINVAL;
		goto err3;
	}

	res = GP_ContextAlloc(img->comps[0].w, img->comps[0].h, pixel_type);

	if (!res) {
		GP_DEBUG(1, "Malloc failed :(");
		err = ENOMEM;
		goto err3;
	}

	for (y = 0; y < res->h; y++) {
		for (x = 0; x < res->w; x++) {
			i = y * res->w + x;

			GP_Pixel p = img->comps[0].data[i] << 16 |
			             img->comps[1].data[i] << 8  |
			             img->comps[2].data[i];

			GP_PutPixel_Raw_24BPP(res, x, y, p);
		}
	}

	GP_ProgressCallbackDone(callback);
	*rimg = res;
	ret = 0;
err3:
	opj_image_destroy(img);
err2:
	opj_stream_destroy(stream);
err1:
	opj_destroy_codec(codec);
err0:
	if (err)
		errno = err;
	return ret;
}
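
A minimal usage sketch for the reader above; GP_IOFile(), GP_IO_RDONLY and GP_IOClose() are assumed names for the GFXprim file-backed GP_IO constructor, its read-only mode and the matching close call, so the real API may differ:

GP_IO *io = GP_IOFile("image.jp2", GP_IO_RDONLY); /* assumed GP_IO constructor */
GP_Context *img;

if (io) {
	if (!GP_ReadJP2Ex(io, &img, NULL, NULL)) {
		/* ... use img ... */
		GP_ContextFree(img);
	}
	GP_IOClose(io); /* assumed close call */
}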
Example #2
/*
 * Apply the Sobel operator.
 */
static int sobel(const GP_Context *src, GP_Context *dx, GP_Context *dy,
                   GP_ProgressCallback *callback)
{
	float dx_kern[] = {
		-1,  0,  1,
		-2,  0,  2,
		-1,  0,  1,
	};

	GP_ConvolutionParams dx_conv = {
		.src = src,
		.x_src = 0,
		.y_src = 0,
		.w_src = src->w,
		.h_src = src->h,
		.dst = dx,
		.x_dst = 0,
		.y_dst = 0,
		.kernel = dx_kern,
		.kw = 3,
		.kh = 3,
		.kern_div = 1,
		.callback = callback,
	};

	if (GP_FilterConvolution_Raw(&dx_conv))
		return 1;

	float dy_kern[] = {
		-1, -2, -1,
		 0,  0,  0,
		 1,  2,  1,
	};

	GP_ConvolutionParams dy_conv = {
		.src = src,
		.x_src = 0,
		.y_src = 0,
		.w_src = src->w,
		.h_src = src->h,
		.dst = dy,
		.x_dst = 0,
		.y_dst = 0,
		.kernel = dy_kern,
		.kw = 3,
		.kh = 3,
		.kern_div = 1,
		.callback = callback,
	};

	if (GP_FilterConvolution_Raw(&dy_conv))
		return 1;

	return 0;
}
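
/*
 * Note: edge_detect() below also calls prewitt(), which is not part of this
 * excerpt. A minimal sketch, assuming the same GP_ConvolutionParams interface
 * as in sobel() above and the standard 3x3 Prewitt kernels, could look like
 * this:
 */
static int prewitt(const GP_Context *src, GP_Context *dx, GP_Context *dy,
                   GP_ProgressCallback *callback)
{
	float dx_kern[] = {
		-1, 0, 1,
		-1, 0, 1,
		-1, 0, 1,
	};

	float dy_kern[] = {
		-1, -1, -1,
		 0,  0,  0,
		 1,  1,  1,
	};

	GP_ConvolutionParams conv = {
		.src = src,
		.x_src = 0,
		.y_src = 0,
		.w_src = src->w,
		.h_src = src->h,
		.dst = dx,
		.x_dst = 0,
		.y_dst = 0,
		.kernel = dx_kern,
		.kw = 3,
		.kh = 3,
		.kern_div = 1,
		.callback = callback,
	};

	/* Horizontal derivative into dx */
	if (GP_FilterConvolution_Raw(&conv))
		return 1;

	/* Same convolution with the vertical kernel into dy */
	conv.dst = dy;
	conv.kernel = dy_kern;

	if (GP_FilterConvolution_Raw(&conv))
		return 1;

	return 0;
}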

static int edge_detect(const GP_Context *src,
                       GP_Context **E, GP_Context **Phi, int type,
		       GP_ProgressCallback *callback)
{
	//TODO
	GP_ASSERT(src->pixel_type == GP_PIXEL_RGB888);

	GP_Context *dx, *dy;

	dx = GP_ContextCopy(src, 0);
	dy = GP_ContextCopy(src, 0);

	if (dx == NULL || dy == NULL)
		goto err0;

	switch (type) {
	case 0:
		if (sobel(src, dx, dy, callback))
			goto err0;
	break;
	case 1:
		if (prewitt(src, dx, dy, callback))
			goto err0;
	break;
	default:
		goto err0;
	}

	uint32_t i, j;

	for (i = 0; i < src->w; i++) {
		for (j = 0; j < src->h; j++) {
			GP_Pixel pix_x = GP_GetPixel_Raw_24BPP(dx, i, j);
			GP_Pixel pix_y = GP_GetPixel_Raw_24BPP(dy, i, j);
			int Rx, Gx, Bx;
			int Ry, Gy, By;
			int RE, GE, BE;
			int RPhi, GPhi, BPhi;

			Rx = GP_Pixel_GET_R_RGB888(pix_x);
			Gx = GP_Pixel_GET_G_RGB888(pix_x);
			Bx = GP_Pixel_GET_B_RGB888(pix_x);

			Ry = GP_Pixel_GET_R_RGB888(pix_y);
			Gy = GP_Pixel_GET_G_RGB888(pix_y);
			By = GP_Pixel_GET_B_RGB888(pix_y);

			RE = sqrt(Rx*Rx + Ry*Ry) + 0.5;
			GE = sqrt(Gx*Gx + Gy*Gy) + 0.5;
			BE = sqrt(Bx*Bx + By*By) + 0.5;

			GP_PutPixel_Raw_24BPP(dx, i, j,
			                      GP_Pixel_CREATE_RGB888(RE, GE, BE));

			if (Rx != 0 && Ry != 0)
				RPhi = ((atan2(Rx, Ry) + M_PI) * 255)/(2*M_PI);
			else
				RPhi = 0;

			if (Gx != 0 && Gy != 0)
				GPhi = ((atan2(Gx, Gy) + M_PI) * 255)/(2*M_PI);
			else
				GPhi = 0;

			if (Bx != 0 && By != 0)
				BPhi = ((atan2(Bx, By) + M_PI) * 255)/(2*M_PI);
			else
				BPhi = 0;

			GP_PutPixel_Raw_24BPP(dy, i, j,
			                      GP_Pixel_CREATE_RGB888(RPhi, GPhi, BPhi));
		}
	}

	if (Phi != NULL)
		*Phi = dy;
	else
		GP_ContextFree(dy);

	if (E != NULL)
		*E = dx;
	else
		GP_ContextFree(dx);

	return 0;
err0:
	GP_ContextFree(dx);
	GP_ContextFree(dy);
	return 1;
}

int GP_FilterEdgeSobel(const GP_Context *src,
                       GP_Context **E, GP_Context **Phi,
                       GP_ProgressCallback *callback)
{
	GP_DEBUG(1, "Sobel edge detection image %ux%u", src->w, src->h);

	return edge_detect(src, E, Phi, 0, callback);
}
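
A minimal usage sketch; GP_LoadImage() and GP_SaveImage() are assumed GFXprim loader/saver entry points and may differ from the real API. Note that edge_detect() above asserts that the source is GP_PIXEL_RGB888:

GP_Context *img = GP_LoadImage("input.png", NULL); /* assumed loader, must yield RGB888 */
GP_Context *E, *Phi;

if (img && !GP_FilterEdgeSobel(img, &E, &Phi, NULL)) {
	GP_SaveImage(E, "edges.png", NULL);    /* gradient magnitude */
	GP_SaveImage(Phi, "angles.png", NULL); /* gradient direction mapped to 0-255 */
	GP_ContextFree(E);
	GP_ContextFree(Phi);
}
GP_ContextFree(img);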
Example #3
int GP_ReadGIFEx(GP_IO *io, GP_Context **img,
                 GP_DataStorage *storage, GP_ProgressCallback *callback)
{
	GifFileType *gf;
	GifRecordType rec_type;
	GP_Context *res = NULL;
	GP_Pixel bg;
	int32_t x, y;
	int err;

	errno = 0;
#if defined(GIFLIB_MAJOR) && GIFLIB_MAJOR >= 5
	gf = DGifOpen(io, gif_input_func, NULL);
#else
	gf = DGifOpen(io, gif_input_func);
#endif

	if (gf == NULL) {
		/*
		 * giflib uses open(), so if we get a failure and errno
		 * is set, open() has failed.
		 *
		 * When errno is not set, the file content was not valid,
		 * so we set errno to EIO.
		 */
		if (errno == 0)
			errno = EIO;

		return 1;
	}

	GP_DEBUG(1, "Have GIF image %ix%i, %i colors, %i bpp",
	         gf->SWidth, gf->SHeight, gf->SColorResolution,
		 gf->SColorMap ? gf->SColorMap->BitsPerPixel : -1);

	do {
		if (DGifGetRecordType(gf, &rec_type) != GIF_OK) {
			//TODO: error handling
			GP_DEBUG(1, "DGifGetRecordType() error %s (%i)",
			         gif_err_name(gif_err(gf)), gif_err(gf));
			err = EIO;
			goto err1;
		}

		GP_DEBUG(2, "Have GIF record type %s",
		         rec_type_name(rec_type));

		switch (rec_type) {
		case EXTENSION_RECORD_TYPE:
			if ((err = read_extensions(gf)))
				goto err1;
			continue;
		case IMAGE_DESC_RECORD_TYPE:
		break;
		default:
			continue;
		}

		if (DGifGetImageDesc(gf) != GIF_OK) {
			//TODO: error handling
			GP_DEBUG(1, "DGifGetImageDesc() error %s (%i)",
			         gif_err_name(gif_err(gf)), gif_err(gf)); 
			err = EIO;
			goto err1;
		}

		if (storage)
			fill_metadata(gf, storage);

		GP_DEBUG(1, "Have GIF Image left-top %ix%i, width-height %ix%i,"
		         " interlace %i, bpp %i", gf->Image.Left, gf->Image.Top,
			 gf->Image.Width, gf->Image.Height, gf->Image.Interlace,
			 gf->Image.ColorMap ? gf->Image.ColorMap->BitsPerPixel : -1);

		if (!img)
			break;

		res = GP_ContextAlloc(gf->SWidth, gf->SHeight, GP_PIXEL_RGB888);

		if (res == NULL) {
			err = ENOMEM;
			goto err1;
		}

		/* If background color is defined, use it */
		if (get_bg_color(gf, &bg)) {
			GP_DEBUG(1, "Filling bg color %x", bg);
			GP_Fill(res, bg);
		}

		/* Now finally read gif image data */
		for (y = gf->Image.Top; y < gf->Image.Height; y++) {
			uint8_t line[gf->Image.Width];

			DGifGetLine(gf, line, gf->Image.Width);

			unsigned int real_y = y;

			if (gf->Image.Interlace) {
				real_y = interlace_real_y(gf, y);
				GP_DEBUG(3, "Interlace y -> real_y %u %u", y, real_y);
			}

			//TODO: for now we have only 8BPP
			for (x = 0; x < gf->Image.Width; x++)
				GP_PutPixel_Raw_24BPP(res, x + gf->Image.Left, real_y, get_color(gf, line[x]));

			if (GP_ProgressCallbackReport(callback, y - gf->Image.Top,
			                              gf->Image.Height,
						      gf->Image.Width)) {
				GP_DEBUG(1, "Operation aborted");
				err = ECANCELED;
				goto err2;
			}
		}

		//TODO: for now we exit after reading the first image
		break;

	} while (rec_type != TERMINATE_RECORD_TYPE);

	DGifCloseFile(gf);

	/* No Image record found :( */
	if (img && !res) {
		errno = EINVAL;
		return 1;
	}

	if (img)
		*img = res;

	return 0;
err2:
	GP_ContextFree(res);
err1:
	DGifCloseFile(gf);
	errno = err;
	return 1;
}
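
The reader above passes gif_input_func() to DGifOpen() but the callback itself is not part of this excerpt. A minimal sketch, assuming GP_IORead() is the GP_IO read helper (the name and signature are an assumption), could look like this:

static int gif_input_func(GifFileType *gf, GifByteType *bytes, int size)
{
	/* The io pointer handed to DGifOpen() above is stored in UserData */
	GP_IO *io = gf->UserData;
	ssize_t ret = GP_IORead(io, bytes, size); /* assumed GP_IO read helper */

	/* giflib expects the number of bytes read, 0 signals failure */
	return ret < 0 ? 0 : ret;
}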