static int libopenjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt) { const uint8_t *buf = avpkt->data; int buf_size = avpkt->size; LibOpenJPEGContext *ctx = avctx->priv_data; AVFrame *picture = &ctx->image, *output = data; opj_dinfo_t *dec; opj_cio_t *stream; opj_image_t *image; int width, height, has_alpha = 0, ret = -1; int x, y, index; uint8_t *img_ptr; int adjust[4]; *data_size = 0; // Check if input is a raw jpeg2k codestream or in jp2 wrapping if((AV_RB32(buf) == 12) && (AV_RB32(buf + 4) == JP2_SIG_TYPE) && (AV_RB32(buf + 8) == JP2_SIG_VALUE)) { dec = opj_create_decompress(CODEC_JP2); } else { // If the AVPacket contains a jp2c box, then skip to // the starting byte of the codestream. if (AV_RB32(buf + 4) == AV_RB32("jp2c")) buf += 8; dec = opj_create_decompress(CODEC_J2K); } if(!dec) { av_log(avctx, AV_LOG_ERROR, "Error initializing decoder.\n"); return -1; } opj_set_event_mgr((opj_common_ptr)dec, NULL, NULL); ctx->dec_params.cp_reduce = avctx->lowres; // Tie decoder with decoding parameters opj_setup_decoder(dec, &ctx->dec_params); stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size); if(!stream) { av_log(avctx, AV_LOG_ERROR, "Codestream could not be opened for reading.\n"); opj_destroy_decompress(dec); return -1; } // Decode the codestream image = opj_decode_with_info(dec, stream, NULL); opj_cio_close(stream); if(!image) { av_log(avctx, AV_LOG_ERROR, "Error decoding codestream.\n"); opj_destroy_decompress(dec); return -1; } width = image->comps[0].w << avctx->lowres; height = image->comps[0].h << avctx->lowres; if(avcodec_check_dimensions(avctx, width, height) < 0) { av_log(avctx, AV_LOG_ERROR, "%dx%d dimension invalid.\n", width, height); goto done; } avcodec_set_dimensions(avctx, width, height); switch(image->numcomps) { case 1: avctx->pix_fmt = PIX_FMT_GRAY8; break; case 3: if(check_image_attributes(image)) { avctx->pix_fmt = PIX_FMT_RGB24; } else { avctx->pix_fmt = PIX_FMT_GRAY8; av_log(avctx, AV_LOG_ERROR, 
"Only first component will be used.\n"); } break; case 4: has_alpha = 1; avctx->pix_fmt = PIX_FMT_RGBA; break; default: av_log(avctx, AV_LOG_ERROR, "%d components unsupported.\n", image->numcomps); goto done; } if(picture->data[0]) avctx->release_buffer(avctx, picture); if(avctx->get_buffer(avctx, picture) < 0) { av_log(avctx, AV_LOG_ERROR, "Couldn't allocate image buffer.\n"); return -1; } for(x = 0; x < image->numcomps; x++) { adjust[x] = FFMAX(image->comps[x].prec - 8, 0); } for(y = 0; y < avctx->height; y++) { index = y*avctx->width; img_ptr = picture->data[0] + y*picture->linesize[0]; for(x = 0; x < avctx->width; x++, index++) { *img_ptr++ = image->comps[0].data[index] >> adjust[0]; if(image->numcomps > 2 && check_image_attributes(image)) { *img_ptr++ = image->comps[1].data[index] >> adjust[1]; *img_ptr++ = image->comps[2].data[index] >> adjust[2]; if(has_alpha) *img_ptr++ = image->comps[3].data[index] >> adjust[3]; } } }
/**
 * Encode one AVFrame into a JPEG 2000 codestream with libopenjpeg 1.x.
 *
 * Copies the frame pixels into the pre-allocated opj_image_t held in the
 * codec private context (choosing a packed/planar, 8/16-bit copy helper by
 * pixel format), runs the encoder into a memory CIO stream, and emits the
 * resulting codestream as a key-frame packet.
 *
 * @param avctx      codec context (provides dimensions, pix_fmt, priv_data)
 * @param pkt        output packet; allocated here via ff_alloc_packet2()
 * @param frame      input picture to encode (not modified)
 * @param got_packet set to 1 when a packet was produced
 * @return 0 on success, negative AVERROR/-1 on failure
 */
static int libopenjpeg_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                    const AVFrame *frame, int *got_packet)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    opj_cinfo_t *compress = ctx->compress;
    opj_image_t *image = ctx->image;
    opj_cio_t *stream;
    int cpyresult = 0;    /* nonzero once the pixel copy into `image` succeeded */
    int ret, len;
    AVFrame gbrframe;     /* shallow copy used to reorder GBR planes as RGB */

    // x0, y0 is the top left corner of the image
    // x1, y1 is the width, height of the reference grid
    image->x0 = 0;
    image->y0 = 0;
    image->x1 = (avctx->width - 1) * ctx->enc_params.subsampling_dx + 1;
    image->y1 = (avctx->height - 1) * ctx->enc_params.subsampling_dy + 1;

    /* Pick the copy routine matching the input layout (packed vs planar)
     * and sample depth (8 vs >8 bit). */
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_GRAY8A:
        cpyresult = libopenjpeg_copy_packed8(avctx, frame, image);
        break;
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
        cpyresult = libopenjpeg_copy_packed16(avctx, frame, image);
        break;
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        /* GBR planar: shallow-copy the frame and swap plane pointers so the
         * planes are presented to the copy helper in R,G,B order. */
        gbrframe = *frame;
        gbrframe.data[0] = frame->data[2]; // swap to be rgb
        gbrframe.data[1] = frame->data[0];
        gbrframe.data[2] = frame->data[1];
        gbrframe.linesize[0] = frame->linesize[2];
        gbrframe.linesize[1] = frame->linesize[0];
        gbrframe.linesize[2] = frame->linesize[1];
        if (avctx->pix_fmt == AV_PIX_FMT_GBR24P) {
            cpyresult = libopenjpeg_copy_unpacked8(avctx, &gbrframe, image);
        } else {
            cpyresult = libopenjpeg_copy_unpacked16(avctx, &gbrframe, image);
        }
        break;
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
        cpyresult = libopenjpeg_copy_unpacked8(avctx, frame, image);
        break;
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUVA444P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA420P16:
        cpyresult = libopenjpeg_copy_unpacked16(avctx, frame, image);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The frame's pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return AVERROR(EINVAL);
        break;
    }

    if (!cpyresult) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not copy the frame data to the internal image buffer\n");
        return -1;
    }

    opj_setup_encoder(compress, &ctx->enc_params, image);

    /* NULL/0 buffer => libopenjpeg allocates and grows the output itself. */
    stream = opj_cio_open((opj_common_ptr)compress, NULL, 0);
    if (!stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        return AVERROR(ENOMEM);
    }

    if (!opj_encode(compress, stream, image, NULL)) {
        opj_cio_close(stream);
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        return -1;
    }

    /* cio_tell() gives the number of bytes produced so far. */
    len = cio_tell(stream);
    if ((ret = ff_alloc_packet2(avctx, pkt, len)) < 0) {
        opj_cio_close(stream);
        return ret;
    }

    memcpy(pkt->data, stream->buffer, len);
    pkt->flags |= AV_PKT_FLAG_KEY;   // every J2K frame is independently decodable
    *got_packet = 1;
    opj_cio_close(stream);
    return 0;
}
struct ImBuf *imb_jp2_decode(unsigned char *mem, size_t size, int flags) { struct ImBuf *ibuf = 0; int use_float = 0; /* for precision higher then 8 use float */ long signed_offsets[4]= {0, 0, 0, 0}; int float_divs[4]= {1, 1, 1, 1}; int index; int w, h, depth; opj_dparameters_t parameters; /* decompression parameters */ opj_event_mgr_t event_mgr; /* event manager */ opj_image_t *image = NULL; int i; opj_dinfo_t* dinfo = NULL; /* handle to a decompressor */ opj_cio_t *cio = NULL; if (check_jp2(mem) == 0) return(0); /* configure the event callbacks (not required) */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = error_callback; event_mgr.warning_handler = warning_callback; event_mgr.info_handler = info_callback; /* set decoding parameters to default values */ opj_set_default_decoder_parameters(¶meters); /* JPEG 2000 compressed image data */ /* get a decoder handle */ dinfo = opj_create_decompress(CODEC_JP2); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr); /* setup the decoder decoding parameters using the current image and user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, mem, size); /* decode the stream and fill the image structure */ image = opj_decode(dinfo, cio); if(!image) { fprintf(stderr, "ERROR -> j2k_to_image: failed to decode image!\n"); opj_destroy_decompress(dinfo); opj_cio_close(cio); return NULL; } /* close the byte stream */ opj_cio_close(cio); if((image->numcomps * image->x1 * image->y1) == 0) { fprintf(stderr,"\nError: invalid raw image parameters\n"); return NULL; } w = image->comps[0].w; h = image->comps[0].h; switch (image->numcomps) { case 1: /* Greyscale */ case 3: /* Color */ depth= 24; break; default: /* 2 or 4 - Greyscale or Color + alpha */ depth= 32; /* greyscale + alpha */ break; } i = image->numcomps; if (i>4) i= 4; while (i) { i--; if (image->comps[i].prec > 8) 
use_float = 1; if (image->comps[i].sgnd) signed_offsets[i]= 1 << (image->comps[i].prec - 1); /* only needed for float images but dosnt hurt to calc this */ float_divs[i]= (1<<image->comps[i].prec)-1; } ibuf= IMB_allocImBuf(w, h, depth, use_float ? IB_rectfloat : IB_rect); if (ibuf==NULL) { if(dinfo) opj_destroy_decompress(dinfo); return NULL; } ibuf->ftype = JP2; if (use_float) { float *rect_float= ibuf->rect_float; if (image->numcomps < 3) { /* greyscale 12bits+ */ for (i = 0; i < w * h; i++, rect_float+=4) { index = w * h - ((i) / (w) + 1) * w + (i) % (w); rect_float[0]= rect_float[1]= rect_float[2]= (float)(image->comps[0].data[index] + signed_offsets[0]) / float_divs[0]; if (image->numcomps == 2) rect_float[3]= (image->comps[1].data[index] + signed_offsets[1]) / float_divs[1]; else rect_float[3]= 1.0f; } } else { /* rgb or rgba 12bits+ */ for (i = 0; i < w * h; i++, rect_float+=4) { index = w * h - ((i) / (w) + 1) * w + (i) % (w); rect_float[0]= (float)(image->comps[0].data[index] + signed_offsets[0]) / float_divs[0]; rect_float[1]= (float)(image->comps[1].data[index] + signed_offsets[1]) / float_divs[1]; rect_float[2]= (float)(image->comps[2].data[index] + signed_offsets[2]) / float_divs[2]; if (image->numcomps >= 4) rect_float[3]= (float)(image->comps[3].data[index] + signed_offsets[3]) / float_divs[3]; else rect_float[3]= 1.0f; } } } else { unsigned char *rect= (unsigned char *)ibuf->rect; if (image->numcomps < 3) { /* greyscale */ for (i = 0; i < w * h; i++, rect+=4) { index = w * h - ((i) / (w) + 1) * w + (i) % (w); rect[0]= rect[1]= rect[2]= (image->comps[0].data[index] + signed_offsets[0]); if (image->numcomps == 2) rect[3]= image->comps[1].data[index] + signed_offsets[1]; else rect[3]= 255; } } else { /* 8bit rgb or rgba */ for (i = 0; i < w * h; i++, rect+=4) { int index = w * h - ((i) / (w) + 1) * w + (i) % (w); rect[0]= image->comps[0].data[index] + signed_offsets[0]; rect[1]= image->comps[1].data[index] + signed_offsets[1]; rect[2]= 
image->comps[2].data[index] + signed_offsets[2]; if (image->numcomps >= 4) rect[3]= image->comps[3].data[index] + signed_offsets[3]; else rect[3]= 255; } } } /* free remaining structures */ if(dinfo) { opj_destroy_decompress(dinfo); } /* free image data structure */ opj_image_destroy(image); if (flags & IB_rect) { IMB_rect_from_float(ibuf); } return(ibuf); }
/* Found write info at http://users.ece.gatech.edu/~slabaugh/personal/c/bitmapUnix.c */ int imb_savejp2(struct ImBuf *ibuf, const char *name, int flags) { int quality = ibuf->ftype & 0xff; int bSuccess; opj_cparameters_t parameters; /* compression parameters */ opj_event_mgr_t event_mgr; /* event manager */ opj_image_t *image = NULL; (void)flags; /* unused */ /* configure the event callbacks (not required) setting of each callback is optionnal */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = error_callback; event_mgr.warning_handler = warning_callback; event_mgr.info_handler = info_callback; /* set encoding parameters to default values */ opj_set_default_encoder_parameters(¶meters); /* compression ratio */ /* invert range, from 10-100, 100-1 * where jpeg see's 1 and highest quality (lossless) and 100 is very low quality*/ parameters.tcp_rates[0]= ((100-quality)/90.0f*99.0f) + 1; parameters.tcp_numlayers = 1; // only one resolution parameters.cp_disto_alloc = 1; image= ibuftoimage(ibuf, ¶meters); { /* JP2 format output */ int codestream_length; opj_cio_t *cio = NULL; FILE *f = NULL; /* get a JP2 compressor handle */ opj_cinfo_t* cinfo = opj_create_compress(CODEC_JP2); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)cinfo, &event_mgr, stderr); /* setup the encoder parameters using the current image and using user parameters */ opj_setup_encoder(cinfo, ¶meters, image); /* open a byte stream for writing */ /* allocate memory for all tiles */ cio = opj_cio_open((opj_common_ptr)cinfo, NULL, 0); /* encode the image */ bSuccess = opj_encode(cinfo, cio, image, NULL); /* last arg used to be parameters.index but this deprecated */ if (!bSuccess) { opj_cio_close(cio); fprintf(stderr, "failed to encode image\n"); return 0; } codestream_length = cio_tell(cio); /* write the buffer to disk */ f = fopen(name, "wb"); if (!f) { fprintf(stderr, "failed to open %s for writing\n", name); return 1; } 
fwrite(cio->buffer, 1, codestream_length, f); fclose(f); fprintf(stderr,"Generated outfile %s\n",name); /* close and free the byte stream */ opj_cio_close(cio); /* free remaining compression structures */ opj_destroy_compress(cinfo); } /* free image data */ opj_image_destroy(image); return 1; }
/**
 * Encode an opj_image_t to a DCI-compliant J2K file with openjpeg 1.x.
 *
 * Derives the maximum codestream/component sizes from the configured
 * bandwidth and frame rate (halved for stereoscopic DCPs), applies the
 * cinema (2K/4K) profile, encodes, and writes the codestream to out_file.
 *
 * Fixes vs. previous revision:
 *  - repaired mangled '&parameters' arguments,
 *  - cp_comment allocation is checked and freed on all error paths,
 *  - fwrite() result is checked,
 *  - corrected "encode_openjepg" typo in the debug log message.
 *
 * @return OPENDCP_NO_ERROR on success, OPENDCP_ERROR on failure
 */
int encode_openjpeg(opendcp_t *opendcp, opj_image_t *opj_image, char *out_file) {
    bool result;
    int codestream_length;
    int max_comp_size;
    int max_cs_len;
    opj_cparameters_t parameters;   /* compression parameters */
    opj_cio_t *cio = NULL;
    opj_cinfo_t *cinfo = NULL;
    FILE *f = NULL;
    int bw;

    if (opendcp->j2k.bw) {
        bw = opendcp->j2k.bw;
    } else {
        bw = MAX_DCP_JPEG_BITRATE;
    }

    /* set the max image and component sizes based on frame_rate */
    max_cs_len = ((float)bw)/8/opendcp->frame_rate;

    /* adjust cs for 3D */
    if (opendcp->stereoscopic) {
        max_cs_len = max_cs_len/2;
    }

    max_comp_size = ((float)max_cs_len)/1.25;

    /* set encoding parameters to default values */
    opj_set_default_encoder_parameters(&parameters);

    /* set default cinema parameters */
    set_cinema_encoder_parameters(opendcp, &parameters);

    parameters.cp_comment = (char*)malloc(strlen(OPENDCP_NAME)+1);
    if (!parameters.cp_comment) {
        dcp_log(LOG_ERROR,"Unable to allocate comment for %s",out_file);
        return OPENDCP_ERROR;
    }
    sprintf(parameters.cp_comment,"%s", OPENDCP_NAME);

    /* adjust cinema enum type */
    if (opendcp->cinema_profile == DCP_CINEMA4K) {
        parameters.cp_cinema = CINEMA4K_24;
    } else {
        parameters.cp_cinema = CINEMA2K_24;
    }

    /* Decide if MCT should be used */
    parameters.tcp_mct = opj_image->numcomps == 3 ? 1 : 0;

    /* set max image */
    parameters.max_comp_size = max_comp_size;
    parameters.tcp_rates[0]= ((float) (opj_image->numcomps * opj_image->comps[0].w * opj_image->comps[0].h * opj_image->comps[0].prec))/
                             (max_cs_len * 8 * opj_image->comps[0].dx * opj_image->comps[0].dy);

    /* get a J2K compressor handle */
    dcp_log(LOG_DEBUG,"%-15.15s: creating compressor %s","encode_openjpeg",out_file);
    cinfo = opj_create_compress(CODEC_J2K);

    /* set event manager to null (openjpeg 1.3 bug) */
    cinfo->event_mgr = NULL;

    /* setup the encoder parameters using the current image and user parameters */
    dcp_log(LOG_DEBUG,"%-15.15s: setup J2k encoder %s","encode_openjpeg",out_file);
    opj_setup_encoder(cinfo, &parameters, opj_image);

    /* open a byte stream for writing */
    /* allocate memory for all tiles */
    dcp_log(LOG_DEBUG,"%-15.15s: opening J2k output stream %s","encode_openjpeg",out_file);
    cio = opj_cio_open((opj_common_ptr)cinfo, NULL, 0);

    dcp_log(LOG_INFO,"Encoding file %s",out_file);
    result = opj_encode(cinfo, cio, opj_image, NULL);
    dcp_log(LOG_DEBUG,"%-15.15s: encoding file %s complete","encode_openjpeg",out_file);

    if (!result) {
        dcp_log(LOG_ERROR,"Unable to encode jpeg2000 file %s",out_file);
        opj_cio_close(cio);
        opj_destroy_compress(cinfo);
        free(parameters.cp_comment);  /* was leaked on error paths */
        return OPENDCP_ERROR;
    }

    codestream_length = cio_tell(cio);

    f = fopen(out_file, "wb");

    if (!f) {
        dcp_log(LOG_ERROR,"Unable to write jpeg2000 file %s",out_file);
        opj_cio_close(cio);
        opj_destroy_compress(cinfo);
        free(parameters.cp_comment);
        return OPENDCP_ERROR;
    }

    /* check the write actually completed (disk full, etc.) */
    if (fwrite(cio->buffer, 1, codestream_length, f) != (size_t)codestream_length) {
        dcp_log(LOG_ERROR,"Short write for jpeg2000 file %s",out_file);
        fclose(f);
        opj_cio_close(cio);
        opj_destroy_compress(cinfo);
        free(parameters.cp_comment);
        return OPENDCP_ERROR;
    }
    fclose(f);

    /* free openjpeg structure */
    opj_cio_close(cio);
    opj_destroy_compress(cinfo);

    /* free user parameters structure */
    if(parameters.cp_comment) free(parameters.cp_comment);
    if(parameters.cp_matrice) free(parameters.cp_matrice);

    return OPENDCP_NO_ERROR;
}
/**
 * Encode a raw image into a J2K codestream stored in `base`.
 *
 * Reversible mode uses a single lossless layer; otherwise five rate layers
 * with the irreversible (9/7) transform, enabling MCT for >= 3 components.
 * Rows are copied bottom-up (raw image origin is bottom-left, J2K top-left).
 *
 * Fixes vs. previous revision:
 *  - repaired mangled '&parameters' arguments,
 *  - opj_image_create() result is checked before dereferencing,
 *  - encode failure no longer leaks the compressor and image.
 *
 * @return TRUE on success, FALSE on failure
 */
BOOL LLImageJ2COJ::encodeImpl(LLImageJ2C &base, const LLImageRaw &raw_image, const char* comment_text, F32 encode_time, BOOL reversible)
{
	const S32 MAX_COMPS = 5;
	opj_cparameters_t parameters;	/* compression parameters */
	opj_event_mgr_t event_mgr;		/* event manager */

	/* configure the event callbacks (not required)
	   setting of each callback is optional */
	memset(&event_mgr, 0, sizeof(opj_event_mgr_t));
	event_mgr.error_handler = error_callback;
	event_mgr.warning_handler = warning_callback;
	event_mgr.info_handler = info_callback;

	/* set encoding parameters to default values */
	opj_set_default_encoder_parameters(&parameters);
	parameters.cod_format = 0;
	parameters.cp_disto_alloc = 1;

	if (reversible)
	{
		parameters.tcp_numlayers = 1;
		parameters.tcp_rates[0] = 0.0f;  // 0 => lossless
	}
	else
	{
		parameters.tcp_numlayers = 5;
		parameters.tcp_rates[0] = 1920.0f;
		parameters.tcp_rates[1] = 480.0f;
		parameters.tcp_rates[2] = 120.0f;
		parameters.tcp_rates[3] = 30.0f;
		parameters.tcp_rates[4] = 10.0f;
		parameters.irreversible = 1;
		if (raw_image.getComponents() >= 3)
		{
			parameters.tcp_mct = 1;
		}
	}

	if (!comment_text)
	{
		parameters.cp_comment = (char *) "";
	}
	else
	{
		// Awful hacky cast, too lazy to copy right now.
		parameters.cp_comment = (char *) comment_text;
	}

	//
	// Fill in the source image from our raw image
	//
	OPJ_COLOR_SPACE color_space = CLRSPC_SRGB;
	opj_image_cmptparm_t cmptparm[MAX_COMPS];
	opj_image_t * image = NULL;
	S32 numcomps = raw_image.getComponents();
	S32 width = raw_image.getWidth();
	S32 height = raw_image.getHeight();

	memset(&cmptparm[0], 0, MAX_COMPS * sizeof(opj_image_cmptparm_t));
	for(S32 c = 0; c < numcomps; c++)
	{
		cmptparm[c].prec = 8;
		cmptparm[c].bpp = 8;
		cmptparm[c].sgnd = 0;
		cmptparm[c].dx = parameters.subsampling_dx;
		cmptparm[c].dy = parameters.subsampling_dy;
		cmptparm[c].w = width;
		cmptparm[c].h = height;
	}

	/* create the image */
	image = opj_image_create(numcomps, &cmptparm[0], color_space);
	if (!image)
	{
		// was dereferenced unconditionally before
		llinfos << "Failed to create image buffer." << llendl;
		return FALSE;
	}

	image->x1 = width;
	image->y1 = height;

	// Copy pixels bottom-up so the J2K image is right side up.
	S32 i = 0;
	const U8 *src_datap = raw_image.getData();
	for (S32 y = height - 1; y >= 0; y--)
	{
		for (S32 x = 0; x < width; x++)
		{
			const U8 *pixel = src_datap + (y*width + x) * numcomps;
			for (S32 c = 0; c < numcomps; c++)
			{
				image->comps[c].data[i] = *pixel;
				pixel++;
			}
			i++;
		}
	}

	/* encode the destination image */
	/* ---------------------------- */
	int codestream_length;
	opj_cio_t *cio = NULL;

	/* get a J2K compressor handle */
	opj_cinfo_t* cinfo = opj_create_compress(CODEC_J2K);

	/* catch events using our callbacks and give a local context */
	opj_set_event_mgr((opj_common_ptr)cinfo, &event_mgr, stderr);

	/* setup the encoder parameters using the current image and using user parameters */
	opj_setup_encoder(cinfo, &parameters, image);

	/* open a byte stream for writing */
	/* allocate memory for all tiles */
	cio = opj_cio_open((opj_common_ptr)cinfo, NULL, 0);

	/* encode the image */
	bool bSuccess = opj_encode(cinfo, cio, image, NULL);
	if (!bSuccess)
	{
		opj_cio_close(cio);
		opj_destroy_compress(cinfo);  // was leaked
		opj_image_destroy(image);     // was leaked
		llinfos << "Failed to encode image." << llendl;
		return FALSE;
	}
	codestream_length = cio_tell(cio);

	base.copyData(cio->buffer, codestream_length);
	base.updateData(); // set width, height

	/* close and free the byte stream */
	opj_cio_close(cio);

	/* free remaining compression structures */
	opj_destroy_compress(cinfo);

	/* free user parameters structure */
	if(parameters.cp_matrice) free(parameters.cp_matrice);

	/* free image data */
	opj_image_destroy(image);

	return TRUE;
}
int main(int argc, char *argv[]) { mj2_dparameters_t mj2_parameters; /* decompression parameters */ opj_dinfo_t* dinfo; opj_event_mgr_t event_mgr; /* event manager */ opj_cio_t *cio = NULL; unsigned int tnum, snum; opj_mj2_t *movie; mj2_tk_t *track; mj2_sample_t *sample; unsigned char* frame_codestream; FILE *file, *outfile; char outfilename[50]; opj_image_t *img = NULL; unsigned int max_codstrm_size = 0; double total_time = 0; unsigned int numframes = 0; if (argc != 3) { printf("Usage: %s inputfile.mj2 outputfile.yuv\n",argv[0]); return 1; } file = fopen(argv[1], "rb"); if (!file) { fprintf(stderr, "failed to open %s for reading\n", argv[1]); return 1; } // Checking output file outfile = fopen(argv[2], "w"); if (!file) { fprintf(stderr, "failed to open %s for writing\n", argv[2]); return 1; } fclose(outfile); /* configure the event callbacks (not required) setting of each callback is optionnal */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = error_callback; event_mgr.warning_handler = warning_callback; event_mgr.info_handler = NULL; /* get a MJ2 decompressor handle */ dinfo = mj2_create_decompress(); movie = (opj_mj2_t*)dinfo->mj2_handle; /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr); memset(&mj2_parameters, 0, sizeof(mj2_dparameters_t)); /* set J2K decoding parameters to default values */ opj_set_default_decoder_parameters(&mj2_parameters.j2k_parameters); /* setup the decoder decoding parameters using user parameters */ mj2_setup_decoder(movie, &mj2_parameters); if (mj2_read_struct(file, movie)) // Creating the movie structure return 1; // Decode first video track for (tnum=0; tnum < (unsigned int)(movie->num_htk + movie->num_stk + movie->num_vtk); tnum++) { if (movie->tk[tnum].track_type == 0) break; } if (movie->tk[tnum].track_type != 0) { printf("Error. 
Movie does not contain any video track\n"); return 1; } track = &movie->tk[tnum]; // Output info on first video tracl fprintf(stdout,"The first video track contains %d frames.\nWidth: %d, Height: %d \n\n", track->num_samples, track->w, track->h); max_codstrm_size = track->sample[0].sample_size-8; frame_codestream = (unsigned char*) malloc(max_codstrm_size * sizeof(unsigned char)); numframes = track->num_samples; for (snum=0; snum < numframes; snum++) { double init_time = opj_clock(); double elapsed_time; sample = &track->sample[snum]; if (sample->sample_size-8 > max_codstrm_size) { max_codstrm_size = sample->sample_size-8; if ((frame_codestream = (unsigned char*) realloc(frame_codestream, max_codstrm_size)) == NULL) { printf("Error reallocation memory\n"); return 1; }; } fseek(file,sample->offset+8,SEEK_SET); fread(frame_codestream, sample->sample_size-8, 1, file); // Assuming that jp and ftyp markers size do /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, frame_codestream, sample->sample_size-8); img = opj_decode(dinfo, cio); // Decode J2K to image #ifdef WANT_SYCC_TO_RGB if(img->color_space == CLRSPC_SYCC) { color_sycc_to_rgb(img); } #endif if(img->icc_profile_buf) { #if defined(HAVE_LIBLCMS1) || defined(HAVE_LIBLCMS2) color_apply_icc_profile(img); #endif free(img->icc_profile_buf); img->icc_profile_buf = NULL; img->icc_profile_len = 0; } if (((img->numcomps == 3) && (img->comps[0].dx == img->comps[1].dx / 2) && (img->comps[0].dx == img->comps[2].dx / 2 ) && (img->comps[0].dx == 1)) || (img->numcomps == 1)) { if (!imagetoyuv(img, argv[2])) // Convert image to YUV return 1; } else if ((img->numcomps == 3) && (img->comps[0].dx == 1) && (img->comps[1].dx == 1)&& (img->comps[2].dx == 1))// If YUV 4:4:4 input --> to bmp { fprintf(stdout,"The frames will be output in a bmp format (output_1.bmp, ...)\n"); sprintf(outfilename,"output_%d.bmp",snum); if (imagetobmp(img, outfilename)) // Convert image to BMP return 1; } else { fprintf(stdout,"Image 
component dimensions are unknown. Unable to output image\n"); fprintf(stdout,"The frames will be output in a j2k file (output_1.j2k, ...)\n"); sprintf(outfilename,"output_%d.j2k",snum); outfile = fopen(outfilename, "wb"); if (!outfile) { fprintf(stderr, "failed to open %s for writing\n",outfilename); return 1; } fwrite(frame_codestream,sample->sample_size-8,1,outfile); fclose(outfile); } /* close the byte stream */ opj_cio_close(cio); /* free image data structure */ opj_image_destroy(img); elapsed_time = opj_clock()-init_time; fprintf(stderr, "Frame number %d/%d decoded in %.2f mseconds\n", snum + 1, numframes, elapsed_time*1000); total_time += elapsed_time; } free(frame_codestream); fclose(file); /* free remaining structures */ if(dinfo) { mj2_destroy_decompress((opj_mj2_t*)dinfo->mj2_handle); } free(dinfo); fprintf(stdout, "%d frame(s) correctly decompressed\n", snum); fprintf(stdout,"Total decoding time: %.2f seconds (%.1f fps)\n", total_time, (float)numframes/total_time); return 0; }
/**
 * Decode one compressed video frame with OpenJPEG (1.x or 2.x API,
 * selected by HAVE_OPENJPEG_1) and push the decoded picture downstream.
 *
 * Late frames are dropped before decoding. Error handling uses goto-cleanup
 * labels, each releasing exactly the resources held at its failure point.
 *
 * Fix vs. previous revision: repaired the mangled '&params' argument to
 * opj_setup_decoder() (was corrupted to a non-compiling character).
 */
static GstFlowReturn
gst_openjpeg_dec_handle_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGDec *self = GST_OPENJPEG_DEC (decoder);
  GstFlowReturn ret = GST_FLOW_OK;
  gint64 deadline;
  GstMapInfo map;
#ifdef HAVE_OPENJPEG_1
  opj_dinfo_t *dec;
  opj_cio_t *io;
#else
  opj_codec_t *dec;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;
  opj_dparameters_t params;

  GST_DEBUG_OBJECT (self, "Handling frame");

  /* Skip decoding entirely when the frame is already past its deadline. */
  deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
  if (deadline < 0) {
    GST_LOG_OBJECT (self, "Dropping too late frame: deadline %" G_GINT64_FORMAT,
        deadline);
    ret = gst_video_decoder_drop_frame (decoder, frame);
    return ret;
  }

  dec = opj_create_decompress (self->codec_format);
  if (!dec)
    goto initialization_error;

  /* Route libopenjpeg diagnostics into GStreamer logging only when the
   * category is verbose enough to show them. */
#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_dec_opj_error;
    callbacks.warning_handler = gst_openjpeg_dec_opj_warning;
    callbacks.info_handler = gst_openjpeg_dec_opj_info;
    opj_set_event_mgr ((opj_common_ptr) dec, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) dec, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (dec, gst_openjpeg_dec_opj_info, self);
    opj_set_warning_handler (dec, gst_openjpeg_dec_opj_warning, self);
    opj_set_error_handler (dec, gst_openjpeg_dec_opj_error, self);
  } else {
    opj_set_info_handler (dec, NULL, NULL);
    opj_set_warning_handler (dec, NULL, NULL);
    opj_set_error_handler (dec, NULL, NULL);
  }
#endif

  params = self->params;
  if (self->ncomps)
    params.jpwl_exp_comps = self->ncomps;
  opj_setup_decoder (dec, &params);

  if (!gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ))
    goto map_read_error;

  /* is_jp2c: skip the 8-byte jp2c box header to reach the raw codestream. */
#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) dec, map.data + (self->is_jp2c ? 8 : 0),
      map.size - (self->is_jp2c ? 8 : 0));
  if (!io)
    goto open_error;

  image = opj_decode (dec, io);
  if (!image)
    goto decode_error;
#else
  stream = opj_stream_create (4096, OPJ_TRUE);
  if (!stream)
    goto open_error;

  mstream.data = map.data + (self->is_jp2c ? 8 : 0);
  mstream.offset = 0;
  mstream.size = map.size - (self->is_jp2c ? 8 : 0);

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  image = NULL;
  if (!opj_read_header (stream, dec, &image))
    goto decode_error;

  if (!opj_decode (dec, stream, image))
    goto decode_error;
#endif

  gst_buffer_unmap (frame->input_buffer, &map);

  ret = gst_openjpeg_dec_negotiate (self, image);
  if (ret != GST_FLOW_OK)
    goto negotiate_error;

  ret = gst_video_decoder_allocate_output_frame (decoder, frame);
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  if (!gst_video_frame_map (&vframe, &self->output_state->info,
          frame->output_buffer, GST_MAP_WRITE))
    goto map_write_error;

  self->fill_frame (&vframe, image);

  gst_video_frame_unmap (&vframe);

#ifdef HAVE_OPENJPEG_1
  opj_cio_close (io);
  opj_image_destroy (image);
  opj_destroy_decompress (dec);
#else
  opj_end_decompress (dec, stream);
  opj_stream_destroy (stream);
  opj_image_destroy (image);
  opj_destroy_codec (dec);
#endif

  ret = gst_video_decoder_finish_frame (decoder, frame);

  return ret;

initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG decoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_decompress (dec);
#else
    opj_destroy_codec (dec);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map input buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_decompress (dec);
#else
    opj_destroy_codec (dec);
#endif
    gst_buffer_unmap (frame->input_buffer, &map);
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to open OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
decode_error:
  {
    if (image)
      opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_destroy_decompress (dec);
#else
    opj_stream_destroy (stream);
    opj_destroy_codec (dec);
#endif
    gst_buffer_unmap (frame->input_buffer, &map);
    gst_video_codec_frame_unref (frame);

    GST_VIDEO_DECODER_ERROR (self, 1, STREAM, DECODE,
        ("Failed to decode OpenJPEG stream"), (NULL), ret);
    return ret;
  }
negotiate_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_destroy_decompress (dec);
#else
    opj_stream_destroy (stream);
    opj_destroy_codec (dec);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
        ("Failed to negotiate"), (NULL));
    return ret;
  }
allocate_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_destroy_decompress (dec);
#else
    opj_stream_destroy (stream);
    opj_destroy_codec (dec);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
map_write_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_destroy_decompress (dec);
#else
    opj_stream_destroy (stream);
    opj_destroy_codec (dec);
#endif
    gst_video_codec_frame_unref (frame);

    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to map output buffer"), (NULL));
    return GST_FLOW_ERROR;
  }
}
/**
 * Decode a JPEG 2000 packet with libopenjpeg 1.x.
 *
 * Two-pass decode: the header is parsed first (LIMIT_TO_MAIN_HEADER) to
 * learn dimensions and pick/validate a pixel format, the output buffer is
 * allocated, then the full codestream is decoded and copied into the frame.
 *
 * Fix vs. previous revision: packets shorter than 12 bytes are rejected
 * before the unconditional AV_RB32() reads at offsets 0/4/8, which could
 * read past the end of a tiny packet.
 *
 * @return number of consumed bytes on success, negative on error
 */
static int libopenjpeg_decode_frame(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt)
{
    uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    LibOpenJPEGContext *ctx = avctx->priv_data;
    AVFrame *picture = &ctx->image, *output = data;
    opj_dinfo_t *dec;
    opj_cio_t *stream;
    opj_image_t *image;
    int width, height, ret = -1;
    int pixel_size = 0;
    int ispacked = 0;
    int i;

    *data_size = 0;

    // Signature probing below reads 12 bytes unconditionally.
    if (buf_size < 12) {
        av_log(avctx, AV_LOG_ERROR, "Packet too small\n");
        return AVERROR_INVALIDDATA;
    }

    // Check if input is a raw jpeg2k codestream or in jp2 wrapping
    if((AV_RB32(buf) == 12) &&
       (AV_RB32(buf + 4) == JP2_SIG_TYPE) &&
       (AV_RB32(buf + 8) == JP2_SIG_VALUE)) {
        dec = opj_create_decompress(CODEC_JP2);
    } else {
        // If the AVPacket contains a jp2c box, then skip to
        // the starting byte of the codestream.
        if (AV_RB32(buf + 4) == AV_RB32("jp2c"))
            buf += 8;
        dec = opj_create_decompress(CODEC_J2K);
    }

    if(!dec) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing decoder.\n");
        return -1;
    }
    opj_set_event_mgr((opj_common_ptr)dec, NULL, NULL);

    ctx->dec_params.cp_limit_decoding = LIMIT_TO_MAIN_HEADER;
    // Tie decoder with decoding parameters
    opj_setup_decoder(dec, &ctx->dec_params);
    stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
    if(!stream) {
        av_log(avctx, AV_LOG_ERROR, "Codestream could not be opened for reading.\n");
        opj_destroy_decompress(dec);
        return -1;
    }

    // Decode the header only
    image = opj_decode_with_info(dec, stream, NULL);
    opj_cio_close(stream);
    if(!image) {
        av_log(avctx, AV_LOG_ERROR, "Error decoding codestream.\n");
        opj_destroy_decompress(dec);
        return -1;
    }
    width  = image->x1 - image->x0;
    height = image->y1 - image->y0;
    if(av_image_check_size(width, height, 0, avctx) < 0) {
        av_log(avctx, AV_LOG_ERROR, "%dx%d dimension invalid.\n", width, height);
        goto done;
    }
    avcodec_set_dimensions(avctx, width, height);

    /* Keep a caller-chosen pix_fmt only if it matches the codestream. */
    if (avctx->pix_fmt != PIX_FMT_NONE) {
        if (!libopenjpeg_matches_pix_fmt(image, avctx->pix_fmt)) {
            avctx->pix_fmt = PIX_FMT_NONE;
        }
    }
    if (avctx->pix_fmt == PIX_FMT_NONE) {
        avctx->pix_fmt = libopenjpeg_guess_pix_fmt(image);
    }
    if (avctx->pix_fmt == PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Unable to determine pixel format\n");
        goto done;
    }
    for (i = 0; i < image->numcomps; i++)
        if (image->comps[i].prec > avctx->bits_per_raw_sample)
            avctx->bits_per_raw_sample = image->comps[i].prec;

    if(picture->data[0])
        ff_thread_release_buffer(avctx, picture);

    if(ff_thread_get_buffer(avctx, picture) < 0){
        av_log(avctx, AV_LOG_ERROR, "ff_thread_get_buffer() failed\n");
        goto done;
    }

    /* Second pass: full decode of the codestream. */
    ctx->dec_params.cp_limit_decoding = NO_LIMITATION;
    ctx->dec_params.cp_reduce = avctx->lowres;
    // Tie decoder with decoding parameters
    opj_setup_decoder(dec, &ctx->dec_params);
    stream = opj_cio_open((opj_common_ptr)dec, buf, buf_size);
    if(!stream) {
        av_log(avctx, AV_LOG_ERROR, "Codestream could not be opened for reading.\n");
        goto done;
    }

    opj_image_destroy(image);
    // Decode the codestream
    image = opj_decode_with_info(dec, stream, NULL);
    opj_cio_close(stream);
    if(!image) {
        av_log(avctx, AV_LOG_ERROR, "Error decoding codestream.\n");
        goto done;
    }

    pixel_size = av_pix_fmt_descriptors[avctx->pix_fmt].comp[0].step_minus1 + 1;
    ispacked = libopenjpeg_ispacked(avctx->pix_fmt);

    /* Copy decoded components into the output frame, by bytes-per-pixel. */
    switch (pixel_size) {
    case 1:
        if (ispacked) {
            libopenjpeg_copy_to_packed8(picture, image);
        } else {
            libopenjpeg_copyto8(picture, image);
        }
        break;
    case 2:
        if (ispacked) {
            libopenjpeg_copy_to_packed8(picture, image);
        } else {
            libopenjpeg_copyto16(picture, image);
        }
        break;
    case 3:
    case 4:
        if (ispacked) {
            libopenjpeg_copy_to_packed8(picture, image);
        }
        break;
    case 6:
    case 8:
        if (ispacked) {
            libopenjpeg_copy_to_packed16(picture, image);
        }
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "unsupported pixel size %d\n", pixel_size);
        goto done;
    }

    *output    = ctx->image;
    *data_size = sizeof(AVPicture);
    ret = buf_size;

done:
    opj_image_destroy(image);
    opj_destroy_decompress(dec);
    return ret;
}
/**
 * Probe a J2K codestream file and fill the MJ2 compression parameters
 * (component count, geometry, precision, colourspace/subsampling, meth)
 * from its decoded header.
 *
 * @param fname path to a candidate .j2k file
 * @param cp    output: MJ2 compression parameters derived from the image
 * @return 1 on success, 0 if the file is missing, unreadable, not a raw
 *         J2K codestream, or uses an unsupported subsampling layout
 */
static int test_image(const char *fname, mj2_cparameters_t *cp)
{
    FILE *reader;
    opj_image_t *image = NULL;
    unsigned char *src;
    opj_dinfo_t *dinfo = NULL;
    opj_cio_t *cio;
    opj_dparameters_t dparameters;
    int success = 0;
    long src_len;

    if ((reader = fopen(fname, "rb")) == NULL)
        return success;
    fseek(reader, 0, SEEK_END);
    src_len = ftell(reader);
    fseek(reader, 0, SEEK_SET);

    /* Must at least hold the 4-byte magic we test below. */
    if (src_len < 4) {
        fclose(reader);
        return success;
    }
    src = (unsigned char *)malloc(src_len);
    if (src == NULL) {
        fclose(reader);
        return success;
    }
    if (fread(src, 1, src_len, reader) != (size_t)src_len) {
        fclose(reader);
        free(src);
        return success;
    }
    fclose(reader);

    /* Only raw J2K codestreams are accepted here (no JP2 wrapper). */
    if (memcmp(src, J2K_CODESTREAM_MAGIC, 4) != 0) {
        free(src);
        return success;
    }

    memset(&dparameters, 0, sizeof(opj_dparameters_t));
    opj_set_default_decoder_parameters(&dparameters);

    dinfo = opj_create_decompress(CODEC_J2K);
    if (dinfo == NULL) {
        free(src);
        return success;
    }
    opj_setup_decoder(dinfo, &dparameters);

    cio = opj_cio_open((opj_common_ptr)dinfo, src, src_len);
    if (cio == NULL) {
        free(src);
        goto fin;
    }

    image = opj_decode(dinfo, cio);

    free(src);
    /* src was freed above; clear the CIO's view of it so opj_cio_close
     * cannot touch the stale pointer. */
    cio->buffer = NULL;
    opj_cio_close(cio);

    if (image == NULL)
        goto fin;

    cp->numcomps = image->numcomps;
    cp->w = image->comps[0].w;
    cp->h = image->comps[0].h;
    cp->prec = image->comps[0].prec;

    if (image->numcomps > 2) {
        /* Classify the chroma subsampling from the per-component
         * sampling factors of the first three components. */
        if ((image->comps[0].dx == 1)
            && (image->comps[1].dx == 2)
            && (image->comps[2].dx == 2)
            && (image->comps[0].dy == 1)
            && (image->comps[1].dy == 2)
            && (image->comps[2].dy == 2)) /* horizontal and vertical */
        {
            /* Y420 */
            cp->enumcs = ENUMCS_SYCC;
            cp->CbCr_subsampling_dx = 2;
            cp->CbCr_subsampling_dy = 2;
        } else if ((image->comps[0].dx == 1)
                   && (image->comps[1].dx == 2)
                   && (image->comps[2].dx == 2)
                   && (image->comps[0].dy == 1)
                   && (image->comps[1].dy == 1)
                   && (image->comps[2].dy == 1)) /* horizontal only */
        {
            /* Y422 */
            cp->enumcs = ENUMCS_SYCC;
            cp->CbCr_subsampling_dx = 2;
            cp->CbCr_subsampling_dy = 1;
        } else if ((image->comps[0].dx == 1)
                   && (image->comps[1].dx == 1)
                   && (image->comps[2].dx == 1)
                   && (image->comps[0].dy == 1)
                   && (image->comps[1].dy == 1)
                   && (image->comps[2].dy == 1))
        {
            /* Y444 or RGB */
            if (image->color_space == CLRSPC_SRGB) {
                cp->enumcs = ENUMCS_SRGB;
                /* cp->CbCr_subsampling_dx = 0; */
                /* cp->CbCr_subsampling_dy = 0; */
            } else {
                cp->enumcs = ENUMCS_SYCC;
                cp->CbCr_subsampling_dx = 1;
                cp->CbCr_subsampling_dy = 1;
            }
        } else {
            /* Unsupported subsampling layout. */
            goto fin;
        }
    } else {
        cp->enumcs = ENUMCS_GRAY;
        /* cp->CbCr_subsampling_dx = 0; */
        /* cp->CbCr_subsampling_dy = 0; */
    }

    /* meth 2 = restricted ICC profile present, 1 = enumerated colourspace. */
    if (image->icc_profile_buf) {
        cp->meth = 2;
        free(image->icc_profile_buf);
        image->icc_profile_buf = NULL;
    } else {
        cp->meth = 1;
    }

    success = 1;

fin:
    if (dinfo)
        opj_destroy_decompress(dinfo);
    if (image)
        opj_image_destroy(image);
    return success;
}
int main(int argc, char *argv[]) { opj_cinfo_t* cinfo; opj_event_mgr_t event_mgr; /* event manager */ unsigned int snum; opj_mj2_t *movie; mj2_sample_t *sample; unsigned char* frame_codestream; FILE *mj2file, *j2kfile; char *j2kfilename; unsigned char *buf; int offset, mdat_initpos; opj_image_t img; opj_cio_t *cio; mj2_cparameters_t parameters; if (argc != 3) { printf("Usage: %s source_location mj2_filename\n",argv[0]); printf("Example: %s input/input output.mj2\n",argv[0]); return 1; } mj2file = fopen(argv[2], "wb"); if (!mj2file) { fprintf(stderr, "failed to open %s for writing\n", argv[2]); return 1; } memset(&img, 0, sizeof(opj_image_t)); /* configure the event callbacks (not required) setting of each callback is optionnal */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = error_callback; event_mgr.warning_handler = warning_callback; event_mgr.info_handler = info_callback; /* get a MJ2 decompressor handle */ cinfo = mj2_create_compress(); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)cinfo, &event_mgr, stderr); /* setup the decoder encoding parameters using user parameters */ memset(¶meters, 0, sizeof(mj2_cparameters_t)); movie = (opj_mj2_t*) cinfo->mj2_handle; j2kfilename = (char*)malloc(strlen(argv[1]) + 12);/* max. 
'%6d' */ sprintf(j2kfilename, "%s_00001.j2k",argv[1]); if(test_image(j2kfilename, ¶meters) == 0) goto fin; parameters.frame_rate = 25; /* DEFAULT */ mj2_setup_encoder(movie, ¶meters); /* Writing JP, FTYP and MDAT boxes Assuming that the JP and FTYP boxes won't be longer than 300 bytes */ buf = (unsigned char*) malloc (300 * sizeof(unsigned char)); cio = opj_cio_open(movie->cinfo, buf, 300); mj2_write_jp(cio); mj2_write_ftyp(movie, cio); mdat_initpos = cio_tell(cio); cio_skip(cio, 4); cio_write(cio,MJ2_MDAT, 4); fwrite(buf,cio_tell(cio),1,mj2file); free(buf); // Insert each j2k codestream in a JP2C box snum=0; offset = 0; while(1) { sample = &movie->tk[0].sample[snum]; sprintf(j2kfilename,"%s_%05d.j2k",argv[1],snum); j2kfile = fopen(j2kfilename, "rb"); if (!j2kfile) { if (snum==0) { // Could not open a single codestream fprintf(stderr, "failed to open %s for reading\n",j2kfilename); return 1; } else { // Tried to open a inexistant codestream fprintf(stdout,"%d frames are being added to the MJ2 file\n",snum); break; } } // Calculating offset for samples and chunks offset += cio_tell(cio); sample->offset = offset; movie->tk[0].chunk[snum].offset = offset; // There will be one sample per chunk // Calculating sample size fseek(j2kfile,0,SEEK_END); sample->sample_size = ftell(j2kfile) + 8; // Sample size is codestream + JP2C box header fseek(j2kfile,0,SEEK_SET); // Reading siz marker of j2k image for the first codestream if (snum==0) read_siz_marker(j2kfile, &img); // Writing JP2C box header frame_codestream = (unsigned char*) malloc (sample->sample_size+8); cio = opj_cio_open(movie->cinfo, frame_codestream, sample->sample_size); cio_write(cio,sample->sample_size, 4); // Sample size cio_write(cio,JP2_JP2C, 4); // JP2C // Writing codestream from J2K file to MJ2 file fread(frame_codestream+8,sample->sample_size-8,1,j2kfile); fwrite(frame_codestream,sample->sample_size,1,mj2file); cio_skip(cio, sample->sample_size-8); // Ending loop fclose(j2kfile); snum++; 
movie->tk[0].sample = (mj2_sample_t*) realloc(movie->tk[0].sample, (snum+1) * sizeof(mj2_sample_t)); movie->tk[0].chunk = (mj2_chunk_t*) realloc(movie->tk[0].chunk, (snum+1) * sizeof(mj2_chunk_t)); free(frame_codestream); } // Writing the MDAT box length in header offset += cio_tell(cio); buf = (unsigned char*) malloc (4 * sizeof(unsigned char)); cio = opj_cio_open(movie->cinfo, buf, 4); cio_write(cio,offset-mdat_initpos,4); fseek(mj2file,(long)mdat_initpos,SEEK_SET); fwrite(buf,4,1,mj2file); fseek(mj2file,0,SEEK_END); free(buf); // Setting movie parameters movie->tk[0].num_samples=snum; movie->tk[0].num_chunks=snum; setparams(movie, &img); // Writing MOOV box buf = (unsigned char*) malloc ((TEMP_BUF+snum*20) * sizeof(unsigned char)); cio = opj_cio_open(movie->cinfo, buf, (TEMP_BUF+snum*20)); mj2_write_moov(movie, cio); fwrite(buf,cio_tell(cio),1,mj2file); // Ending program free(img.comps); opj_cio_close(cio); fin: fclose(mj2file); mj2_destroy_compress(movie); free(j2kfilename); return 0; }
/* Encode one video frame to JPEG 2000.
 *
 * Compiled against either the OpenJPEG 1.x API (HAVE_OPENJPEG_1: cinfo/cio
 * objects, encoder-owned output buffer) or the 2.x API (codec/stream
 * objects with caller-provided read/write callbacks into a MemStream).
 * When self->is_jp2c is set, an 8-byte "jp2c" box header is prepended to
 * the codestream in the output buffer.
 *
 * Returns GST_FLOW_OK on success; on any failure the frame reference is
 * dropped and GST_FLOW_ERROR is returned via the labelled cleanup blocks
 * at the end of the function.
 */
static GstFlowReturn
gst_openjpeg_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstOpenJPEGEnc *self = GST_OPENJPEG_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;
#ifdef HAVE_OPENJPEG_1
  opj_cinfo_t *enc;
  GstMapInfo map;
  guint length;
  opj_cio_t *io;
#else
  opj_codec_t *enc;
  opj_stream_t *stream;
  MemStream mstream;
#endif
  opj_image_t *image;
  GstVideoFrame vframe;

  GST_DEBUG_OBJECT (self, "Handling frame");

  enc = opj_create_compress (self->codec_format);
  if (!enc)
    goto initialization_error;

  /* Route OpenJPEG's messages into the GStreamer log, but only when
   * TRACE-level debugging is active (the callbacks are chatty). */
#ifdef HAVE_OPENJPEG_1
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_event_mgr_t callbacks;

    callbacks.error_handler = gst_openjpeg_enc_opj_error;
    callbacks.warning_handler = gst_openjpeg_enc_opj_warning;
    callbacks.info_handler = gst_openjpeg_enc_opj_info;
    opj_set_event_mgr ((opj_common_ptr) enc, &callbacks, self);
  } else {
    opj_set_event_mgr ((opj_common_ptr) enc, NULL, NULL);
  }
#else
  if (G_UNLIKELY (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >=
          GST_LEVEL_TRACE)) {
    opj_set_info_handler (enc, gst_openjpeg_enc_opj_info, self);
    opj_set_warning_handler (enc, gst_openjpeg_enc_opj_warning, self);
    opj_set_error_handler (enc, gst_openjpeg_enc_opj_error, self);
  } else {
    opj_set_info_handler (enc, NULL, NULL);
    opj_set_warning_handler (enc, NULL, NULL);
    opj_set_error_handler (enc, NULL, NULL);
  }
#endif

  /* Copy the raw frame into an opj_image_t; vframe is unmapped as soon
   * as the copy is done. */
  if (!gst_video_frame_map (&vframe, &self->input_state->info,
          frame->input_buffer, GST_MAP_READ))
    goto map_read_error;

  image = gst_openjpeg_enc_fill_image (self, &vframe);
  if (!image)
    goto fill_image_error;
  gst_video_frame_unmap (&vframe);

  opj_setup_encoder (enc, &self->params, image);

#ifdef HAVE_OPENJPEG_1
  io = opj_cio_open ((opj_common_ptr) enc, NULL, 0);
  if (!io)
    goto open_error;

  if (!opj_encode (enc, io, image, NULL))
    goto encode_error;

  opj_image_destroy (image);

  length = cio_tell (io);

  /* Reserve 8 extra leading bytes for the jp2c box header if needed. */
  ret =
      gst_video_encoder_allocate_output_frame (encoder, frame,
      length + (self->is_jp2c ? 8 : 0));
  if (ret != GST_FLOW_OK)
    goto allocate_error;

  gst_buffer_fill (frame->output_buffer, self->is_jp2c ? 8 : 0, io->buffer,
      length);
  if (self->is_jp2c) {
    gst_buffer_map (frame->output_buffer, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, length + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_buffer_unmap (frame->output_buffer, &map);
  }

  opj_cio_close (io);
  opj_destroy_compress (enc);
#else
  stream = opj_stream_create (4096, OPJ_FALSE);
  if (!stream)
    goto open_error;

  /* The MemStream grows as the write callback appends encoded bytes. */
  mstream.allocsize = 4096;
  mstream.data = g_malloc (mstream.allocsize);
  mstream.offset = 0;
  mstream.size = 0;

  opj_stream_set_read_function (stream, read_fn);
  opj_stream_set_write_function (stream, write_fn);
  opj_stream_set_skip_function (stream, skip_fn);
  opj_stream_set_seek_function (stream, seek_fn);
  opj_stream_set_user_data (stream, &mstream);
  opj_stream_set_user_data_length (stream, mstream.size);

  if (!opj_start_compress (enc, image, stream))
    goto encode_error;

  if (!opj_encode (enc, stream))
    goto encode_error;

  if (!opj_end_compress (enc, stream))
    goto encode_error;

  opj_image_destroy (image);
  opj_stream_destroy (stream);
  opj_destroy_codec (enc);

  frame->output_buffer = gst_buffer_new ();

  if (self->is_jp2c) {
    GstMapInfo map;
    GstMemory *mem;

    /* Prepend the 8-byte jp2c box header as its own memory block. */
    mem = gst_allocator_alloc (NULL, 8, NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    GST_WRITE_UINT32_BE (map.data, mstream.size + 8);
    GST_WRITE_UINT32_BE (map.data + 4, GST_MAKE_FOURCC ('j', 'p', '2', 'c'));
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (frame->output_buffer, mem);
  }

  /* Hand ownership of mstream.data to the buffer (freed via g_free). */
  gst_buffer_append_memory (frame->output_buffer,
      gst_memory_new_wrapped (0, mstream.data, mstream.allocsize, 0,
          mstream.size, NULL, (GDestroyNotify) g_free));
#endif

  ret = gst_video_encoder_finish_frame (encoder, frame);

  return ret;

/* ERRORS: each label releases exactly what was acquired before the jump. */
initialization_error:
  {
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to initialize OpenJPEG encoder"), (NULL));
    return GST_FLOW_ERROR;
  }
map_read_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, CORE, FAILED, ("Failed to map input buffer"),
        (NULL));
    return GST_FLOW_ERROR;
  }
fill_image_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_frame_unmap (&vframe);
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT,
        ("Failed to fill OpenJPEG image"), (NULL));
    return GST_FLOW_ERROR;
  }
open_error:
  {
    opj_image_destroy (image);
#ifdef HAVE_OPENJPEG_1
    opj_destroy_compress (enc);
#else
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, LIBRARY, INIT, ("Failed to open OpenJPEG data"),
        (NULL));
    return GST_FLOW_ERROR;
  }
encode_error:
  {
#ifdef HAVE_OPENJPEG_1
    opj_cio_close (io);
    opj_image_destroy (image);
    opj_destroy_compress (enc);
#else
    opj_stream_destroy (stream);
    g_free (mstream.data);
    opj_image_destroy (image);
    opj_destroy_codec (enc);
#endif
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, STREAM, ENCODE,
        ("Failed to encode OpenJPEG stream"), (NULL));
    return GST_FLOW_ERROR;
  }
#ifdef HAVE_OPENJPEG_1
allocate_error:
  {
    opj_cio_close (io);
    opj_destroy_compress (enc);
    gst_video_codec_frame_unref (frame);
    GST_ELEMENT_ERROR (self, CORE, FAILED,
        ("Failed to allocate output buffer"), (NULL));
    return ret;
  }
#endif
}
bool DotNetEncode(MarshalledImage* image, bool lossless) { try { opj_cparameters cparameters; opj_set_default_encoder_parameters(&cparameters); cparameters.cp_disto_alloc = 1; if (lossless) { cparameters.tcp_numlayers = 1; cparameters.tcp_rates[0] = 0; } else { cparameters.tcp_numlayers = 5; cparameters.tcp_rates[0] = 1920; cparameters.tcp_rates[1] = 480; cparameters.tcp_rates[2] = 120; cparameters.tcp_rates[3] = 30; cparameters.tcp_rates[4] = 10; cparameters.irreversible = 1; if (image->components >= 3) { cparameters.tcp_mct = 1; } } cparameters.cp_comment = (char*)""; opj_image_comptparm comptparm[5]; for (int i = 0; i < image->components; i++) { comptparm[i].bpp = 8; comptparm[i].prec = 8; comptparm[i].sgnd = 0; comptparm[i].dx = 1; comptparm[i].dy = 1; comptparm[i].x0 = 0; comptparm[i].y0 = 0; comptparm[i].w = image->width; comptparm[i].h = image->height; } opj_image_t* jp2_image = opj_image_create(image->components, comptparm, CLRSPC_SRGB); if (jp2_image == NULL) throw "opj_image_create failed"; jp2_image->x0 = 0; jp2_image->y0 = 0; jp2_image->x1 = image->width; jp2_image->y1 = image->height; int n = image->width * image->height; for (int i = 0; i < image->components; i++) std::copy(image->decoded + i * n, image->decoded + (i + 1) * n, jp2_image->comps[i].data); opj_cinfo* cinfo = opj_create_compress(CODEC_J2K); opj_setup_encoder(cinfo, &cparameters, jp2_image); opj_cio* cio = opj_cio_open((opj_common_ptr)cinfo, NULL, 0); if (cio == NULL) throw "opj_cio_open failed"; if (!opj_encode(cinfo, cio, jp2_image, cparameters.index)) return false; image->length = cio_tell(cio); image->encoded = new unsigned char[image->length]; std::copy(cio->buffer, cio->buffer + image->length, image->encoded); opj_image_destroy(jp2_image); opj_destroy_compress(cinfo); opj_cio_close(cio); return true; } catch (...) { return false; } }
/* Read one tile of an Aperio slide into dest (premultiplied ARGB).
 *
 * Tiles compressed with a standard TIFF scheme are delegated to the
 * generic reader; Aperio's JP2K-YCbCr/JP2K-RGB tiles are decoded here
 * with OpenJPEG 1.x. Errors are reported through _openslide_set_error.
 */
static void aperio_tiff_tilereader(openslide_t *osr, TIFF *tiff,
                                   uint32_t *dest,
                                   int64_t x, int64_t y,
                                   int32_t w, int32_t h) {
  // which compression?
  uint16_t compression_mode;
  TIFFGetField(tiff, TIFFTAG_COMPRESSION, &compression_mode);

  // not for us? fallback
  if ((compression_mode != APERIO_COMPRESSION_JP2K_YCBCR) &&
      (compression_mode != APERIO_COMPRESSION_JP2K_RGB)) {
    _openslide_generic_tiff_tilereader(osr, tiff, dest, x, y, w, h);
    return;
  }

  // else, JPEG 2000!
  opj_cio_t *stream = NULL;
  opj_dinfo_t *dinfo = NULL;
  opj_image_t *image = NULL;
  opj_image_comp_t *comps = NULL;

  // note: don't use info_handler, it outputs lots of junk
  opj_event_mgr_t event_callbacks = {
    .error_handler = error_callback,
    .warning_handler = warning_callback,
  };

  // get tile number
  ttile_t tile_no = TIFFComputeTile(tiff, x, y, 0, 0);

  //  g_debug("aperio reading tile_no: %d", tile_no);

  // get tile size
  toff_t *sizes;
  if (TIFFGetField(tiff, TIFFTAG_TILEBYTECOUNTS, &sizes) == 0) {
    _openslide_set_error(osr, "Cannot get tile size");
    return;  // ok, haven't allocated anything yet
  }
  tsize_t tile_size = sizes[tile_no];

  // get raw tile
  tdata_t buf = g_slice_alloc(tile_size);
  tsize_t size = TIFFReadRawTile(tiff, tile_no, buf, tile_size);
  if (size == -1) {
    _openslide_set_error(osr, "Cannot get raw tile");
    goto DONE;
  }

  // init decompressor
  opj_dparameters_t parameters;
  dinfo = opj_create_decompress(CODEC_J2K);
  opj_set_default_decoder_parameters(&parameters);
  opj_setup_decoder(dinfo, &parameters);
  stream = opj_cio_open((opj_common_ptr) dinfo, buf, size);
  opj_set_event_mgr((opj_common_ptr) dinfo, &event_callbacks, osr);

  // decode
  image = opj_decode(dinfo, stream);

  // check error
  if (openslide_get_error(osr)) {
    goto DONE;
  }

  // opj_decode can return NULL without going through the error
  // callback; guard before dereferencing
  if (image == NULL) {
    _openslide_set_error(osr, "Cannot decode tile");
    goto DONE;
  }

  comps = image->comps;

  // sanity check
  if (image->numcomps != 3) {
    _openslide_set_error(osr, "image->numcomps != 3");
    goto DONE;
  }

  // TODO more checks?

  copy_aperio_tile(compression_mode, comps, dest,
                   w, h,
                   w / comps[0].w, h / comps[0].h,
                   w / comps[1].w, h / comps[1].h,
                   w / comps[2].w, h / comps[2].h);

 DONE:
  // erase
  g_slice_free1(tile_size, buf);

  if (image) opj_image_destroy(image);
  if (stream) opj_cio_close(stream);
  if (dinfo) opj_destroy_decompress(dinfo);
}
// load the jpeg2000 file format bool wxJPEG2000Handler::LoadFile(wxImage *image, wxInputStream& stream, bool verbose, int index) { opj_dparameters_t parameters; /* decompression parameters */ opj_event_mgr_t event_mgr; /* event manager */ opj_image_t *opjimage = NULL; unsigned char *src = NULL; unsigned char *ptr; int file_length, jp2c_point, jp2h_point; unsigned long int jp2hboxlen, jp2cboxlen; opj_codestream_info_t cstr_info; /* Codestream information structure */ unsigned char hdr[24]; int jpfamform; // destroy the image image->Destroy(); /* read the beginning of the file to check the type */ if (!stream.Read(hdr, WXSIZEOF(hdr))) return false; if ((jpfamform = jpeg2000familytype(hdr, WXSIZEOF(hdr))) < 0) return false; stream.SeekI(0, wxFromStart); /* handle to a decompressor */ opj_dinfo_t* dinfo = NULL; opj_cio_t *cio = NULL; /* configure the event callbacks */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = jpeg2000_error_callback; event_mgr.warning_handler = jpeg2000_warning_callback; event_mgr.info_handler = jpeg2000_info_callback; /* set decoding parameters to default values */ opj_set_default_decoder_parameters(¶meters); /* prepare parameters */ strncpy(parameters.infile, "", sizeof(parameters.infile) - 1); strncpy(parameters.outfile, "", sizeof(parameters.outfile) - 1); parameters.decod_format = jpfamform; parameters.cod_format = BMP_DFMT; if (m_reducefactor) parameters.cp_reduce = m_reducefactor; if (m_qualitylayers) parameters.cp_layer = m_qualitylayers; /*if (n_components) parameters. 
= n_components;*/ /* JPWL only */ #ifdef USE_JPWL parameters.jpwl_exp_comps = m_expcomps; parameters.jpwl_max_tiles = m_maxtiles; parameters.jpwl_correct = m_enablejpwl; #endif /* USE_JPWL */ /* get a decoder handle */ if (jpfamform == JP2_CFMT || jpfamform == MJ2_CFMT) dinfo = opj_create_decompress(CODEC_JP2); else if (jpfamform == J2K_CFMT) dinfo = opj_create_decompress(CODEC_J2K); else return false; /* find length of the stream */ stream.SeekI(0, wxFromEnd); file_length = (int) stream.TellI(); /* it's a movie */ if (jpfamform == MJ2_CFMT) { /* search for the first codestream box and the movie header box */ jp2c_point = searchjpeg2000c(stream, file_length, m_framenum); jp2h_point = searchjpeg2000headerbox(stream, file_length); // read the jp2h box and store it stream.SeekI(jp2h_point, wxFromStart); stream.Read(&jp2hboxlen, sizeof(unsigned long int)); jp2hboxlen = BYTE_SWAP4(jp2hboxlen); // read the jp2c box and store it stream.SeekI(jp2c_point, wxFromStart); stream.Read(&jp2cboxlen, sizeof(unsigned long int)); jp2cboxlen = BYTE_SWAP4(jp2cboxlen); // malloc memory source src = (unsigned char *) malloc(jpeg2000headSIZE + jp2hboxlen + jp2cboxlen); // copy the jP and ftyp memcpy(src, jpeg2000head, jpeg2000headSIZE); // copy the jp2h stream.SeekI(jp2h_point, wxFromStart); stream.Read(&src[jpeg2000headSIZE], jp2hboxlen); // copy the jp2c stream.SeekI(jp2c_point, wxFromStart); stream.Read(&src[jpeg2000headSIZE + jp2hboxlen], jp2cboxlen); } else if (jpfamform == JP2_CFMT || jpfamform == J2K_CFMT) { /* It's a plain image */ /* get data */ stream.SeekI(0, wxFromStart); src = (unsigned char *) malloc(file_length); stream.Read(src, file_length); } else return false; /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr); /* setup the decoder decoding parameters using user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ if (jpfamform == MJ2_CFMT) cio = 
opj_cio_open((opj_common_ptr)dinfo, src, jpeg2000headSIZE + jp2hboxlen + jp2cboxlen); else if (jpfamform == JP2_CFMT || jpfamform == J2K_CFMT) cio = opj_cio_open((opj_common_ptr)dinfo, src, file_length); else { free(src); return false; } /* decode the stream and fill the image structure */ opjimage = opj_decode_with_info(dinfo, cio, &cstr_info); if (!opjimage) { wxMutexGuiEnter(); wxLogError(wxT("JPEG 2000 failed to decode image!")); wxMutexGuiLeave(); opj_destroy_decompress(dinfo); opj_cio_close(cio); free(src); return false; } /* close the byte stream */ opj_cio_close(cio); /* - At this point, we have the structure "opjimage" that is filled with decompressed data, as processed by the OpenJPEG decompression engine - We need to fill the class "image" with the proper pixel sample values */ { int shiftbpp; int c, tempcomps; // check components number if (m_components > opjimage->numcomps) m_components = opjimage->numcomps; // check image depth (only on the first one, for now) if (m_components) shiftbpp = opjimage->comps[m_components - 1].prec - 8; else shiftbpp = opjimage->comps[0].prec - 8; // prepare image size if (m_components) image->Create(opjimage->comps[m_components - 1].w, opjimage->comps[m_components - 1].h, true); else image->Create(opjimage->comps[0].w, opjimage->comps[0].h, true); // access image raw data image->SetMask(false); ptr = image->GetData(); // workaround for components different from 1 or 3 if ((opjimage->numcomps != 1) && (opjimage->numcomps != 3)) { #ifndef __WXGTK__ wxMutexGuiEnter(); #endif /* __WXGTK__ */ wxLogMessage(wxT("JPEG2000: weird number of components")); #ifndef __WXGTK__ wxMutexGuiLeave(); #endif /* __WXGTK__ */ tempcomps = 1; } else tempcomps = opjimage->numcomps; // workaround for subsampled components for (c = 1; c < tempcomps; c++) { if ((opjimage->comps[c].w != opjimage->comps[c - 1].w) || (opjimage->comps[c].h != opjimage->comps[c - 1].h)) { tempcomps = 1; break; } } // workaround for different precision components for (c = 
1; c < tempcomps; c++) { if (opjimage->comps[c].bpp != opjimage->comps[c - 1].bpp) { tempcomps = 1; break; } } // only one component selected if (m_components) tempcomps = 1; // RGB color picture if (tempcomps == 3) { int row, col; int *r = opjimage->comps[0].data; int *g = opjimage->comps[1].data; int *b = opjimage->comps[2].data; if (shiftbpp > 0) { for (row = 0; row < opjimage->comps[0].h; row++) { for (col = 0; col < opjimage->comps[0].w; col++) { *(ptr++) = (*(r++)) >> shiftbpp; *(ptr++) = (*(g++)) >> shiftbpp; *(ptr++) = (*(b++)) >> shiftbpp; } } } else if (shiftbpp < 0) { for (row = 0; row < opjimage->comps[0].h; row++) { for (col = 0; col < opjimage->comps[0].w; col++) { *(ptr++) = (*(r++)) << -shiftbpp; *(ptr++) = (*(g++)) << -shiftbpp; *(ptr++) = (*(b++)) << -shiftbpp; } } } else { for (row = 0; row < opjimage->comps[0].h; row++) { for (col = 0; col < opjimage->comps[0].w; col++) { *(ptr++) = *(r++); *(ptr++) = *(g++); *(ptr++) = *(b++); } } } }
/**
 * Encode one raw frame to JPEG 2000 (FFmpeg encode callback, OpenJPEG 1.x).
 *
 * Copies the frame into ctx->image with the copier matching the pixel
 * format (packed, planar 8-bit, or planar 16-bit; GBR planar formats are
 * reordered to RGB via a cloned frame first), then runs the OpenJPEG
 * encoder and emits the codestream as one keyframe packet.
 *
 * @return 0 on success, negative AVERROR on failure
 */
static int libopenjpeg_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                    const AVFrame *frame, int *got_packet)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    opj_image_t *image      = ctx->image;
    opj_cinfo_t *compress   = NULL;
    opj_cio_t *stream       = NULL;
    int cpyresult = 0;
    int ret, len;
    AVFrame *gbrframe;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_YA8:
        cpyresult = libopenjpeg_copy_packed8(avctx, frame, image);
        break;
    case AV_PIX_FMT_XYZ12:
        cpyresult = libopenjpeg_copy_packed12(avctx, frame, image);
        break;
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_YA16:
        cpyresult = libopenjpeg_copy_packed16(avctx, frame, image);
        break;
    case AV_PIX_FMT_GBR24P:
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        // Clone so the plane pointers can be permuted from GBR order to
        // RGB order without touching the caller's frame.
        gbrframe = av_frame_clone(frame);
        if (!gbrframe)
            return AVERROR(ENOMEM);
        gbrframe->data[0] = frame->data[2]; // swap to be rgb
        gbrframe->data[1] = frame->data[0];
        gbrframe->data[2] = frame->data[1];
        gbrframe->linesize[0] = frame->linesize[2];
        gbrframe->linesize[1] = frame->linesize[0];
        gbrframe->linesize[2] = frame->linesize[1];
        if (avctx->pix_fmt == AV_PIX_FMT_GBR24P) {
            cpyresult = libopenjpeg_copy_unpacked8(avctx, gbrframe, image);
        } else {
            cpyresult = libopenjpeg_copy_unpacked16(avctx, gbrframe, image);
        }
        av_frame_free(&gbrframe);
        break;
    case AV_PIX_FMT_GRAY8:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV440P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVA420P:
    case AV_PIX_FMT_YUVA422P:
    case AV_PIX_FMT_YUVA444P:
        cpyresult = libopenjpeg_copy_unpacked8(avctx, frame, image);
        break;
    case AV_PIX_FMT_GRAY16:
    case AV_PIX_FMT_YUV420P9:
    case AV_PIX_FMT_YUV422P9:
    case AV_PIX_FMT_YUV444P9:
    case AV_PIX_FMT_YUVA420P9:
    case AV_PIX_FMT_YUVA422P9:
    case AV_PIX_FMT_YUVA444P9:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUVA444P10:
    case AV_PIX_FMT_YUVA422P10:
    case AV_PIX_FMT_YUVA420P10:
    case AV_PIX_FMT_YUV420P12:
    case AV_PIX_FMT_YUV422P12:
    case AV_PIX_FMT_YUV444P12:
    case AV_PIX_FMT_YUV420P14:
    case AV_PIX_FMT_YUV422P14:
    case AV_PIX_FMT_YUV444P14:
    case AV_PIX_FMT_YUV444P16:
    case AV_PIX_FMT_YUV422P16:
    case AV_PIX_FMT_YUV420P16:
    case AV_PIX_FMT_YUVA444P16:
    case AV_PIX_FMT_YUVA422P16:
    case AV_PIX_FMT_YUVA420P16:
        cpyresult = libopenjpeg_copy_unpacked16(avctx, frame, image);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The frame's pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return AVERROR(EINVAL);
        break;
    }

    if (!cpyresult) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not copy the frame data to the internal image buffer\n");
        return -1;
    }

    compress = opj_create_compress(ctx->format);
    if (!compress) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the compressor\n");
        return AVERROR(ENOMEM);
    }

    opj_setup_encoder(compress, &ctx->enc_params, image);

    stream = opj_cio_open((opj_common_ptr) compress, NULL, 0);
    if (!stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        ret = AVERROR(ENOMEM);
        goto done;   // was a leak of 'compress' before
    }

    memset(&ctx->event_mgr, 0, sizeof(ctx->event_mgr));
    ctx->event_mgr.info_handler    = info_callback;
    ctx->event_mgr.error_handler   = error_callback;
    ctx->event_mgr.warning_handler = warning_callback;
    opj_set_event_mgr((opj_common_ptr) compress, &ctx->event_mgr, avctx);

    if (!opj_encode(compress, stream, image, NULL)) {
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        ret = -1;
        goto done;   // was a leak of 'stream' and 'compress' before
    }

    len = cio_tell(stream);
    if ((ret = ff_alloc_packet2(avctx, pkt, len, 0)) < 0) {
        goto done;   // was a leak of 'stream' and 'compress' before
    }

    memcpy(pkt->data, stream->buffer, len);
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    ret = 0;

done:
    // Single cleanup path for both success and failure.
    if (stream)
        opj_cio_close(stream);
    if (compress)
        opj_destroy_compress(compress);
    return ret;
}
/* -------------------------------------------------------------------------- -------------------- MAIN METHOD, CALLED BY JAVA -----------------------*/ JNIEXPORT jint JNICALL Java_org_openJpeg_OpenJPEGJavaDecoder_internalDecodeJ2KtoImage(JNIEnv *env, jobject obj, jobjectArray javaParameters) { int argc; /* To simulate the command line parameters (taken from the javaParameters variable) and be able to re-use the */ char **argv; /* 'parse_cmdline_decoder' method taken from the j2k_to_image project */ opj_dparameters_t parameters; /* decompression parameters */ img_fol_t img_fol; opj_event_mgr_t event_mgr; /* event manager */ opj_image_t *image = NULL; FILE *fsrc = NULL; unsigned char *src = NULL; int file_length; int num_images; int i,j,imageno; opj_dinfo_t* dinfo = NULL; /* handle to a decompressor */ opj_cio_t *cio = NULL; int w,h; long min_value, max_value; short tempS; unsigned char tempUC, tempUC1, tempUC2; /* ==> Access variables to the Java member variables*/ jsize arraySize; jclass cls; jobject object; jboolean isCopy; jfieldID fid; jbyteArray jba; jshortArray jsa; jintArray jia; jbyte *jbBody, *ptrBBody; jshort *jsBody, *ptrSBody; jint *jiBody, *ptrIBody; callback_variables_t msgErrorCallback_vars; /* <=== access variable to Java member variables */ int *ptr, *ptr1, *ptr2; /* <== To transfer the decoded image to Java*/ /* configure the event callbacks */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = error_callback; event_mgr.warning_handler = warning_callback; event_mgr.info_handler = info_callback; /* JNI reference to the calling class*/ cls = (*env)->GetObjectClass(env, obj); /* Pointers to be able to call a Java method for all the info and error messages*/ msgErrorCallback_vars.env = env; msgErrorCallback_vars.jobj = &obj; msgErrorCallback_vars.message_mid = (*env)->GetMethodID(env, cls, "logMessage", "(Ljava/lang/String;)V"); msgErrorCallback_vars.error_mid = (*env)->GetMethodID(env, cls, "logError", "(Ljava/lang/String;)V"); 
/* Get the String[] containing the parameters, and converts it into a char** to simulate command line arguments.*/ arraySize = (*env)->GetArrayLength(env, javaParameters); argc = (int) arraySize +1; argv = opj_malloc(argc*sizeof(char*)); argv[0] = "ProgramName.exe"; /* The program name: useless*/ j=0; for (i=1; i<argc; i++) { object = (*env)->GetObjectArrayElement(env, javaParameters, i-1); argv[i] = (char*)(*env)->GetStringUTFChars(env, object, &isCopy); } /*printf("C: decoder params = "); for (i=0; i<argc; i++) { printf("[%s]",argv[i]); } printf("\n");*/ /* set decoding parameters to default values */ opj_set_default_decoder_parameters(¶meters); parameters.decod_format = J2K_CFMT; /* parse input and get user encoding parameters */ if(parse_cmdline_decoder(argc, argv, ¶meters,&img_fol) == 1) { /* Release the Java arguments array*/ for (i=1; i<argc; i++) (*env)->ReleaseStringUTFChars(env, (*env)->GetObjectArrayElement(env, javaParameters, i-1), argv[i]); return -1; } /* Release the Java arguments array*/ for (i=1; i<argc; i++) (*env)->ReleaseStringUTFChars(env, (*env)->GetObjectArrayElement(env, javaParameters, i-1), argv[i]); num_images=1; /* Get additional information from the Java object variables*/ fid = (*env)->GetFieldID(env, cls,"skippedResolutions", "I"); parameters.cp_reduce = (short) (*env)->GetIntField(env, obj, fid); /*Decoding image one by one*/ for(imageno = 0; imageno < num_images ; imageno++) { image = NULL; fprintf(stderr,"\n"); /* read the input file and put it in memory into the 'src' object, if the -i option is given in JavaParameters. Implemented for debug purpose. 
*/ /* -------------------------------------------------------------- */ if (parameters.infile && parameters.infile[0]!='\0') { /*printf("C: opening [%s]\n", parameters.infile);*/ fsrc = fopen(parameters.infile, "rb"); if (!fsrc) { fprintf(stderr, "ERROR -> failed to open %s for reading\n", parameters.infile); return 1; } fseek(fsrc, 0, SEEK_END); file_length = ftell(fsrc); fseek(fsrc, 0, SEEK_SET); src = (unsigned char *) opj_malloc(file_length); fread(src, 1, file_length, fsrc); fclose(fsrc); /*printf("C: %d bytes read from file\n",file_length);*/ } else { /* Preparing the transfer of the codestream from Java to C*/ /*printf("C: before transfering codestream\n");*/ fid = (*env)->GetFieldID(env, cls,"compressedStream", "[B"); jba = (*env)->GetObjectField(env, obj, fid); file_length = (*env)->GetArrayLength(env, jba); jbBody = (*env)->GetByteArrayElements(env, jba, &isCopy); src = (unsigned char*)jbBody; } /* decode the code-stream */ /* ---------------------- */ switch(parameters.decod_format) { case J2K_CFMT: { /* JPEG-2000 codestream */ /* get a decoder handle */ dinfo = opj_create_decompress(CODEC_J2K); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, &msgErrorCallback_vars); /* setup the decoder decoding parameters using user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, src, file_length); /* decode the stream and fill the image structure */ image = opj_decode(dinfo, cio); if(!image) { fprintf(stderr, "ERROR -> j2k_to_image: failed to decode image!\n"); opj_destroy_decompress(dinfo); opj_cio_close(cio); return 1; } /* close the byte stream */ opj_cio_close(cio); } break; case JP2_CFMT: { /* JPEG 2000 compressed image data */ /* get a decoder handle */ dinfo = opj_create_decompress(CODEC_JP2); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, 
&msgErrorCallback_vars); /* setup the decoder decoding parameters using the current image and user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, src, file_length); /* decode the stream and fill the image structure */ image = opj_decode(dinfo, cio); if(!image) { fprintf(stderr, "ERROR -> j2k_to_image: failed to decode image!\n"); opj_destroy_decompress(dinfo); opj_cio_close(cio); return 1; } /* close the byte stream */ opj_cio_close(cio); } break; case JPT_CFMT: { /* JPEG 2000, JPIP */ /* get a decoder handle */ dinfo = opj_create_decompress(CODEC_JPT); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, &msgErrorCallback_vars); /* setup the decoder decoding parameters using user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, src, file_length); /* decode the stream and fill the image structure */ image = opj_decode(dinfo, cio); if(!image) { fprintf(stderr, "ERROR -> j2k_to_image: failed to decode image!\n"); opj_destroy_decompress(dinfo); opj_cio_close(cio); return 1; } /* close the byte stream */ opj_cio_close(cio); } break; default: fprintf(stderr, "skipping file..\n"); continue; } /* free the memory containing the code-stream */ if (parameters.infile && parameters.infile[0]!='\0') { opj_free(src); } else { (*env)->ReleaseByteArrayElements(env, jba, jbBody, 0); } src = NULL; /* create output image. If the -o parameter is given in the JavaParameters, write the decoded version into a file. Implemented for debug purpose. 
*/ /* ---------------------------------- */ switch (parameters.cod_format) { case PXM_DFMT: /* PNM PGM PPM */ if (imagetopnm(image, parameters.outfile)) { fprintf(stdout,"Outfile %s not generated\n",parameters.outfile); } else { fprintf(stdout,"Generated Outfile %s\n",parameters.outfile); } break; case PGX_DFMT: /* PGX */ if(imagetopgx(image, parameters.outfile)){ fprintf(stdout,"Outfile %s not generated\n",parameters.outfile); } else { fprintf(stdout,"Generated Outfile %s\n",parameters.outfile); } break; case BMP_DFMT: /* BMP */ if(imagetobmp(image, parameters.outfile)){ fprintf(stdout,"Outfile %s not generated\n",parameters.outfile); } else { fprintf(stdout,"Generated Outfile %s\n",parameters.outfile); } break; } /* ========= Return the image to the Java structure ===============*/ #ifdef CHECK_THRESHOLDS printf("C: checking thresholds\n"); #endif /* First compute the real with and height, in function of the resolutions decoded.*/ /*wr = (image->comps[0].w + (1 << image->comps[0].factor) -1) >> image->comps[0].factor;*/ /*hr = (image->comps[0].h + (1 << image->comps[0].factor) -1) >> image->comps[0].factor;*/ w = image->comps[0].w; h = image->comps[0].h; if (image->numcomps==3) { /* 3 components color image*/ ptr = image->comps[0].data; ptr1 = image->comps[1].data; ptr2 = image->comps[2].data; #ifdef CHECK_THRESHOLDS if (image->comps[0].sgnd) { min_value = -128; max_value = 127; } else { min_value = 0; max_value = 255; } #endif /* Get the pointer to the Java structure where the data must be copied*/ fid = (*env)->GetFieldID(env, cls,"image24", "[I"); jia = (*env)->GetObjectField(env, obj, fid); jiBody = (*env)->GetIntArrayElements(env, jia, 0); ptrIBody = jiBody; printf("C: transfering image24: %d int to Java pointer=%d\n",image->numcomps*w*h, ptrIBody); for (i=0; i<w*h; i++) { tempUC = (unsigned char)(ptr[i]); tempUC1 = (unsigned char)(ptr1[i]); tempUC2 = (unsigned char)(ptr2[i]); #ifdef CHECK_THRESHOLDS if (tempUC < min_value) tempUC=min_value; else if (tempUC 
> max_value) tempUC=max_value; if (tempUC1 < min_value) tempUC1=min_value; else if (tempUC1 > max_value) tempUC1=max_value; if (tempUC2 < min_value) tempUC2=min_value; else if (tempUC2 > max_value) tempUC2=max_value; #endif *(ptrIBody++) = (int) ( (tempUC2<<16) + (tempUC1<<8) + tempUC ); } (*env)->ReleaseIntArrayElements(env, jia, jiBody, 0); } else { /* 1 component 8 or 16 bpp image*/ ptr = image->comps[0].data; printf("C: before transfering a %d bpp image to java (length = %d)\n",image->comps[0].prec ,w*h); if (image->comps[0].prec<=8) { fid = (*env)->GetFieldID(env, cls,"image8", "[B"); jba = (*env)->GetObjectField(env, obj, fid); jbBody = (*env)->GetByteArrayElements(env, jba, 0); ptrBBody = jbBody; #ifdef CHECK_THRESHOLDS if (image->comps[0].sgnd) { min_value = -128; max_value = 127; } else { min_value = 0; max_value = 255; } #endif /*printf("C: transfering %d shorts to Java image8 pointer = %d\n", wr*hr,ptrSBody);*/ for (i=0; i<w*h; i++) { tempUC = (unsigned char) (ptr[i]); #ifdef CHECK_THRESHOLDS if (tempUC<min_value) tempUC = min_value; else if (tempUC > max_value) tempUC = max_value; #endif *(ptrBBody++) = tempUC; } (*env)->ReleaseByteArrayElements(env, jba, jbBody, 0); printf("C: image8 transfered to Java\n"); } else { fid = (*env)->GetFieldID(env, cls,"image16", "[S"); jsa = (*env)->GetObjectField(env, obj, fid); jsBody = (*env)->GetShortArrayElements(env, jsa, 0); ptrSBody = jsBody; #ifdef CHECK_THRESHOLDS if (image->comps[0].sgnd) { min_value = -32768; max_value = 32767; } else { min_value = 0; max_value = 65535; } printf("C: minValue = %ld, maxValue = %ld\n", min_value, max_value); #endif printf("C: transfering %d shorts to Java image16 pointer = %d\n", w*h,ptrSBody); for (i=0; i<w*h; i++) { tempS = (short) (ptr[i]); #ifdef CHECK_THRESHOLDS if (tempS<min_value) { printf("C: value %d truncated to %ld\n", tempS, min_value); tempS = min_value; } else if (tempS > max_value) { printf("C: value %d truncated to %ld\n", tempS, max_value); tempS = max_value; } 
#endif *(ptrSBody++) = tempS; } (*env)->ReleaseShortArrayElements(env, jsa, jsBody, 0); printf("C: image16 completely filled\n"); } } /* free remaining structures */ if(dinfo) { opj_destroy_decompress(dinfo); } /* free image data structure */ opj_image_destroy(image); } return 1; /* OK */ }
/**
 * Encode one video frame to a JPEG 2000 codestream via libopenjpeg.
 *
 * @param avctx    codec context (priv_data is a LibOpenJPEGContext)
 * @param buf      output buffer for the encoded codestream
 * @param buf_size capacity of @p buf in bytes
 * @param data     input AVFrame to encode
 * @return number of bytes written to @p buf, or a negative error code
 */
static int libopenjpeg_encode_frame(AVCodecContext *avctx, uint8_t *buf, int buf_size, void *data)
{
    AVFrame *frame          = data;
    LibOpenJPEGContext *ctx = avctx->priv_data;
    opj_cinfo_t *compress   = ctx->compress;
    opj_image_t *image      = ctx->image;
    opj_cio_t *stream;
    int cpyresult = 0;
    int len       = 0;

    // x0, y0 is the top left corner of the image
    // x1, y1 is the width, height of the reference grid
    image->x0 = 0;
    image->y0 = 0;
    image->x1 = (avctx->width  - 1) * ctx->enc_params.subsampling_dx + 1;
    image->y1 = (avctx->height - 1) * ctx->enc_params.subsampling_dy + 1;

    /* Copy the frame data into the libopenjpeg image, choosing the
     * copy routine by pixel layout (packed RGB-style vs planar YUV). */
    switch (avctx->pix_fmt) {
    case PIX_FMT_GRAY8:
        cpyresult = libopenjpeg_copy_rgba(avctx, frame, image, 1);
        break;
    case PIX_FMT_RGB24:
        cpyresult = libopenjpeg_copy_rgba(avctx, frame, image, 3);
        break;
    case PIX_FMT_RGBA:
        cpyresult = libopenjpeg_copy_rgba(avctx, frame, image, 4);
        break;
    case PIX_FMT_YUV420P:
    case PIX_FMT_YUV422P:
    case PIX_FMT_YUV440P:
    case PIX_FMT_YUV444P:
        cpyresult = libopenjpeg_copy_yuv(avctx, frame, image);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The frame's pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return AVERROR(EINVAL);
        /* note: the unreachable `break` that followed this return was removed */
    }

    if (!cpyresult) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not copy the frame data to the internal image buffer\n");
        return -1;
    }

    /* Re-bind the encoder to the (possibly updated) parameters and image. */
    opj_setup_encoder(compress, &ctx->enc_params, image);

    /* NULL/0 asks libopenjpeg to manage a growable output buffer. */
    stream = opj_cio_open((opj_common_ptr)compress, NULL, 0);
    if (!stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        return AVERROR(ENOMEM);
    }

    if (!opj_encode(compress, stream, image, NULL)) {
        opj_cio_close(stream);
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        return -1;
    }

    len = cio_tell(stream);
    if (len > buf_size) {
        opj_cio_close(stream);
        av_log(avctx, AV_LOG_ERROR,
               "Error with buf_size, not large enough to hold the frame\n");
        return -1;
    }

    memcpy(buf, stream->buffer, len);
    opj_cio_close(stream);
    return len;
}
/**
 * One-time initialization of the libopenjpeg encoder: translates the
 * AVOption-backed fields of LibOpenJPEGContext into opj encoder parameters,
 * creates the compressor, the internal image and the cio stream, and installs
 * the event callbacks. Cleans up everything it allocated on failure.
 *
 * @param avctx codec context (priv_data is a LibOpenJPEGContext)
 * @return 0 on success, a negative AVERROR code on failure
 */
static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int err = AVERROR(ENOMEM);

    opj_set_default_encoder_parameters(&ctx->enc_params);
    /* Map user options onto the libopenjpeg parameter struct. */
    ctx->enc_params.cp_rsiz = ctx->profile;
    ctx->enc_params.mode = !!avctx->global_quality;
    ctx->enc_params.cp_cinema = ctx->cinema_mode;
    ctx->enc_params.prog_order = ctx->prog_order;
    ctx->enc_params.numresolution = ctx->numresolution;
    ctx->enc_params.cp_disto_alloc = ctx->disto_alloc;
    ctx->enc_params.cp_fixed_alloc = ctx->fixed_alloc;
    ctx->enc_params.cp_fixed_quality = ctx->fixed_quality;
    ctx->enc_params.tcp_numlayers = ctx->numlayers;
    /* Compression level drives the rate of the first quality layer. */
    ctx->enc_params.tcp_rates[0] = FFMAX(avctx->compression_level, 0) * 2;
    if (ctx->cinema_mode > 0) {
        /* Digital-cinema profiles constrain many parameters; force the
         * values the DCI spec requires. */
        ctx->enc_params.irreversible = 1;
        ctx->enc_params.tcp_mct = 1;
        ctx->enc_params.tile_size_on = 0;
        /* no subsampling */
        ctx->enc_params.cp_tdx=1;
        ctx->enc_params.cp_tdy=1;
        ctx->enc_params.subsampling_dx = 1;
        ctx->enc_params.subsampling_dy = 1;
        /* Tile and Image shall be at (0,0) */
        ctx->enc_params.cp_tx0 = 0;
        ctx->enc_params.cp_ty0 = 0;
        ctx->enc_params.image_offset_x0 = 0;
        ctx->enc_params.image_offset_y0 = 0;
        /* Codeblock size= 32*32 */
        ctx->enc_params.cblockw_init = 32;
        ctx->enc_params.cblockh_init = 32;
        ctx->enc_params.csty |= 0x01;
        /* No ROI */
        ctx->enc_params.roi_compno = -1;
        if (ctx->enc_params.prog_order != CPRL) {
            av_log(avctx, AV_LOG_ERROR, "prog_order forced to CPRL\n");
            ctx->enc_params.prog_order = CPRL;
        }
        ctx->enc_params.tp_flag = 'C';
        ctx->enc_params.tp_on = 1;
    }
    ctx->compress = opj_create_compress(ctx->format);
    if (!ctx->compress) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the compressor\n");
        return AVERROR(ENOMEM);
    }
    ctx->image = mj2_create_image(avctx, &ctx->enc_params);
    if (!ctx->image) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the mj2 image\n");
        err = AVERROR(EINVAL);
        goto fail;
    }
    opj_setup_encoder(ctx->compress, &ctx->enc_params, ctx->image);
    /* NULL/0 asks libopenjpeg to manage a growable output buffer. */
    ctx->stream = opj_cio_open((opj_common_ptr) ctx->compress, NULL, 0);
    if (!ctx->stream) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the cio stream\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }
    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame) {
        av_log(avctx, AV_LOG_ERROR, "Error allocating coded frame\n");
        goto fail; /* err is still AVERROR(ENOMEM) here */
    }
    /* Route libopenjpeg diagnostics through av_log (avctx as context). */
    memset(&ctx->event_mgr, 0, sizeof(opj_event_mgr_t));
    ctx->event_mgr.info_handler = info_callback;
    ctx->event_mgr.error_handler = error_callback;
    ctx->event_mgr.warning_handler = warning_callback;
    opj_set_event_mgr((opj_common_ptr) ctx->compress, &ctx->event_mgr, avctx);
    return 0;
fail:
    /* Free everything that may have been allocated above; the opj_* and
     * av_* free functions tolerate NULL. */
    opj_cio_close(ctx->stream);
    ctx->stream = NULL;
    opj_destroy_compress(ctx->compress);
    ctx->compress = NULL;
    opj_image_destroy(ctx->image);
    ctx->image = NULL;
    av_freep(&avctx->coded_frame);
    return err;
}
/**
 * Decode a JPEG 2000 image (bare J2K codestream or JP2 container) into a
 * new fz_pixmap.
 *
 * @param imgp  receives the new pixmap on success (caller owns it)
 * @param data  compressed input bytes
 * @param size  number of input bytes
 * @param defcs colorspace from the enclosing dict, or NULL; used only when
 *              its component count matches the decoded image
 * @return fz_okay on success, an fz_throw error otherwise
 */
fz_error
fz_load_jpx_image(fz_pixmap **imgp, unsigned char *data, int size, fz_colorspace *defcs)
{
	fz_pixmap *img;
	opj_event_mgr_t evtmgr;
	opj_dparameters_t params;
	opj_dinfo_t *info;
	opj_cio_t *cio;
	opj_image_t *jpx;
	fz_colorspace *colorspace;
	unsigned char *p;
	int format;
	int a, n, w, h, depth, sgnd;
	int x, y, k, v;

	if (size < 2)
		return fz_throw("not enough data to determine image format");

	/* Check for SOC marker -- if found we have a bare J2K stream */
	if (data[0] == 0xFF && data[1] == 0x4F)
		format = CODEC_J2K;
	else
		format = CODEC_JP2;

	memset(&evtmgr, 0, sizeof(evtmgr));
	evtmgr.error_handler = fz_opj_error_callback;
	evtmgr.warning_handler = fz_opj_warning_callback;
	evtmgr.info_handler = fz_opj_info_callback;

	opj_set_default_decoder_parameters(&params);

	info = opj_create_decompress(format);
	opj_set_event_mgr((opj_common_ptr)info, &evtmgr, stderr);
	opj_setup_decoder(info, &params);

	cio = opj_cio_open((opj_common_ptr)info, data, size);

	jpx = opj_decode(info, cio);

	opj_cio_close(cio);
	opj_destroy_decompress(info);

	if (!jpx)
		return fz_throw("opj_decode failed");

	/* All components must agree on geometry and precision; the pixel copy
	 * below assumes it. BUGFIX: destroy jpx before throwing — the original
	 * leaked the decoded image on every mismatch path. */
	for (k = 1; k < jpx->numcomps; k++)
	{
		if (jpx->comps[k].w != jpx->comps[0].w)
		{
			opj_image_destroy(jpx);
			return fz_throw("image components have different width");
		}
		if (jpx->comps[k].h != jpx->comps[0].h)
		{
			opj_image_destroy(jpx);
			return fz_throw("image components have different height");
		}
		if (jpx->comps[k].prec != jpx->comps[0].prec)
		{
			opj_image_destroy(jpx);
			return fz_throw("image components have different precision");
		}
	}

	n = jpx->numcomps;
	w = jpx->comps[0].w;
	h = jpx->comps[0].h;
	depth = jpx->comps[0].prec;
	sgnd = jpx->comps[0].sgnd;

	/* Split off an alpha channel; after this n is 1, 3 or 4 color
	 * components and a is 0 or 1. */
	if (jpx->color_space == CLRSPC_SRGB && n == 4) { n = 3; a = 1; }
	else if (jpx->color_space == CLRSPC_SYCC && n == 4) { n = 3; a = 1; }
	else if (n == 2) { n = 1; a = 1; }
	else if (n > 4) { n = 4; a = 1; }
	else { a = 0; }

	if (defcs)
	{
		if (defcs->n == n)
		{
			colorspace = defcs;
		}
		else
		{
			fz_warn("jpx file and dict colorspaces do not match");
			defcs = NULL;
		}
	}

	if (!defcs)
	{
		switch (n)
		{
		case 1: colorspace = fz_device_gray; break;
		case 3: colorspace = fz_device_rgb; break;
		case 4: colorspace = fz_device_cmyk; break;
		}
	}

	img = fz_new_pixmap_with_limit(colorspace, w, h);
	if (!img)
	{
		opj_image_destroy(jpx);
		return fz_throw("out of memory");
	}

	p = img->samples;
	for (y = 0; y < h; y++)
	{
		for (x = 0; x < w; x++)
		{
			for (k = 0; k < n + a; k++)
			{
				v = jpx->comps[k].data[y * w + x];
				/* Re-center signed samples, then scale down to 8 bits. */
				if (sgnd)
					v = v + (1 << (depth - 1));
				if (depth > 8)
					v = v >> (depth - 8);
				*p++ = v;
			}
			if (!a)
				*p++ = 255; /* synthesize opaque alpha */
		}
	}

	if (a)
	{
		if (n == 4)
		{
			/* CMYK+A: convert to RGB before premultiplying. */
			fz_pixmap *tmp = fz_new_pixmap(fz_device_rgb, w, h);
			fz_convert_pixmap(img, tmp);
			fz_drop_pixmap(img);
			img = tmp;
		}
		fz_premultiply_pixmap(img);
	}

	opj_image_destroy(jpx);

	*imgp = img;
	return fz_okay;
}
/**
 * Decode a J2K codestream held by `base` into `raw_image`.
 *
 * Returns TRUE when decoding is "done" (success OR unrecoverable failure —
 * callers use the return to stop retrying), FALSE only when the inputs are
 * not usable yet. first_channel/max_channel_count select which components
 * are copied out. decode_time is unused here.
 */
BOOL LLImageJ2COJ::decodeImpl(LLImageJ2C &base, LLImageRaw &raw_image, F32 decode_time, S32 first_channel, S32 max_channel_count)
{
	//
	// FIXME: Get the comment field out of the texture
	//
	if (!base.getData()) return FALSE;
	if (!base.getDataSize()) return FALSE;
	if (!raw_image.getData()) return FALSE;
	if (!raw_image.getDataSize()) return FALSE;

	LLTimer decode_timer;

	opj_dparameters_t parameters;	/* decompression parameters */
	opj_event_mgr_t event_mgr;		/* event manager */
	opj_image_t *image = NULL;
	opj_dinfo_t* dinfo = NULL;	/* handle to a decompressor */
	opj_cio_t *cio = NULL;

	/* configure the event callbacks (not required) */
	memset(&event_mgr, 0, sizeof(opj_event_mgr_t));
	event_mgr.error_handler = error_callback;
	event_mgr.warning_handler = warning_callback;
	event_mgr.info_handler = info_callback;

	/* set decoding parameters to default values */
	opj_set_default_decoder_parameters(&parameters);
	parameters.cp_reduce = base.getRawDiscardLevel();

	/* decode the code-stream */
	/* ---------------------- */

	/* JPEG-2000 codestream */

	/* get a decoder handle */
	dinfo = opj_create_decompress(CODEC_J2K);
	/* BUGFIX: the original checked dinfo only after passing it to
	   opj_set_event_mgr/opj_setup_decoder/opj_cio_open (NULL deref). */
	if (!dinfo) return FALSE;

	/* catch events using our callbacks and give a local context */
	opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr);

	/* setup the decoder decoding parameters using user parameters */
	opj_setup_decoder(dinfo, &parameters);

	/* open a byte stream */
	cio = opj_cio_open((opj_common_ptr)dinfo, base.getData(), base.getDataSize());
	/* BUGFIX: the original early returns here leaked dinfo. */
	if (!cio || cio->bp == NULL)
	{
		if (cio) opj_cio_close(cio);
		opj_destroy_decompress(dinfo);
		return FALSE;
	}

	/* decode the stream and fill the image structure */
	image = opj_decode(dinfo, cio);

	/* close the byte stream */
	opj_cio_close(cio);

	/* free remaining structures */
	opj_destroy_decompress(dinfo);

	// The image decode failed if the return was NULL or the component
	// count was zero. The latter is just a sanity check before we
	// dereference the array.
	if(!image)
	{
		LL_DEBUGS("Openjpeg") << "ERROR -> decodeImpl: failed to decode image - no image" << LL_ENDL;
		return TRUE; // done
	}

	S32 img_components = image->numcomps;

	if( !img_components ) // < 1 ||img_components > 4 )
	{
		LL_DEBUGS("Openjpeg") << "ERROR -> decodeImpl: failed to decode image wrong number of components: " << img_components << LL_ENDL;
		opj_image_destroy(image);
		return TRUE; // done
	}

	// sometimes we get bad data out of the cache - check to see if the decode succeeded
	for (S32 i = 0; i < img_components; i++)
	{
		if (image->comps[i].factor != base.getRawDiscardLevel())
		{
			// if we didn't get the discard level we're expecting, fail
			opj_image_destroy(image);
			base.mDecoding = FALSE;
			return TRUE;
		}
	}

	if(img_components <= first_channel)
	{
		LL_DEBUGS("Openjpeg") << "trying to decode more channels than are present in image: numcomps: " << img_components << " first_channel: " << first_channel << LL_ENDL;
		opj_image_destroy(image);
		return TRUE;
	}

	// Copy image data into our raw image format (instead of the separate channel format
	S32 channels = img_components - first_channel;
	if( channels > max_channel_count )
		channels = max_channel_count;

	// Component buffers are allocated in an image width by height buffer.
	// The image placed in that buffer is ceil(width/2^factor) by
	// ceil(height/2^factor) and if the factor isn't zero it will be at the
	// top left of the buffer with black filled in the rest of the pixels.
	// It is integer math so the formula is written in ceildivpo2.
	// (Assuming all the components have the same width, height and
	// factor.)
	S32 comp_width = image->comps[0].w;
	S32 f=image->comps[0].factor;
	S32 width = ceildivpow2(image->x1 - image->x0, f);
	S32 height = ceildivpow2(image->y1 - image->y0, f);
	raw_image.resize(width, height, channels);
	U8 *rawp = raw_image.getData();

	// first_channel is what channel to start copying from
	// dest is what channel to copy to.  first_channel comes from the
	// argument, dest always starts writing at channel zero.
	for (S32 comp = first_channel, dest=0; comp < first_channel + channels; comp++, dest++)
	{
		if (image->comps[comp].data)
		{
			S32 offset = dest;
			// rows are written bottom-up to flip the image vertically
			for (S32 y = (height - 1); y >= 0; y--)
			{
				for (S32 x = 0; x < width; x++)
				{
					rawp[offset] = image->comps[comp].data[y*comp_width + x];
					offset += channels;
				}
			}
		}
		else // Some rare OpenJPEG versions have this bug.
		{
			llwarns << "ERROR -> decodeImpl: failed to decode image! (NULL comp data - OpenJPEG bug)" << llendl;
			opj_image_destroy(image);
			return TRUE; // done
		}
	}

	/* free image data structure */
	opj_image_destroy(image);

	return TRUE; // done
}
// load the mxf file format bool wxMXFHandler::LoadFile(wxImage *image, wxInputStream& stream, bool verbose, int index) { opj_dparameters_t parameters; /* decompression parameters */ opj_event_mgr_t event_mgr; /* event manager */ opj_image_t *opjimage = NULL; unsigned char *src = NULL; unsigned char *ptr; int file_length, j2k_point, j2k_len; opj_codestream_info_t cstr_info; /* Codestream information structure */ // destroy the image image->Destroy(); /* handle to a decompressor */ opj_dinfo_t* dinfo = NULL; opj_cio_t *cio = NULL; /* configure the event callbacks (not required) */ memset(&event_mgr, 0, sizeof(opj_event_mgr_t)); event_mgr.error_handler = mxf_error_callback; event_mgr.warning_handler = mxf_warning_callback; event_mgr.info_handler = mxf_info_callback; /* set decoding parameters to default values */ opj_set_default_decoder_parameters(¶meters); /* prepare parameters */ strncpy(parameters.infile, "", sizeof(parameters.infile)-1); strncpy(parameters.outfile, "", sizeof(parameters.outfile)-1); parameters.decod_format = J2K_CFMT; parameters.cod_format = BMP_DFMT; if (m_reducefactor) parameters.cp_reduce = m_reducefactor; if (m_qualitylayers) parameters.cp_layer = m_qualitylayers; /*if (n_components) parameters. 
= n_components;*/ /* JPWL only */ #ifdef USE_JPWL parameters.jpwl_exp_comps = m_expcomps; parameters.jpwl_max_tiles = m_maxtiles; parameters.jpwl_correct = m_enablejpwl; #endif /* USE_JPWL */ /* get a decoder handle */ dinfo = opj_create_decompress(CODEC_J2K); /* find length of the stream */ stream.SeekI(0, wxFromEnd); file_length = (int) stream.TellI(); /* search for the m_framenum codestream position and length */ //jp2c_point = searchjp2c(stream, file_length, m_framenum); //jp2c_len = searchjp2c(stream, file_length, m_framenum); j2k_point = 0; j2k_len = 10; // malloc memory source src = (unsigned char *) malloc(j2k_len); // copy the jp2c stream.SeekI(j2k_point, wxFromStart); stream.Read(src, j2k_len); /* catch events using our callbacks and give a local context */ opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr); /* setup the decoder decoding parameters using user parameters */ opj_setup_decoder(dinfo, ¶meters); /* open a byte stream */ cio = opj_cio_open((opj_common_ptr)dinfo, src, j2k_len); /* decode the stream and fill the image structure */ opjimage = opj_decode_with_info(dinfo, cio, &cstr_info); if (!opjimage) { wxMutexGuiEnter(); wxLogError(wxT("MXF: failed to decode image!")); wxMutexGuiLeave(); opj_destroy_decompress(dinfo); opj_cio_close(cio); free(src); return false; } /* close the byte stream */ opj_cio_close(cio); /* common rendering method */ #include "imagjpeg2000.cpp" wxMutexGuiEnter(); wxLogMessage(wxT("MXF: image loaded.")); wxMutexGuiLeave(); /* close openjpeg structs */ opj_destroy_decompress(dinfo); opj_image_destroy(opjimage); free(src); if (!image->Ok()) return false; else return true; }
/**
 * Extract image dimensions and component count from a J2K codestream and
 * store them on `base` via setSize().
 *
 * Decoding is limited to the main header (LIMIT_TO_MAIN_HEADER), so only
 * metadata is parsed, not pixel data. Returns TRUE on success, FALSE when
 * the data is missing or the header cannot be decoded.
 */
BOOL LLImageJ2COJ::getMetadata(LLImageJ2C &base)
{
	//
	// FIXME: We get metadata by decoding the ENTIRE image.
	//
	if (!base.getData()) return FALSE;
	if (!base.getDataSize()) return FALSE;

	// Update the raw discard level
	base.updateRawDiscardLevel();

	opj_dparameters_t parameters;	/* decompression parameters */
	opj_event_mgr_t event_mgr;		/* event manager */
	opj_image_t *image = NULL;
	opj_dinfo_t* dinfo = NULL;	/* handle to a decompressor */
	opj_cio_t *cio = NULL;

	/* configure the event callbacks (not required) */
	memset(&event_mgr, 0, sizeof(opj_event_mgr_t));
	event_mgr.error_handler = error_callback;
	event_mgr.warning_handler = warning_callback;
	event_mgr.info_handler = info_callback;

	/* set decoding parameters to default values */
	opj_set_default_decoder_parameters(&parameters);

	// Only decode what's required to get the size data.
	parameters.cp_limit_decoding=LIMIT_TO_MAIN_HEADER;

	//parameters.cp_reduce = mRawDiscardLevel;

	/* decode the code-stream */
	/* ---------------------- */

	/* JPEG-2000 codestream */

	/* get a decoder handle */
	dinfo = opj_create_decompress(CODEC_J2K);
	/* BUGFIX: the original checked dinfo only after passing it to
	   opj_set_event_mgr/opj_setup_decoder/opj_cio_open (NULL deref). */
	if (!dinfo) return FALSE;

	/* catch events using our callbacks and give a local context */
	opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr);

	/* setup the decoder decoding parameters using user parameters */
	opj_setup_decoder(dinfo, &parameters);

	/* open a byte stream */
	cio = opj_cio_open((opj_common_ptr)dinfo, base.getData(), base.getDataSize());
	/* BUGFIX: the original early returns here leaked dinfo. */
	if (!cio || cio->bp == NULL)
	{
		if (cio) opj_cio_close(cio);
		opj_destroy_decompress(dinfo);
		return FALSE;
	}

	/* decode the stream and fill the image structure */
	image = opj_decode(dinfo, cio);

	/* close the byte stream */
	opj_cio_close(cio);

	/* free remaining structures */
	opj_destroy_decompress(dinfo);

	if(!image)
	{
		llwarns << "ERROR -> getMetadata: failed to decode image!" << llendl;
		return FALSE;
	}

	// Copy image data into our raw image format (instead of the separate channel format
	S32 width = 0;
	S32 height = 0;

	S32 img_components = image->numcomps;
	width = image->x1 - image->x0;
	height = image->y1 - image->y0;
	base.setSize(width, height, img_components);

	/* free image data structure */
	opj_image_destroy(image);
	return TRUE;
}
/**
 * Decode a J2K codestream held by `base` into `raw_image`, using
 * opj_decode_with_info so the codestream info can be compared against the
 * requested discard level (detects truncated/partial data from the cache).
 *
 * Returns TRUE when decoding is "done" (success OR unrecoverable failure —
 * callers use the return to stop retrying). first_channel/max_channel_count
 * select which components are copied out. decode_time is unused here.
 */
BOOL LLImageJ2COJ::decodeImpl(LLImageJ2C &base, LLImageRaw &raw_image, F32 decode_time, S32 first_channel, S32 max_channel_count)
{
	//
	// FIXME: Get the comment field out of the texture
	//

	LLTimer decode_timer;

	opj_dparameters_t parameters;	/* decompression parameters */
	opj_event_mgr_t event_mgr;		/* event manager */
	opj_image_t *image = NULL;
	opj_dinfo_t* dinfo = NULL;	/* handle to a decompressor */
	opj_cio_t *cio = NULL;

	/* configure the event callbacks (not required) */
	memset(&event_mgr, 0, sizeof(opj_event_mgr_t));
	event_mgr.error_handler = error_callback;
	event_mgr.warning_handler = warning_callback;
	event_mgr.info_handler = info_callback;

	/* set decoding parameters to default values */
	opj_set_default_decoder_parameters(&parameters);
	parameters.cp_reduce = base.getRawDiscardLevel();

	/* decode the code-stream */
	/* ---------------------- */

	/* JPEG-2000 codestream */

	/* get a decoder handle */
	dinfo = opj_create_decompress(CODEC_J2K);
	/* BUGFIX: original passed an unchecked handle to the opj_* calls below */
	if (!dinfo) return TRUE; // can't proceed; treat as done

	/* catch events using our callbacks and give a local context */
	opj_set_event_mgr((opj_common_ptr)dinfo, &event_mgr, stderr);

	/* setup the decoder decoding parameters using user parameters */
	opj_setup_decoder(dinfo, &parameters);

	/* open a byte stream */
	cio = opj_cio_open((opj_common_ptr)dinfo, base.getData(), base.getDataSize());

	/* decode the stream and fill the image structure.
	   Also fill in an additional structure to get the decoding result.
	   This structure is a bit unusual in that it is not received through
	   opj, but still has some dynamically allocated fields that need to
	   be cleared up at the end by calling a destroy function. */
	opj_codestream_info_t cinfo;
	memset(&cinfo, 0, sizeof(opj_codestream_info_t));
	image = opj_decode_with_info(dinfo, cio, &cinfo);

	/* close the byte stream */
	opj_cio_close(cio);

	/* free remaining structures */
	opj_destroy_decompress(dinfo);

	// The image decode failed if the return was NULL or the component
	// count was zero. The latter is just a sanity check before we
	// dereference the array.
	if(!image)
	{
		llwarns << "ERROR -> decodeImpl: failed to decode image - no image" << LL_ENDL;
		/* BUGFIX: original returned here without releasing the codestream
		   info fields; safe on the zeroed struct even if decode failed. */
		opj_destroy_cstr_info(&cinfo);
		return TRUE; // done
	}

	S32 img_components = image->numcomps;

	if( !img_components ) // < 1 ||img_components > 4 )
	{
		llwarns << "ERROR -> decodeImpl: failed to decode image wrong number of components: " << img_components << LL_ENDL;
		opj_destroy_cstr_info(&cinfo);
		opj_image_destroy(image);
		return TRUE; // done
	}

	// sometimes we get bad data out of the cache - check to see if the decode succeeded
	int decompdifference = 0;
	if (cinfo.numdecompos) // sanity
	{
		for (int comp = 0; comp < image->numcomps; comp++)
		{
			/* get maximum decomposition level difference, first
			   field is from the COD header and the second is what
			   is actually met in the codestream, NB: if everything
			   was ok, this calculation will return what was set in
			   the cp_reduce value! */
			decompdifference = llmax(decompdifference, cinfo.numdecompos[comp] - image->comps[comp].resno_decoded);
		}
		if (decompdifference < 0) // sanity
		{
			decompdifference = 0;
		}
	}

	/* if OpenJPEG failed to decode all requested decomposition levels
	   the difference will be greater than this level */
	if (decompdifference > base.getRawDiscardLevel())
	{
		llwarns << "not enough data for requested discard level, setting mDecoding to FALSE, difference: " << (decompdifference - base.getRawDiscardLevel()) << llendl;
		opj_destroy_cstr_info(&cinfo);
		opj_image_destroy(image);
		base.mDecoding = FALSE;
		return TRUE;
	}

	if(img_components <= first_channel)
	{
		llwarns << "trying to decode more channels than are present in image: numcomps: " << img_components << " first_channel: " << first_channel << LL_ENDL;
		opj_destroy_cstr_info(&cinfo);
		opj_image_destroy(image);
		return TRUE;
	}

	// Copy image data into our raw image format (instead of the separate channel format
	S32 channels = img_components - first_channel;
	if( channels > max_channel_count )
		channels = max_channel_count;

	// Component buffers are allocated in an image width by height buffer.
	// The image placed in that buffer is ceil(width/2^factor) by
	// ceil(height/2^factor) and if the factor isn't zero it will be at the
	// top left of the buffer with black filled in the rest of the pixels.
	// It is integer math so the formula is written in ceildivpo2.
	// (Assuming all the components have the same width, height and
	// factor.)
	S32 comp_width = image->comps[0].w;
	S32 f=image->comps[0].factor;
	S32 width = ceildivpow2(image->x1 - image->x0, f);
	S32 height = ceildivpow2(image->y1 - image->y0, f);
	raw_image.resize(width, height, channels);
	U8 *rawp = raw_image.getData();

	// first_channel is what channel to start copying from
	// dest is what channel to copy to.  first_channel comes from the
	// argument, dest always starts writing at channel zero.
	for (S32 comp = first_channel, dest=0; comp < first_channel + channels; comp++, dest++)
	{
		if (image->comps[comp].data)
		{
			S32 offset = dest;
			// rows are written bottom-up to flip the image vertically
			for (S32 y = (height - 1); y >= 0; y--)
			{
				for (S32 x = 0; x < width; x++)
				{
					rawp[offset] = image->comps[comp].data[y*comp_width + x];
					offset += channels;
				}
			}
		}
		else // Some rare OpenJPEG versions have this bug.
		{
			llwarns << "ERROR -> decodeImpl: failed to decode image! (NULL comp data - OpenJPEG bug)" << llendl;
			opj_destroy_cstr_info(&cinfo);
			opj_image_destroy(image);
			return TRUE; // done
		}
	}

	/* free opj data structures */
	opj_destroy_cstr_info(&cinfo);
	opj_image_destroy(image);

	return TRUE; // done
}