Example #1
static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int err = AVERROR(ENOMEM);

    opj_set_default_encoder_parameters(&ctx->enc_params);

    ctx->enc_params.cp_rsiz = ctx->profile;
    ctx->enc_params.mode = !!avctx->global_quality;
    ctx->enc_params.cp_cinema = ctx->cinema_mode;
    ctx->enc_params.prog_order = ctx->prog_order;
    ctx->enc_params.numresolution = ctx->numresolution;
    ctx->enc_params.cp_disto_alloc = ctx->disto_alloc;
    ctx->enc_params.cp_fixed_alloc = ctx->fixed_alloc;
    ctx->enc_params.cp_fixed_quality = ctx->fixed_quality;
    ctx->enc_params.tcp_numlayers = ctx->numlayers;
    ctx->enc_params.tcp_rates[0] = FFMAX(avctx->compression_level, 0) * 2;

    if (ctx->cinema_mode > 0) {
        cinema_parameters(&ctx->enc_params);
    }

    ctx->image = mj2_create_image(avctx, &ctx->enc_params);
    if (!ctx->image) {
        av_log(avctx, AV_LOG_ERROR, "Error creating the mj2 image\n");
        err = AVERROR(EINVAL);
        goto fail;
    }

    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame) {
        av_log(avctx, AV_LOG_ERROR, "Error allocating coded frame\n");
        goto fail;
    }

    return 0;

fail:
    opj_image_destroy(ctx->image);
    ctx->image = NULL;
    av_frame_free(&avctx->coded_frame);
    return err;
}
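Example #1 configures the encoder for digital-cinema output through a cinema_parameters() helper that is not shown in the excerpt (Examples #2 and #3 call a helper of the same name). As a point of reference, below is a minimal sketch of what such a helper typically forces for DCI-compliant codestreams, modeled on the OpenJPEG 1.x sample encoder; the exact values used by the projects above may differ, so treat it as an assumption rather than their actual code.

#include <openjpeg.h>

/* Sketch (assumption): pin down the coding parameters that DCI-compliant
 * codestreams require, in the style of OpenJPEG 1.x's sample encoder. */
static void cinema_parameters(opj_cparameters_t *parameters)
{
    /* a single tile covering the whole image, anchored at (0,0) */
    parameters->tile_size_on = 0;
    parameters->cp_tdx = 1;
    parameters->cp_tdy = 1;
    parameters->cp_tx0 = 0;
    parameters->cp_ty0 = 0;
    parameters->image_offset_x0 = 0;
    parameters->image_offset_y0 = 0;

    /* one tile part per component */
    parameters->tp_flag = 'C';
    parameters->tp_on = 1;

    /* 32x32 code blocks; bit 0 of csty requests explicit precinct sizes */
    parameters->cblockw_init = 32;
    parameters->cblockh_init = 32;
    parameters->csty |= 0x01;

    /* CPRL progression, no region of interest, no subsampling */
    parameters->prog_order = CPRL;
    parameters->roi_compno = -1;
    parameters->subsampling_dx = 1;
    parameters->subsampling_dy = 1;

    /* irreversible 9/7 wavelet, as used for lossy cinema encoding */
    parameters->irreversible = 1;
}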
Example #2
int write_image (dt_imageio_j2k_t *j2k, const char *filename, const float *in, void *exif, int exif_len, int imgid)
{
  opj_cparameters_t parameters;     /* compression parameters */
  float *rates = NULL;
  opj_event_mgr_t event_mgr;        /* event manager */
  opj_image_t *image = NULL;
  int quality = CLAMP(j2k->quality, 1, 100);

  /*
  configure the event callbacks (not required);
  setting each callback is optional
  */
  memset(&event_mgr, 0, sizeof(opj_event_mgr_t));
  event_mgr.error_handler = error_callback;
  event_mgr.warning_handler = warning_callback;
  event_mgr.info_handler = info_callback;

  /* set encoding parameters to default values */
  opj_set_default_encoder_parameters(&parameters);

  /* compression ratio */
  /* invert the range: quality 1-100 maps to a rate of 100-1,
   * where the encoder sees 1 as highest quality (lossless) and 100 as very low quality */
  parameters.tcp_rates[0] = 100 - quality + 1;

  parameters.tcp_numlayers = 1; /* only one quality layer */
  parameters.cp_disto_alloc = 1;
  parameters.cp_rsiz = STD_RSIZ;

  parameters.cod_format = j2k->format;
  parameters.cp_cinema = j2k->preset;

  if(parameters.cp_cinema)
  {
    /* remember the user-requested rates before cinema_parameters() overrides them */
    rates = (float *)malloc(parameters.tcp_numlayers * sizeof(float));
    for(int i=0; i< parameters.tcp_numlayers; i++)
    {
      rates[i] = parameters.tcp_rates[i];
    }
    cinema_parameters(&parameters);
  }

  /* Create comment for codestream */
  const char comment[] = "Created by "PACKAGE_STRING;
  parameters.cp_comment = g_strdup(comment);

  /*Converting the image to a format suitable for encoding*/
  {
    int subsampling_dx = parameters.subsampling_dx;
    int subsampling_dy = parameters.subsampling_dy;
    int numcomps = 3;
    int prec = 12; //TODO: allow other bitdepths!
    int w = j2k->width, h = j2k->height;

    opj_image_cmptparm_t cmptparm[4]; /* RGBA: max. 4 components */
    memset(&cmptparm[0], 0, numcomps * sizeof(opj_image_cmptparm_t));

    for(int i = 0; i < numcomps; i++)
    {
      cmptparm[i].prec = prec;
      cmptparm[i].bpp = prec;
      cmptparm[i].sgnd = 0;
      cmptparm[i].dx = subsampling_dx;
      cmptparm[i].dy = subsampling_dy;
      cmptparm[i].w = w;
      cmptparm[i].h = h;
    }
    image = opj_image_create(numcomps, &cmptparm[0], CLRSPC_SRGB);
    if(!image)
    {
      fprintf(stderr, "Error: opj_image_create() failed\n");
      return 1;
    }

    /* set image offset and reference grid */
    image->x0 = parameters.image_offset_x0;
    image->y0 = parameters.image_offset_y0;
    image->x1 = parameters.image_offset_x0 + (w - 1) * subsampling_dx + 1;
    image->y1 = parameters.image_offset_y0 + (h - 1) * subsampling_dy + 1;

    switch(prec)
    {
      case 8:
        for(int i = 0; i < w * h; i++)
        {
          for(int k = 0; k < numcomps; k++) image->comps[k].data[i] = DOWNSAMPLE_FLOAT_TO_8BIT(in[i*4 + k]);
        }
        break;
      case 12:
        for(int i = 0; i < w * h; i++)
        {
          for(int k = 0; k < numcomps; k++) image->comps[k].data[i] = DOWNSAMPLE_FLOAT_TO_12BIT(in[i*4 + k]);
        }
        break;
      case 16:
        for(int i = 0; i < w * h; i++)
        {
          for(int k = 0; k < numcomps; k++) image->comps[k].data[i] = DOWNSAMPLE_FLOAT_TO_16BIT(in[i*4 + k]);
        }
        break;
      default:
        fprintf(stderr, "Error: this shouldn't happen, there is no bit depth of %d for jpeg 2000 images.\n", prec);
        return 1;
    }
  }

  /*Encoding image*/

  /* Decide if MCT should be used */
  parameters.tcp_mct = image->numcomps == 3 ? 1 : 0;

  if(parameters.cp_cinema)
  {
    cinema_setup_encoder(&parameters,image,rates);
  }

  /* encode the destination image */
  /* ---------------------------- */
  int rc = 1;
  OPJ_CODEC_FORMAT codec;
  if(parameters.cod_format == J2K_CFMT)        /* J2K format output */
    codec = CODEC_J2K;
  else
    codec = CODEC_JP2;

  int codestream_length;
  size_t res;
  opj_cio_t *cio = NULL;
  FILE *f = NULL;

  /* get a J2K/JP2 compressor handle */
  opj_cinfo_t* cinfo = opj_create_compress(codec);

  /* catch events using our callbacks and give a local context */
  opj_set_event_mgr((opj_common_ptr)cinfo, &event_mgr, stderr);

  /* setup the encoder parameters using the current image and user parameters */
  opj_setup_encoder(cinfo, &parameters, image);

  /* open a byte stream for writing */
  /* allocate memory for all tiles */
  cio = opj_cio_open((opj_common_ptr)cinfo, NULL, 0);

  /* encode the image */
  if(!opj_encode(cinfo, cio, image, NULL))
  {
    opj_cio_close(cio);
    fprintf(stderr, "failed to encode image\n");
    return 1;
  }
  codestream_length = cio_tell(cio);

  /* write the buffer to disk */
  f = fopen(filename, "wb");
  if(!f)
  {
    fprintf(stderr, "failed to open %s for writing\n", filename);
    return 1;
  }
  res = fwrite(cio->buffer, 1, codestream_length, f);
  if(res < (size_t)codestream_length) /* FIXME */
  {
    fprintf(stderr, "failed to write %d (%s)\n", codestream_length, filename);
    fclose(f);
    return 1;
  }
  fclose(f);

  /* close and free the byte stream */
  opj_cio_close(cio);

  /* free remaining compression structures */
  opj_destroy_compress(cinfo);

  /* add exif data blob. seems to not work for j2k files :( */
  if(exif && j2k->format == JP2_CFMT)
    rc = dt_exif_write_blob(exif,exif_len,filename);

  /* free image data */
  opj_image_destroy(image);

  /* free user parameters structure */
  g_free(parameters.cp_comment);
  if(parameters.cp_matrice) free(parameters.cp_matrice);
  free(rates); /* allocated only in the cinema path; free(NULL) is a no-op */

  return ((rc == 1) ? 0 : 1);
}
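Examples #2 and #3 both convert floating-point pixel data with DOWNSAMPLE_FLOAT_TO_8BIT/12BIT/16BIT macros that are defined elsewhere in their respective code bases. A plausible definition, and an assumption rather than the projects' exact macros, is to clamp the float sample to [0, 1] and scale it to the full range of the target bit depth:

/* Sketch (assumption): clamp a float sample to [0.0, 1.0] and scale it to the
 * maximum value representable at the target bit depth. */
#define DOWNSAMPLE_FLOAT_TO_8BIT(v)  ((v) <= 0.0f ? 0 : ((v) >= 1.0f ? 255   : (int)(255.0f   * (v))))
#define DOWNSAMPLE_FLOAT_TO_12BIT(v) ((v) <= 0.0f ? 0 : ((v) >= 1.0f ? 4095  : (int)(4095.0f  * (v))))
#define DOWNSAMPLE_FLOAT_TO_16BIT(v) ((v) <= 0.0f ? 0 : ((v) >= 1.0f ? 65535 : (int)(65535.0f * (v))))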
Example #3
static opj_image_t* ibuftoimage(ImBuf *ibuf, opj_cparameters_t *parameters) {
	
	unsigned char *rect;
	float *rect_float;
	
	int subsampling_dx = parameters->subsampling_dx;
	int subsampling_dy = parameters->subsampling_dy;
	

	int i, numcomps, w, h, prec;
	int x,y, y_row;
	OPJ_COLOR_SPACE color_space;
	opj_image_cmptparm_t cmptparm[4];	/* maximum of 4 components */
	opj_image_t * image = NULL;
	
	img_fol_t img_fol; /* only needed for cinema presets */
	memset(&img_fol,0,sizeof(img_fol_t));
	
	if (ibuf->ftype & JP2_CINE) {
		
		if (ibuf->x==4096 || ibuf->y==2160)
			parameters->cp_cinema= CINEMA4K_24;
		else {
			if (ibuf->ftype & JP2_CINE_48FPS) {
				parameters->cp_cinema= CINEMA2K_48;
			}
			else {
				parameters->cp_cinema= CINEMA2K_24;
			}
		}
		if (parameters->cp_cinema){
			img_fol.rates = (float*)MEM_mallocN(parameters->tcp_numlayers * sizeof(float), "jp2_rates");
			for(i=0; i< parameters->tcp_numlayers; i++){
				img_fol.rates[i] = parameters->tcp_rates[i];
			}
			cinema_parameters(parameters);
		}
		
		color_space= CLRSPC_SYCC;
		prec= 12;
		numcomps= 3;
	}
	else { 
		/* Get settings from the imbuf */
		color_space = (ibuf->ftype & JP2_YCC) ? CLRSPC_SYCC : CLRSPC_SRGB;
		
		if      (ibuf->ftype & JP2_16BIT) prec= 16;
		else if (ibuf->ftype & JP2_12BIT) prec= 12;
		else                              prec= 8;
		
		/* 32bit images == alpha channel */
		/* grayscale not supported yet */
		numcomps= (ibuf->depth==32) ? 4 : 3;
	}
	
	w= ibuf->x;
	h= ibuf->y;
	
	
	/* initialize image components */
	memset(&cmptparm[0], 0, 4 * sizeof(opj_image_cmptparm_t));
	for(i = 0; i < numcomps; i++) {
		cmptparm[i].prec = prec;
		cmptparm[i].bpp = prec;
		cmptparm[i].sgnd = 0;
		cmptparm[i].dx = subsampling_dx;
		cmptparm[i].dy = subsampling_dy;
		cmptparm[i].w = w;
		cmptparm[i].h = h;
	}
	/* create the image */
	image = opj_image_create(numcomps, &cmptparm[0], color_space);
	if(!image) {
		printf("Error: opj_image_create() failed\n");
		return NULL;
	}

	/* set image offset and reference grid */
	image->x0 = parameters->image_offset_x0;
	image->y0 = parameters->image_offset_y0;
	image->x1 = parameters->image_offset_x0 + (w - 1) *	subsampling_dx + 1;
	image->y1 = parameters->image_offset_y0 + (h - 1) *	subsampling_dy + 1;
	
	/* set image data */
	rect = (unsigned char*) ibuf->rect;
	rect_float= ibuf->rect_float;
	
	if (rect_float && rect && prec==8) {
		/* No need to use the floating point buffer, just write the 8 bits from the char buffer */
		rect_float= NULL;
	}
	
	
	if (rect_float) {
		float rgb[3];
		
		switch (prec) {
		case 8: /* Convert Blender's float color channels to 8, 12 or 16 bit ints */
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect_float+=4) {
					i = y_row + x;
					
					if (ibuf->profile == IB_PROFILE_LINEAR_RGB)
						linearrgb_to_srgb_v3_v3(rgb, rect_float);
					else
						copy_v3_v3(rgb, rect_float);
				
					image->comps[0].data[i] = DOWNSAMPLE_FLOAT_TO_8BIT(rgb[0]);
					image->comps[1].data[i] = DOWNSAMPLE_FLOAT_TO_8BIT(rgb[1]);
					image->comps[2].data[i] = DOWNSAMPLE_FLOAT_TO_8BIT(rgb[2]);
					if (numcomps>3)
						image->comps[3].data[i] = DOWNSAMPLE_FLOAT_TO_8BIT(rect_float[3]);
				}
			}
			break;
			
		case 12:
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect_float+=4) {
					i = y_row + x;
					
					if (ibuf->profile == IB_PROFILE_LINEAR_RGB)
						linearrgb_to_srgb_v3_v3(rgb, rect_float);
					else
						copy_v3_v3(rgb, rect_float);
				
					image->comps[0].data[i] = DOWNSAMPLE_FLOAT_TO_12BIT(rgb[0]);
					image->comps[1].data[i] = DOWNSAMPLE_FLOAT_TO_12BIT(rgb[1]);
					image->comps[2].data[i] = DOWNSAMPLE_FLOAT_TO_12BIT(rgb[2]);
					if (numcomps>3)
						image->comps[3].data[i] = DOWNSAMPLE_FLOAT_TO_12BIT(rect_float[3]);
				}
			}
			break;
		case 16:
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect_float+=4) {
					i = y_row + x;
					
					if (ibuf->profile == IB_PROFILE_LINEAR_RGB)
						linearrgb_to_srgb_v3_v3(rgb, rect_float);
					else
						copy_v3_v3(rgb, rect_float);
				
					image->comps[0].data[i] = DOWNSAMPLE_FLOAT_TO_16BIT(rgb[0]);
					image->comps[1].data[i] = DOWNSAMPLE_FLOAT_TO_16BIT(rgb[1]);
					image->comps[2].data[i] = DOWNSAMPLE_FLOAT_TO_16BIT(rgb[2]);
					if (numcomps>3)
						image->comps[3].data[i] = DOWNSAMPLE_FLOAT_TO_16BIT(rect_float[3]);
				}
			}
			break;
		}
	} else {
		/* just use rect*/
		switch (prec) {
		case 8:
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect+=4) {
					i = y_row + x;
				
					image->comps[0].data[i] = rect[0];
					image->comps[1].data[i] = rect[1];
					image->comps[2].data[i] = rect[2];
					if (numcomps>3)
						image->comps[3].data[i] = rect[3];
				}
			}
			break;
			
		case 12: /* Upsampling: a bit pointless, but best to write the bit depth requested */
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect+=4) {
					i = y_row + x;
				
					image->comps[0].data[i]= UPSAMPLE_8_TO_12(rect[0]);
					image->comps[1].data[i]= UPSAMPLE_8_TO_12(rect[1]);
					image->comps[2].data[i]= UPSAMPLE_8_TO_12(rect[2]);
					if (numcomps>3)
						image->comps[3].data[i]= UPSAMPLE_8_TO_12(rect[3]);
				}
			}
			break;
		case 16:
			for(y=h-1; y>=0; y--) {
				y_row = y*w;
				for(x=0; x<w; x++, rect+=4) {
					i = y_row + x;
				
					image->comps[0].data[i]= UPSAMPLE_8_TO_16(rect[0]);
					image->comps[1].data[i]= UPSAMPLE_8_TO_16(rect[1]);
					image->comps[2].data[i]= UPSAMPLE_8_TO_16(rect[2]);
					if (numcomps>3)
						image->comps[3].data[i]= UPSAMPLE_8_TO_16(rect[3]);
				}
			}
			break;
		}
	}
	
	/* Decide if MCT should be used */
	parameters->tcp_mct = image->numcomps == 3 ? 1 : 0;
	
	if(parameters->cp_cinema){
		cinema_setup_encoder(parameters,image,&img_fol);
	}
	
	if (img_fol.rates)
		MEM_freeN(img_fol.rates);
	
	return image;
}
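The 8-bit buffer path in Example #3 widens each byte with UPSAMPLE_8_TO_12 and UPSAMPLE_8_TO_16 macros, also not shown in the excerpt. A minimal sketch, assuming a plain left shift:

/* Sketch (assumption): widen an 8-bit channel value to 12 or 16 bits by
 * shifting it into the high bits of the wider range. */
#define UPSAMPLE_8_TO_12(v) ((v) << 4)
#define UPSAMPLE_8_TO_16(v) ((v) << 8)

Note that a plain shift maps 255 to 4080 and 65280 rather than to full scale (4095 and 65535); a bit-replicating variant such as ((v) << 4) | ((v) >> 4) would reach full scale, but the shift matches the "a bit pointless" spirit of the comment in the example.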