/** init_parse_files
 * Verifies the PNG input files and prepares YUV4MPEG header information.
 *
 * Decodes the first frame (param->begin) with decode_png(..., 0, ...) —
 * presumably a header-only probe that fills param->width/height (the full
 * decode in generate_YUV4MPEG passes 1; TODO confirm against decode_png) —
 * then validates the interlace/interleave option combination.
 *
 * @param param global parameter block (pngformatstr, begin, interlace, ...)
 * @returns 0 on success; exits via mjpeg_error_exit1() on any fatal error
 */
static int init_parse_files(parameters_t *param)
{
  /* FILENAME_MAX keeps the buffer size consistent with generate_YUV4MPEG()
     (was a magic 255). */
  char pngname[FILENAME_MAX];

  snprintf(pngname, sizeof(pngname), param->pngformatstr, param->begin);
  mjpeg_debug("Analyzing %s to get the right pic params", pngname);

  if (decode_png(pngname, 0, param) == -1)
    /* No trailing '\n': the mjpeg_* loggers terminate the line themselves,
       matching every other message in this file. */
    mjpeg_error_exit1("Reading of %s failed.", pngname);

  mjpeg_info("Image dimensions are %ux%u", param->width, param->height);
  mjpeg_info("Movie frame rate is: %f frames/second",
             Y4M_RATIO_DBL(param->framerate));

  switch (param->interlace)
    {
    case Y4M_ILACE_NONE:
      mjpeg_info("Non-interlaced/progressive frames.");
      break;
    case Y4M_ILACE_BOTTOM_FIRST:
      mjpeg_info("Interlaced frames, bottom field first.");
      break;
    case Y4M_ILACE_TOP_FIRST:
      mjpeg_info("Interlaced frames, top field first.");
      break;
    default:
      mjpeg_error_exit1("Interlace has not been specified (use -I option)");
      break;
    }

  /* Interlaced material additionally needs the interleave mode (-L). */
  if ((param->interlace != Y4M_ILACE_NONE) && (param->interleave == -1))
    mjpeg_error_exit1("Interleave has not been specified (use -L option)");

  /* Non-interleaved input stores each field as a separate picture, so the
     full frame is twice the height of a single input image. */
  if (!(param->interleave) && (param->interlace != Y4M_ILACE_NONE))
    {
      param->height *= 2;
      mjpeg_info("Non-interleaved fields (image height doubled)");
    }

  mjpeg_info("Frame size: %u x %u", param->width, param->height);

  return 0;
}
/**
 * Decodes the given byte array into the <tt>ImageData</tt>.
 * <p>
 * Java declaration:
 * <pre>
 *     loadPNG(Ljavax/microedition/lcdui/ImageData;[BII)Z
 * </pre>
 *
 * @param imageData the ImageData to load to
 * @param imageBytes A byte array containing the encoded PNG image data
 * @param offset The start of the image data within the byte array
 * @param length The length of the image data in the byte array
 *
 * @return true if there is alpha data
 *
 * NOTE(review): on decode failure an IllegalArgumentException is thrown,
 * but the function still falls through to KNI_ReturnBoolean(status) with
 * status holding decode_png()'s return value — presumably the KNI runtime
 * discards the return value once an exception is pending; confirm.
 */
KNIEXPORT KNI_RETURNTYPE_BOOLEAN
KNIDECL(javax_microedition_lcdui_ImageDataFactory_loadPNG) {
    /* KNI parameters are read by 1-based index (see Java declaration). */
    int            length = KNI_GetParameterAsInt(4);
    int            offset = KNI_GetParameterAsInt(3);
    int            status = KNI_TRUE;
    unsigned char* srcBuffer = NULL;
    gxj_screen_buffer image;
    java_imagedata * midpImageData = NULL;

    /* variable to hold error codes */
    gxutl_native_image_error_codes creationError = GXUTL_NATIVE_IMAGE_NO_ERROR;

    /* All object references must live in declared handles so the GC can
       track them for the duration of this native call. */
    KNI_StartHandles(4);
    KNI_DeclareHandle(alphaData);
    KNI_DeclareHandle(pixelData);
    KNI_DeclareHandle(pngData);
    KNI_DeclareHandle(imageData);

    KNI_GetParameterAsObject(2, pngData);
    KNI_GetParameterAsObject(1, imageData);

    midpImageData = GXAPI_GET_IMAGEDATA_PTR(imageData);

    /* assert
     * (KNI_IsNullHandle(pngData))
     */
    srcBuffer = (unsigned char *)JavaByteArray(pngData);

    /*
     * JAVA_TRACE("loadPNG pngData length=%d  %x\n",
     *            JavaByteArray(pngData)->length, srcBuffer);
     */

    /* Destination geometry comes from the already-created ImageData. */
    image.width = midpImageData->width;
    image.height = midpImageData->height;

    /* Re-anchor the pixel array in a handle before pinning its storage. */
    unhand(jbyte_array, pixelData) = midpImageData->pixelData;
    if (!KNI_IsNullHandle(pixelData)) {
        image.pixelData = (gxj_pixel_type *)JavaByteArray(pixelData);

        /*
         * JAVA_TRACE("loadPNG pixelData length=%d\n",
         *            JavaByteArray(pixelData)->length);
         */
    } else {
        image.pixelData = NULL;
    }

    /* Same for the (optional) alpha channel array. */
    unhand(jbyte_array, alphaData) = midpImageData->alphaData;
    if (!KNI_IsNullHandle(alphaData)) {
        image.alphaData = (gxj_alpha_type *)JavaByteArray(alphaData);

        /*
         * JAVA_TRACE("decodePNG alphaData length=%d\n",
         *            JavaByteArray(alphaData)->length);
         */
    } else {
        image.alphaData = NULL;
    }

    /* assert
     * (imagedata.pixelData != NULL && imagedata.alphaData != NULL)
     */

    /* Decode the PNG payload (starting at offset) into the pinned buffers;
       the return value reports whether alpha data was present. */
    status = decode_png((srcBuffer + offset), length, &image, &creationError);

    if (GXUTL_NATIVE_IMAGE_NO_ERROR != creationError) {
        KNI_ThrowNew(midpIllegalArgumentException, NULL);
    }

    KNI_EndHandles();
    KNI_ReturnBoolean(status);
}
/**
 * Decodes the given input data into a cache representation that can
 * be saved and loaded quickly.
 * The input data should be in a self-identifying format; that is,
 * the data must contain a description of the decoding process.
 *
 * Ownership: on MIDP_ERROR_NONE the caller owns *ret_dataBuffer and must
 * release it with midpFree().
 *
 *  @param srcBuffer input data to be decoded.
 *  @param length length of the input data.
 *  @param ret_dataBuffer pointer to the platform representation data that
 *         be saved.
 *  @param ret_length pointer to the length of the return data.
 *  @return one of error codes:
 *              MIDP_ERROR_NONE,
 *              MIDP_ERROR_OUT_MEM,
 *              MIDP_ERROR_UNSUPPORTED,
 *              MIDP_ERROR_OUT_OF_RESOURCE,
 *              MIDP_ERROR_IMAGE_CORRUPTED
 */
MIDP_ERROR gx_decode_data2cache(unsigned char* srcBuffer,
                                unsigned int length,
                                unsigned char** ret_dataBuffer,
                                unsigned int* ret_length) {

    unsigned int pixelSize, alphaSize;
    gxutl_image_format format;
    MIDP_ERROR err;
    gxj_screen_buffer sbuf;
    gxutl_image_buffer_raw *rawBuffer;
    gxutl_native_image_error_codes creationError = GXUTL_NATIVE_IMAGE_NO_ERROR;

    /* Sniff the format and geometry from the self-identifying header. */
    err = gxutl_image_get_info(srcBuffer, length, &format,
                               (unsigned int *)&sbuf.width,
                               (unsigned int *)&sbuf.height);
    if (err != MIDP_ERROR_NONE) {
        return err;
    }

    /* Worst-case sizes for the decoded planes (alpha trimmed below). */
    pixelSize = sizeof(gxj_pixel_type) * sbuf.width * sbuf.height;
    alphaSize = sizeof(gxj_alpha_type) * sbuf.width * sbuf.height;

    switch (format) {

    case GXUTL_IMAGE_FORMAT_JPEG:
        /* JPEG does not contain alpha data */
        alphaSize = 0;
        /* Fall through */
    case GXUTL_IMAGE_FORMAT_PNG:
        /* Decode PNG/JPEG to screen buffer format.
           The raw cache record is a single allocation: header fields
           followed by pixel data, then (optionally) alpha data. */
        rawBuffer = (gxutl_image_buffer_raw *)
            midpMalloc(offsetof(gxutl_image_buffer_raw, data)+pixelSize+alphaSize);

        if (rawBuffer == NULL) {
            return MIDP_ERROR_OUT_MEM;
        }

        sbuf.pixelData = (gxj_pixel_type *)rawBuffer->data;

        if (format == GXUTL_IMAGE_FORMAT_PNG) {
            sbuf.alphaData = rawBuffer->data + pixelSize;

            /* decode_png reports whether the image carried alpha data. */
            rawBuffer->hasAlpha = decode_png(srcBuffer, length,
                                             &sbuf, &creationError);
            if (!rawBuffer->hasAlpha) {
                sbuf.alphaData = NULL;
                /* NOTE(review): alphaSize is excluded from *ret_length but
                   the allocation above already reserved it — the record is
                   over-allocated by alphaSize bytes, which is harmless. */
                alphaSize = 0; /* Exclude alpha data */
            }
        } else {
            sbuf.alphaData = NULL;
            rawBuffer->hasAlpha = KNI_FALSE;
            decode_jpeg(srcBuffer, length, &sbuf, &creationError);
        }

        if (GXUTL_NATIVE_IMAGE_NO_ERROR != creationError) {
            /* Decode failed: release the partially filled record. */
            midpFree(rawBuffer);
            return MIDP_ERROR_IMAGE_CORRUPTED;
        }

        /* Stamp the 4-byte raw-format magic and the geometry. */
        memcpy(rawBuffer->header, gxutl_raw_header, 4);
        rawBuffer->width  = sbuf.width;  /* Use default endian */
        rawBuffer->height = sbuf.height; /* Use default endian */

        /* Ownership of rawBuffer transfers to the caller. */
        *ret_dataBuffer = (unsigned char *)rawBuffer;
        *ret_length = offsetof(gxutl_image_buffer_raw, data)+pixelSize+alphaSize;

        return MIDP_ERROR_NONE;

    case GXUTL_IMAGE_FORMAT_RAW:
        /* Already in screen buffer format, simply copy the data */
        *ret_dataBuffer = (unsigned char *)midpMalloc(length);
        if (*ret_dataBuffer == NULL) {
            return MIDP_ERROR_OUT_MEM;
        } else {
            memcpy(*ret_dataBuffer, srcBuffer, length);
            *ret_length = length;
            return MIDP_ERROR_NONE;
        }

    default:
        return MIDP_ERROR_UNSUPPORTED;
    } /* switch (image_type) */
}
/** generate_YUV4MPEG
 * Reads the PNG input frames one by one, converts them to planar YUV and
 * writes a YUV4MPEG stream to standard output.
 *
 * The stream itself goes to STDOUT_FILENO, so all diagnostics MUST go
 * through the mjpeg_* loggers (stderr) — never printf to stdout here.
 *
 * @param param global parameter block (geometry, frame range, chroma mode)
 * @returns 0 on success; exits via mjpeg_error_exit1() on fatal errors
 */
static int generate_YUV4MPEG(parameters_t *param)
{
  uint32_t frame;
  char pngname[FILENAME_MAX];
  uint8_t *yuv[3];              /* Y/U/V planes of the decoded PNG */
  y4m_stream_info_t streaminfo;
  y4m_frame_info_t frameinfo;

  /* YUV4MPEG requires an even luma width; round odd widths up. */
  if ((param->width % 2) == 0)
    param->new_width = param->width;
  else
    {
      param->new_width = ((param->width >> 1) + 1) << 1;
      /* BUGFIX: was printf() to stdout, which corrupted the YUV4MPEG
         stream written to STDOUT_FILENO below. */
      mjpeg_info("Setting new, even image width %d", param->new_width);
    }

  mjpeg_info("Now generating YUV4MPEG stream.");
  y4m_init_stream_info(&streaminfo);
  y4m_init_frame_info(&frameinfo);

  y4m_si_set_width(&streaminfo, param->new_width);
  y4m_si_set_height(&streaminfo, param->height);
  y4m_si_set_interlace(&streaminfo, param->interlace);
  y4m_si_set_framerate(&streaminfo, param->framerate);
  y4m_si_set_chroma(&streaminfo, param->ss_mode);

  /* One full-resolution plane per component; chroma is subsampled
     in place later by chroma_subsample(). */
  yuv[0] = malloc(param->new_width * param->height * sizeof(yuv[0][0]));
  yuv[1] = malloc(param->new_width * param->height * sizeof(yuv[1][0]));
  yuv[2] = malloc(param->new_width * param->height * sizeof(yuv[2][0]));
  if ((yuv[0] == NULL) || (yuv[1] == NULL) || (yuv[2] == NULL))
    mjpeg_error_exit1("Out of memory allocating YUV plane buffers.");

  y4m_write_stream_header(STDOUT_FILENO, &streaminfo);

  /* numframes == -1 means "until the input runs out" (loop exits via
     the break below when a frame fails to decode). */
  for (frame = param->begin;
       (frame < param->numframes + param->begin) || (param->numframes == -1);
       frame++)
    {
      snprintf(pngname, sizeof(pngname), param->pngformatstr, frame);

      /* decode_png() delivers the planes through these globals. */
      raw0 = yuv[0];
      raw1 = yuv[1];
      raw2 = yuv[2];

      if (decode_png(pngname, 1, param) == -1)
        {
          mjpeg_info("Read from '%s' failed:  %s", pngname, strerror(errno));
          if (param->numframes == -1)
            {
              mjpeg_info("No more frames.  Stopping.");
              break;            /* we are done; leave the loop */
            }
          else
            {
              /* Fixed frame count requested: pad by repeating the last
                 successfully decoded frame still in yuv[]. */
              mjpeg_info("Rewriting latest frame instead.");
            }
        }
      else
        {
          mjpeg_debug("Converting frame to YUV format.");
          /* Transform colorspace, then subsample (in place) */
          convert_RGB_to_YCbCr(yuv, param->height * param->new_width);
          chroma_subsample(param->ss_mode, yuv,
                           param->new_width, param->height);
        }

      mjpeg_debug("Frame decoded, now writing to output stream.");
      y4m_write_frame(STDOUT_FILENO, &streaminfo, &frameinfo, yuv);
    }

  y4m_fini_stream_info(&streaminfo);
  y4m_fini_frame_info(&frameinfo);
  free(yuv[0]);
  free(yuv[1]);
  free(yuv[2]);

  return 0;
}