// Applies an unsharp-mask sharpening filter, reading from |input| and
// writing into |output|. Both bitmaps must be RGBA_8888; the radius is
// hard-coded to 5 (matches the original behavior). Fails silently.
JNIEXPORT void JNICALL Java_com_jiangpeng_android_antrace_Utils_unsharpMask(JNIEnv* env, jobject thiz, jobject input, jobject output)
{
    AndroidBitmapInfo inputInfo;
    AndroidBitmapInfo outputInfo;
    void* src_pixels = 0;
    void* dst_pixels = 0;
    int ret = 0;

    if ((ret = AndroidBitmap_getInfo(env, input, &inputInfo)) < 0) {
        return;
    }
    if (inputInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, input, &src_pixels)) < 0) {
        return;
    }
    // BUGFIX: every failure path below used to return while |input| was
    // still locked, leaking the pixel lock. Unlock before each early return.
    if ((ret = AndroidBitmap_getInfo(env, output, &outputInfo)) < 0) {
        AndroidBitmap_unlockPixels(env, input);
        return;
    }
    if (outputInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        AndroidBitmap_unlockPixels(env, input);
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, output, &dst_pixels)) < 0) {
        AndroidBitmap_unlockPixels(env, input);
        return;
    }

    unsharpMask(src_pixels, dst_pixels, outputInfo.width, outputInfo.height, inputInfo.stride, 5);

    AndroidBitmap_unlockPixels(env, input);
    AndroidBitmap_unlockPixels(env, output);
}
/* Converts |origbm| (RGBA) to grayscale, writing packed ARGB words into
 * |graybm|. Both bitmaps are locked for the duration of the conversion.
 * Uses equal 0.33 weights for R/G/B (as in the original). */
JNIEXPORT void JNICALL Java_com_leth_convertimage_MainActivity_convertImage(JNIEnv *env, jobject obj, jobject origbm, jobject graybm)
{
    AndroidBitmapInfo infocolor;
    void *pixelscolor;
    AndroidBitmapInfo infogray;
    void *pixelsgray;
    int y;
    int x;

    LOGI("Before convert");
    AndroidBitmap_getInfo(env, origbm, &infocolor);
    /* BUGFIX: the format string has four conversions but only three
     * arguments were passed (undefined behavior). Pass the missing
     * format argument and widen stride to long to match %ld. */
    LOGI("infocolor :: width-%d height-%d format-%d stride-%ld",
         infocolor.width, infocolor.height, infocolor.format, (long)infocolor.stride);
    AndroidBitmap_getInfo(env, graybm, &infogray);
    AndroidBitmap_lockPixels(env, origbm, &pixelscolor);
    AndroidBitmap_lockPixels(env, graybm, &pixelsgray);
    LOGI("Start convert");

    uint8_t tmp;
    for (y = 0; y < infocolor.height; y++) {
        rgba * line = (rgba *) pixelscolor;
        uint32_t * grayline = (uint32_t *) pixelsgray;
        for (x = 0; x < infocolor.width; x++) {
            /* equal-weight grayscale value */
            tmp = line[x].red * 0.33 + line[x].green * 0.33 + line[x].blue * 0.33;
            grayline[x] = (line[x].alpha & 0xff) << 24
                        | (tmp & 0xff) << 16
                        | (tmp & 0xff) << 8
                        | (tmp & 0xff);
        }
        /* advance by stride (bytes), not width, to honor row padding */
        pixelscolor = (int8_t *)pixelscolor + infocolor.stride;
        pixelsgray = (int8_t *) pixelsgray + infogray.stride;
    }

    LOGI("Finish convert");
    AndroidBitmap_unlockPixels(env, origbm);
    AndroidBitmap_unlockPixels(env, graybm);
}
// Copies the contents of an Android Bitmap into an OpenCV Mat (CV_8UC4).
// RGBA_8888 bitmaps are deep-copied directly; any other format is treated
// as RGB_565 and color-converted. On any error the bitmap is unlocked and
// a java.lang.Exception is thrown.
void bitmap_to_mat(JNIEnv *env, jobject &srcBitmap, Mat &srcMat) {
    void *srcPixels = 0;
    AndroidBitmapInfo srcBitmapInfo;
    try {
        AndroidBitmap_getInfo(env, srcBitmap, &srcBitmapInfo);
        AndroidBitmap_lockPixels(env, srcBitmap, &srcPixels);
        const uint32_t rows = srcBitmapInfo.height;
        const uint32_t cols = srcBitmapInfo.width;
        srcMat.create(rows, cols, CV_8UC4);
        if (srcBitmapInfo.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            // Wrap the locked buffer (no copy) and deep-copy it out.
            Mat wrapped(rows, cols, CV_8UC4, srcPixels);
            wrapped.copyTo(srcMat);
        } else {
            // Assume RGB_565 (2 bytes/px) and convert to RGBA.
            Mat wrapped(rows, cols, CV_8UC2, srcPixels);
            cvtColor(wrapped, srcMat, COLOR_BGR5652RGBA);
        }
        AndroidBitmap_unlockPixels(env, srcBitmap);
    } catch (cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, srcBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
    } catch (...) {
        AndroidBitmap_unlockPixels(env, srcBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "unknown");
    }
}
JNIEXPORT void JNICALL Java_makemachine_android_examples_Nati_greyscale(JNIEnv * env, jclass klass, jobject bitmap,jint wind){ void * raw; uint32_t * pixels; int ret,j; int grey; AndroidBitmapInfo info; if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) return; if ((ret = AndroidBitmap_lockPixels(env, bitmap, &raw)) < 0) return; pixels=(uint32_t*) raw; #define USE_CONTRAST STRETCH #ifdef USE_CONTRAST_STRETCH determine_contrast(&pixels,info.height,info.width); #else Qwe::mingrey=0; Qwe::maxgrey=255; #endif greyscale(&pixels,info.height,info.width); make_mean(info.height,info.width,wind); #ifdef USE_DEVIATION make_dev(info.height,info.width,wind); #endif imtoin(info.height,info.width); thresh(info.height,info.width); close(info.height,info.width,2); open(info.height,info.width,2); AndroidBitmap_unlockPixels(env, bitmap); }
// Applies a stack blur of radius |r| to |bitmapIn| in place.
// The bitmap must be RGBA_8888; fails silently otherwise.
JNIEXPORT void JNICALL Java_com_daemon_aroundcircleviewdemo_ImageBlur_blurBitMap(JNIEnv *env, jclass obj, jobject bitmapIn, jint r)
{
    AndroidBitmapInfo infoIn;
    void* pixelsIn;
    int ret;

    // Get image info
    if ((ret = AndroidBitmap_getInfo(env, bitmapIn, &infoIn)) < 0)
        return;

    // Check image format
    if (infoIn.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;

    // Lock the pixel buffer
    if ((ret = AndroidBitmap_lockPixels(env, bitmapIn, &pixelsIn)) < 0) {
        // BUGFIX: the original fell through on failure and blurred an
        // uninitialized pointer. Bail out instead.
        return;
    }

    int h = infoIn.height;
    int w = infoIn.width;

    // Blur in place (StackBlur returns the same buffer).
    pixelsIn = StackBlur((int*)pixelsIn, w, h, r);

    // Unlock
    AndroidBitmap_unlockPixels(env, bitmapIn);
}
/* Initializes the Android RAW video output: caches the JNI env and target
 * bitmap on every call, and probes the bitmap dimensions once (rc->width
 * doubles as the "already initialized" flag). Always returns GF_OK.
 * Cleanup: removed unused locals (pixels, ret) and hoisted the env/bitmap
 * assignments duplicated in both branches. */
GF_Err RAW_Setup(GF_VideoOutput *dr, void *os_handle, void *os_display, u32 init_flags)
{
	RAWCTX;
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("Android vout RAW_Setup\n"));
	rc->env = (JNIEnv *)os_handle;
	rc->bitmap = (jobject *)os_display;
	if (!rc->width) {
		AndroidBitmapInfo info;
		AndroidBitmap_getInfo(rc->env, *(rc->bitmap), &info);
		rc->width = info.width;
		rc->height = info.height;
		rc->locked_data = NULL;
	}
	GF_LOG(GF_LOG_DEBUG, GF_LOG_CORE, ("Android vout rc dims: %d:%d\n", rc->height, rc->width));
	return GF_OK;
}
/* Applies effect |effectNumber| to |bitmap| in place.
 * The bitmap must be RGB_565; logs and returns on any failure. */
JNIEXPORT void JNICALL Java_si_majcn_frameoffame_MainActivity_applyEffect(JNIEnv *env, jobject obj, jobject bitmap, jint effectNumber) {
    AndroidBitmapInfo info;
    int ret;
    void *pixels;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565) {
        /* BUGFIX: message said "RGBA_565", which is not a real format. */
        LOGE("Bitmap format is not RGB_565 !");
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUGFIX: the original fell through and called applyEffect() with
         * an uninitialized pixel pointer. */
        return;
    }

    applyEffect(&info, pixels, effectNumber);

    AndroidBitmap_unlockPixels(env, bitmap);
}
/* Locks the bitmap's pixel buffer for native access and records its stride
 * in |info|. Returns 0 on success, -1 on an allocation failure (worked
 * around per issue #122 — no exception thrown), or -2 on any other failure
 * (a Java IllegalStateException is thrown before returning). */
int lockPixels(JNIEnv *env, jobject jbitmap, GifInfo *info, void **pixels) {
    AndroidBitmapInfo bitmapInfo;
    if (AndroidBitmap_getInfo(env, jbitmap, &bitmapInfo) == ANDROID_BITMAP_RESULT_SUCCESS)
        /* NOTE(review): this stores the bitmap *width* (pixels), not
         * bitmapInfo.stride (bytes). That only matches if GifInfo::stride
         * is interpreted as a pixel count by the renderer — confirm before
         * changing. */
        info->stride = bitmapInfo.width;
    else {
        throwException(env, ILLEGAL_STATE_EXCEPTION_BARE, "Could not get bitmap info");
        return -2;
    }
    const int lockPixelsResult = AndroidBitmap_lockPixels(env, jbitmap, pixels);
    if (lockPixelsResult == ANDROID_BITMAP_RESULT_SUCCESS)
        return 0;
    char *message;
    switch (lockPixelsResult) {
        case ANDROID_BITMAP_RESULT_ALLOCATION_FAILED:
#ifdef DEBUG
            LOGE("bitmap lock allocation failed");
#endif
            /* allocation failures are tolerated without an exception */
            return -1; //#122 workaround
        case ANDROID_BITMAP_RESULT_BAD_PARAMETER:
            message = "Lock pixels error, bad parameter";
            break;
        case ANDROID_BITMAP_RESULT_JNI_EXCEPTION:
            message = "Lock pixels error, JNI exception";
            break;
        default:
            message = "Lock pixels error";
    }
    throwException(env, ILLEGAL_STATE_EXCEPTION_BARE, message);
    return -2;
}
// Decodes frame |frameNr| of the sequence into |bitmap| and returns the
// frame's display delay in milliseconds. Returns 0 and throws an
// IllegalStateException if the bitmap cannot be queried or locked.
static jlong JNICALL nativeGetFrame(
        JNIEnv* env, jobject clazz, jlong frameSequenceStateLong, jint frameNr,
        jobject bitmap, jint previousFrameNr) {
    FrameSequenceState* state =
            reinterpret_cast<FrameSequenceState*>(frameSequenceStateLong);

    AndroidBitmapInfo info;
    if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
        jniThrowException(env, ILLEGAL_STATE_EXEPTION,
                "Couldn't get info from Bitmap ");
        return 0;
    }

    void* pixels;
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
        jniThrowException(env, ILLEGAL_STATE_EXEPTION,
                "Bitmap pixels couldn't be locked");
        return 0;
    }

    // stride is in bytes; Color8888 pixels are 4 bytes each, hence >> 2.
    const int pixelStride = info.stride >> 2;
    const jlong delayMs = state->drawFrame(
            frameNr, (Color8888*) pixels, pixelStride, previousFrameNr);

    AndroidBitmap_unlockPixels(env, bitmap);
    return delayMs;
}
/* Decodes one video frame into |pBitmap| (which must be RGBA_8888) via the
 * global gBitmap pixel pointer, and returns decode_a_frame()'s result.
 * Returns -1 on any bitmap error. */
JNIEXPORT jint JNICALL Java_feipeng_andzop_render_RenderView_naRenderAFrame(JNIEnv * pEnv, jobject pObj, jobject pBitmap, int _width, int _height, float _roiSh, float _roiSw, float _roiEh, float _roiEw) {
    AndroidBitmapInfo lInfo;
    int lRet;
    LOGI(3, "start of render_a_frame");
    /* 1. retrieve information about the bitmap */
    if ((lRet = AndroidBitmap_getInfo(pEnv, pBitmap, &lInfo)) < 0) {
        LOGE(1, "AndroidBitmap_getInfo failed! error = %d", lRet);
        return -1; /* BUGFIX: function returns jint; a bare `return;` is invalid */
    }
    if (lInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE(1, "Bitmap format is not RGBA_8888!");
        return -1; /* BUGFIX: same — must return a value */
    }
    /* 2. lock the pixel buffer and retrieve a pointer to it */
    if ((lRet = AndroidBitmap_lockPixels(pEnv, pBitmap, &gBitmap)) < 0) {
        LOGE(1, "AndroidBitmap_lockPixels() failed! error = %d", lRet);
        /* BUGFIX: previously fell through and decoded into an unlocked buffer */
        return -1;
    }
    /* 3. decode a video frame: pBitmap is filled with decoded pixels */
    lRet = decode_a_frame(_width, _height, _roiSh, _roiSw, _roiEh, _roiEw);
    AndroidBitmap_unlockPixels(pEnv, pBitmap);
    LOGI(3, "~~~~~~~~~~end of rendering a frame~~~~~~~~~~~~~~~~~`");
    return lRet;
}
// Converts |bitmap| to grayscale in place using the classic luma weights
// (0.3 R + 0.59 G + 0.11 B) and returns the same bitmap. Throws a
// RuntimeException and returns 0 if the bitmap can't be queried or locked.
jobject Java_com_packpublishing_asynchronousandroid_chapter9_GrayImageLoader_convertImageToGray(JNIEnv *env, jobject obj, jobject bitmap)
{
    AndroidBitmapInfo info;
    void *pixels;
    int ret;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        env->ThrowNew(env->FindClass("java/lang/RuntimeException"),
                      "Failed to get Information from the Bitmap!");
        return 0;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels)) < 0) {
        env->ThrowNew(env->FindClass("java/lang/RuntimeException"),
                      "Failed to lock Bitmap pixels !");
        return 0;
    }

    rgba *px = (rgba *) pixels;
    const uint32_t total = info.width * info.height;
    for (uint32_t idx = 0; idx < total; idx++) {
        uint8_t gray = px[idx].red * 0.3 + px[idx].green * 0.59 + px[idx].blue * 0.11;
        px[idx].red = gray;
        px[idx].green = gray;
        px[idx].blue = gray;
    }

    AndroidBitmap_unlockPixels(env, bitmap);
    return bitmap;
}
// Applies a stack blur of radius |r| to |bitmapIn| in place.
// The bitmap must be RGBA_8888; fails silently otherwise.
JNIEXPORT void JNICALL Java_net_qiujuer_genius_app_BlurNative_fastBlurBitmap(JNIEnv *env, jclass obj, jobject bitmapIn, jint r)
{
    AndroidBitmapInfo infoIn;
    void* pixelsIn;
    int ret;

    // Get image info
    if ((ret = AndroidBitmap_getInfo(env, bitmapIn, &infoIn)) < 0)
        return;

    // Check image format
    if (infoIn.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;

    // Lock the pixel buffer
    if ((ret = AndroidBitmap_lockPixels(env, bitmapIn, &pixelsIn)) < 0) {
        // BUGFIX: the original fell through on failure and blurred an
        // uninitialized pointer. Bail out instead.
        return;
    }

    int h = infoIn.height;
    int w = infoIn.width;

    // Blur in place (stackBlur returns the same buffer).
    pixelsIn = stackBlur((int*)pixelsIn, w, h, r);

    // Unlocks everything
    AndroidBitmap_unlockPixels(env, bitmapIn);
}
/* Copies a dirty rectangle (x, y, width, height) from the FreeRDP GDI
 * primary buffer into the Java-side bitmap. Returns JNI_TRUE on success,
 * JNI_FALSE if the bitmap could not be queried or locked. */
JNIEXPORT jboolean JNICALL jni_freerdp_update_graphics(
	JNIEnv *env, jclass cls, jint instance, jobject bitmap,
	jint x, jint y, jint width, jint height)
{
	int ret;
	void* pixels;
	AndroidBitmapInfo info;

	/* NOTE(review): casting a 32-bit jint straight to a pointer truncates
	 * on 64-bit ABIs — this only works while the native instance handle
	 * fits in 32 bits. Verify how the Java side stores the pointer. */
	freerdp* inst = (freerdp*)instance;
	rdpGdi *gdi = inst->context->gdi;

	if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0)
	{
		DEBUG_ANDROID("AndroidBitmap_getInfo() failed ! error=%d", ret);
		return JNI_FALSE;
	}
	if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0)
	{
		DEBUG_ANDROID("AndroidBitmap_lockPixels() failed ! error=%d", ret);
		return JNI_FALSE;
	}

	copy_pixel_buffer(pixels, gdi->primary_buffer, x, y, width, height,
		gdi->width, gdi->height, gdi->bytesPerPixel);

	AndroidBitmap_unlockPixels(env, bitmap);
	return JNI_TRUE;
}
/* Loads the screenshot stored in save-state slot |slot| (located via
 * |dirpath|/|itemnum|) and blits it row by row into |bitmap|. */
void Java_org_yabause_android_YabauseRunnable_stateSlotScreenshot(
    JNIEnv* env, jobject obj, jobject dirpath, jobject itemnum, int slot, jobject bitmap )
{
    int outputwidth, outputheight;
    u32 * buffer, * cur;
    AndroidBitmapInfo info;
    void * pixels;
    unsigned char * row;
    int x, y;
    jboolean dummy;

    const char * dp = (*env)->GetStringUTFChars(env, dirpath, &dummy);
    const char * in = (*env)->GetStringUTFChars(env, itemnum, &dummy);
    int failed = LoadStateSlotScreenshot(dp, in, slot, &outputwidth, &outputheight, &buffer);
    /* BUGFIX: the UTF strings were never released (JNI leak) — on either
     * path. Release them as soon as the native call is done with them. */
    (*env)->ReleaseStringUTFChars(env, dirpath, dp);
    (*env)->ReleaseStringUTFChars(env, itemnum, in);
    if (0 != failed) return;

    AndroidBitmap_getInfo(env, bitmap, &info);
    AndroidBitmap_lockPixels(env, bitmap, &pixels);

    cur = buffer;
    /* BUGFIX: the original advanced a void* by info.stride — arithmetic on
     * void* is a GNU extension; use a byte pointer for the row cursor. */
    row = (unsigned char *) pixels;
    for (y = 0; y < info.height; y++)
    {
        for (x = 0; x < info.width; x++)
        {
            ((uint32_t *) row)[x] = *(cur + x);
        }
        row += info.stride;      /* stride is in bytes */
        cur += outputwidth;
    }
    free(buffer);

    AndroidBitmap_unlockPixels(env, bitmap);
}
/* Detects the hair region of |bmp| (RGBA_8888) in place: samples the hair
 * color around the eye position (eyes_cp, eyes_w), builds a hair-intensity
 * map, and rewrites the bitmap as a normalized 256-level gray map. */
JNIEXPORT void JNICALL Java_jp_dego_sample_ipcv_MainActivity_getHairArea(JNIEnv *env, jobject obj, jobject bmp, jint eyes_cp, jint eyes_w)
{
    AndroidBitmapInfo info;
    void* pixels;
    /* Fetch bitmap info */
    if (AndroidBitmap_getInfo(env, bmp, &info) < 0)
        return;
    /* Only RGBA_8888 is supported */
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;
    /* Lock the pixel buffer */
    if (AndroidBitmap_lockPixels(env, bmp, &pixels) < 0)
        return;

    jint *p = pixels;
    int w = info.width;
    int h = info.height;

    /* Sample the hair color near the eyes */
    LAB *sc;
    sc = getSampleColor(p, w, h, (int)eyes_cp, (int)eyes_w);

    /* Build the hair-color intensity map */
    float *map;
    map = getHairIntensity(p, w, h, sc);

    int i, j;
    /* Find minimum and maximum map values for normalization */
    float max, min;
    max = *map;
    min = *map;
    for (i = 1; i < info.width * info.height; i++) {
        if (max < *(map + i)) max = *(map + i);
        if (min > *(map + i)) min = *(map + i);
    }

    /* Normalize to a 256-level grayscale image */
    unsigned char gray;
    int s;
    jint col;
    for (j = 0; j < info.height; j++) {
        for (i = 0; i < info.width; i++) {
            s = (int)((*(map + j * info.width + i) - (float)min) / ((float)max - min) * 255.0f);
            if (s > 255) gray = 255;
            else if (s < 1) gray = 0;
            else gray = (unsigned char)s;
            col = createColor(0xFF, gray, gray, gray);
            p[j * info.width + i] = col;
        }
    }
    free(map);
    /* NOTE(review): |sc| returned by getSampleColor() is not freed here —
     * its ownership is not visible in this file; confirm whether it leaks. */

    /* BUGFIX: the buffer was locked but never unlocked, leaving the bitmap
     * permanently locked after this call. */
    AndroidBitmap_unlockPixels(env, bmp);
}
// Copies the native buffer whose address is packed into |data| (a jlong)
// into |bitmap|. Supports RGBA_8888 (4 bytes/px) and A_8 (1 byte/px).
// Logs via trace() and returns on any failure.
void Java_com_zspace_Android_updateBitmap(JNIEnv* env, jobject that, jobject bitmap, jlong data) {
    void* buffer = ll2p(data);

    AndroidBitmapInfo info = {0};
    int r = AndroidBitmap_getInfo(env, bitmap, &info);
    if (r != 0) {
        trace("AndroidBitmap_getInfo() failed ! error=%d", r);
        return;
    }

    const int isRgba = info.format == ANDROID_BITMAP_FORMAT_RGBA_8888;
    if (!isRgba && info.format != ANDROID_BITMAP_FORMAT_A_8) {
        trace("Bitmap format is not RGBA_8888 or A_8");
        return;
    }

    void* pixels = null;
    r = AndroidBitmap_lockPixels(env, bitmap, &pixels);
    if (r != 0) {
        trace("AndroidBitmap_lockPixels() failed ! error=%d", r);
        return;
    }

    int width = info.width;
    int height = info.height;
    memcpy(pixels, buffer, width * height * (isRgba ? sizeof(int) : 1));

    AndroidBitmap_unlockPixels(env, bitmap);
}
/* Prepares the display pipeline for a bitmap of |width| x |height|:
 * allocates the display-util state, locks the bitmap pixels and binds them
 * as the RGBA output frame buffer, and creates the swscale conversion
 * context from the stream's native format to RGBA.
 * Returns 0 on success, -1 on any failure.
 * NOTE(review): on the failure paths vdu (and the allocated frames / the
 * locked bitmap) are not released — confirm whether a caller cleans up. */
int naPrepareDisplay(JNIEnv *pEnv, jobject pObj, jobject pBitmap, jint width, jint height) {
    VideoState* vs = gvs;
    VideoDisplayUtil* vdu = av_mallocz(sizeof(VideoDisplayUtil));
    gvdu = vdu;
    vs->frame = avcodec_alloc_frame();
    vdu->frameNum = 0;
    vdu->width = width;
    vdu->height = height;
    vdu->pFrameRGBA = avcodec_alloc_frame();
    AndroidBitmapInfo linfo;
    int lret;
    //1. retrieve information about the bitmap
    if ((lret = AndroidBitmap_getInfo(pEnv, pBitmap, &linfo)) < 0) {
        LOGE(1, "AndroidBitmap_getinfo failed! error = %d", lret);
        return -1;
    }
    if (linfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE(1, "bitmap format is not rgba_8888!");
        return -1;
    }
    //2. lock the pixel buffer and retrieve a pointer to it
    if ((lret = AndroidBitmap_lockPixels(pEnv, pBitmap, &vdu->pBitmap)) < 0) {
        LOGE(1, "AndroidBitmap_lockpixels() failed! error = %d", lret);
        return -1;
    }
    //for android, we use the bitmap buffer as the buffer for pFrameRGBA,
    //so decoded RGBA pixels land directly in the Java bitmap
    avpicture_fill((AVPicture*)vdu->pFrameRGBA, vdu->pBitmap, PIX_FMT_RGBA, width, height);
    // scale/convert from the codec's native size & pixel format to RGBA
    vdu->img_resample_ctx = sws_getContext(vs->pVideoStream->codec->width,
        vs->pVideoStream->codec->height,
        vs->pVideoStream->codec->pix_fmt,
        width, height, PIX_FMT_RGBA, SWS_BICUBIC, NULL, NULL, NULL);
    if (NULL == vdu->img_resample_ctx) {
        LOGE(1, "error initialize the video frame conversion context");
        return -1;
    }
    vs->nextFrameTime = av_gettime() + 50*1000; //introduce 50 milliseconds of initial delay
    return 0;
}
/* Letter-separation entry point for the crop activity. Currently only
 * validates and locks/unlocks the bitmap; the separation step itself is
 * not implemented yet. */
JNIEXPORT void JNICALL Java_app_ssm_duck_duckapp_CropActivity_seperateLetter(JNIEnv *env, jobject obj, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;

    /* Query bitmap metadata first. */
    if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
        LOGE("AndroidBitmap_getInfo() failed!");
        return;
    }
    LOGI("imagesize(%d,%d)\n", info.width, info.height);

    /* Format check is informational only — the original deliberately does
     * not abort on non-RGB_565 bitmaps (note the commented-out return). */
    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565) {
        LOGE("Bitmap format is not RGB_565:%d\n", info.format);
        //return;
    }

    /* Attempt to lock the pixel buffer. */
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed!");
        return;
    }

    /* function body (letter separation) goes here */
    AndroidBitmap_unlockPixels(env, bitmap);
}
// Writes the processed image held in Qwe::image into |bitmap|, rotated 90
// degrees, then frees all of the pipeline's scratch buffers.
JNIEXPORT void JNICALL Java_makemachine_android_examples_Nati_getrot(JNIEnv * env, jclass klass, jobject bitmap) {
    int ret;
    uint32_t * pixels;
    void * raw;
    AndroidBitmapInfo info;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) return;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &raw)) < 0) return;
    pixels = (uint32_t*)raw;
    // PERF/BUGFIX: the original first copied Qwe::image verbatim into the
    // bitmap and then overwrote every single pixel with the rotated copy
    // below; the verbatim pass was dead work and has been removed.
    int h, w, oH, oW, nW;
    oH = info.width;
    oW = info.height;
    nW = info.width;
    for (w = 0; w < oW; w++) {
        for (h = 0; h < oH; h++) {
            // rotate: source (h, w) -> destination row w, mirrored column
            *(pixels + nW - h - 1 + nW * w) = *(Qwe::image + oW * h + w);
        }
    }
    AndroidBitmap_unlockPixels(env, bitmap);
    // Release the pipeline's scratch buffers.
    free(Qwe::image);
    free(Qwe::integral);
    free(Qwe::mean);
    free(Qwe::dev);
#ifdef USE_DEVIATION
    free(Qwe::intsquare);
#endif
}
/* Converts |bitmap| to sepia tone in place. Supports RGBA_8888 and
 * RGB_565; logs and returns for any other format or JNI failure. */
void Java_com_example_nativebitmap_MainActivity_sepiaImage(JNIEnv* env, jobject thiz, jobject bitmap) {
    /* Fetch the image metadata. */
    AndroidBitmapInfo info;
    if (0 > AndroidBitmap_getInfo(env, bitmap, &info)) { /////-----(2)
        LOGE("AndroidBitmap_getInfo() failed !");
        return;
    }
    LOGI("imagesize(%d,%d)\n", info.width, info.height);

    /* Abort for any format we can't convert. */
    if (!(info.format == ANDROID_BITMAP_FORMAT_RGBA_8888
            || info.format == ANDROID_BITMAP_FORMAT_RGB_565)) { /////-----(3)
        LOGE("Can't convert : format=%s", format_msg[info.format]);
        return;
    }
    LOGI("ImageFormat=%s", format_msg[info.format]);

    /* Lock the pixels so no one else touches the bitmap while we work. */
    void* pixels;
    if (0 > AndroidBitmap_lockPixels(env, bitmap, &pixels)) { /////-----(4)
        LOGE("AndroidBitmap_lockPixels() failed !");
        return;
    }

    /* Dispatch to the per-format sepia converter. */
    switch (info.format) { /////-----(5)
    case ANDROID_BITMAP_FORMAT_RGBA_8888:
        convert_to_sepia_rgba8888(&info, pixels);
        break;
    case ANDROID_BITMAP_FORMAT_RGB_565:
        convert_to_sepia_rgb565(&info, pixels);
        break;
    }

    /* Allow others to access the bitmap again. */
    AndroidBitmap_unlockPixels(env, bitmap); /////-----(6)
}
// Snapshots the bitmap's pixel data into the (file-scope) |pixels| buffer
// for later upload as a GL texture.
JNIEXPORT void JNICALL Java_br_odb_nehe_lesson08_GL2JNILib_setTexture(JNIEnv *env, jclass type, jobject bitmap) {
    void *addr;
    AndroidBitmapInfo info;
    int errorCode;

    // Lock first, then query info (errors are logged but not fatal).
    if ((errorCode = AndroidBitmap_lockPixels(env, bitmap, &addr)) != 0) {
        LOGI("error %d", errorCode);
    }

    if ((errorCode = AndroidBitmap_getInfo(env, bitmap, &info)) != 0) {
        LOGI("error %d", errorCode);
    }

    LOGI("bitmap info: %d wide, %d tall, %d ints per pixel", info.width, info.height, info.format);

    // NOTE(review): info.format is a format *enum*, not a bytes-per-pixel
    // count. For RGBA_8888 the enum value happens to be 1, so size ends up
    // width*height and the memcpy below (size * sizeof(int)) copies exactly
    // one int per pixel — accidentally correct for RGBA_8888 only. For any
    // other format this over- or under-sizes the copy; confirm the caller
    // only ever passes RGBA_8888 bitmaps.
    long size = info.width * info.height * info.format;
    pixels = new int[size];
    memcpy(pixels, addr, size * sizeof(int));

    if ((errorCode = AndroidBitmap_unlockPixels(env, bitmap)) != 0) {
        LOGI("error %d", errorCode);
    }
}
/*
 * Class:     org_opencv_android_Utils
 * Method:    nBitmapToMat(long m, Bitmap b)
 * Signature: (JL)Z
 *
 * Copies the CV_8UC4 Mat behind handle |m| into |bitmap| (RGBA_8888).
 * Returns false if the Mat is missing/empty, the bitmap can't be queried
 * or locked, the format is wrong, or the sizes don't match.
 */
JNIEXPORT jboolean JNICALL Java_org_opencv_android_Utils_nMatToBitmap
  (JNIEnv * env, jclass cls, jlong m, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;
    cv::Mat* mat = (cv::Mat*) m;

    if ( mat == 0 || mat->data == 0) return false; // no native Mat behind
    if ( AndroidBitmap_getInfo(env, bitmap, &info) < 0 ) return false; // can't get info
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) return false; // incompatible format
    // BUGFIX: the original memcpy'd height*width*4 bytes out of the Mat
    // without checking that the Mat matched the bitmap — a size mismatch
    // was an out-of-bounds read/write. Reject mismatched inputs.
    if (mat->rows != (int)info.height || mat->cols != (int)info.width
            || mat->type() != CV_8UC4) return false; // size/type mismatch
    if ( AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0 ) return false; // can't get pixels

    // BUGFIX: copy row by row, honoring the bitmap's stride (row padding)
    // and the Mat's step, instead of one flat memcpy that assumed both
    // buffers were densely packed.
    const size_t rowBytes = (size_t)info.width * 4;
    unsigned char* dst = (unsigned char*)pixels;
    for (uint32_t y = 0; y < info.height; ++y)
        memcpy(dst + (size_t)y * info.stride, mat->ptr(y), rowBytes);

    AndroidBitmap_unlockPixels(env, bitmap);
    return true;
}
/* Decodes the JPEG bytes in |jp| (length |l|) via processimage() and copies
 * the resulting global rgb[] buffer into |bmp|, which must be an RGBA_8888
 * bitmap of exactly IMG_WIDTH x IMG_HEIGHT.
 * Returns 0 on success, -1 on any failure. */
int Java_com_example_android_wearable_mjpegviewwear_MjpegInputStream_pixeltobmp(
    JNIEnv* env, jobject thiz, jbyteArray jp, jint l, jobject bmp)
{
    jboolean b;
    jbyte *p = (*env)->GetByteArrayElements(env, jp, &b);

    processimage((const void *)p, l);

    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int *colors;
    int width = IMG_WIDTH;
    int height = IMG_HEIGHT;

    if (bmp == NULL) {
        /* BUGFIX: was a bare `return;` in an int-returning function, and
         * the byte array elements were leaked. */
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }

    if ((ret = AndroidBitmap_getInfo(env, bmp, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if (info.width != IMG_WIDTH || info.height != IMG_HEIGHT) {
        LOGE("Bitmap size differs !");
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bmp, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUGFIX: previously fell through and wrote through an
         * uninitialized pixel pointer. */
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }

    /* Copy the decoded frame into the bitmap. */
    colors = (int*)pixels;
    int *lrgb = &rgb[0];
    for (i = 0; i < width * height; i++) {
        *colors++ = *lrgb++;
    }

    AndroidBitmap_unlockPixels(env, bmp);
    (*env)->ReleaseByteArrayElements(env, jp, p, 0);
    return 0;
}
// Returns the bytes-per-pixel of |javaBitmap|'s pixel format, or 0 when
// the bitmap is null or its info cannot be queried.
JNIEXPORT jint JNICALL Java_com_badpx_BitmapFactoryCompat_BitmapHelper_nativeGetBytesPerPixel(
        JNIEnv* env, jobject, jobject javaBitmap) {
    if (NULL != javaBitmap) {
        AndroidBitmapInfo bmpInfo;
        // BUGFIX: bmpInfo was read uninitialized when getInfo failed.
        if (AndroidBitmap_getInfo(env, javaBitmap, &bmpInfo) < 0) {
            return 0;
        }
        return computeBytesPerPixel(bmpInfo.format);
    }
    return 0;
}
// Converts an RGBA_8888 |bitmap| into a new 8-bit grayscale Leptonica PIX
// (luma = (r+g+b)/3) and returns it as a jint handle.
// Returns JNI_FALSE (0) on any bitmap error.
// NOTE(review): returning a pointer as jint truncates on 64-bit ABIs; the
// jint return type is the existing interface and is kept as-is.
jint Java_com_googlecode_leptonica_android_ReadFile_nativeReadBitmap(JNIEnv *env, jclass clazz, jobject bitmap) {
    //LOGV(__FUNCTION__);

    AndroidBitmapInfo info;
    void* pixels;
    int ret;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return JNI_FALSE;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return JNI_FALSE;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    PIX *pixd = pixCreate(info.width, info.height, 8);

    l_uint32 *src = (l_uint32 *) pixels;
    l_int32 srcWpl = (info.stride / 4);   // stride is bytes; words are 4 bytes
    l_uint32 *dst = pixGetData(pixd);
    l_int32 dstWpl = pixGetWpl(pixd);
    l_uint8 r, g, b, pixel8;
    // Cleanup: removed unused locals (w, h, d) and the dead alpha extraction.

    for (int y = 0; y < info.height; y++) {
        l_uint32 *dst_line = dst + (y * dstWpl);
        l_uint32 *src_line = src + (y * srcWpl);

        for (int x = 0; x < info.width; x++) {
            // Get pixel from RGBA_8888
            r = *src_line >> SK_R32_SHIFT;
            g = *src_line >> SK_G32_SHIFT;
            b = *src_line >> SK_B32_SHIFT;

            // Set pixel to LUMA_8 (simple average of the channels)
            pixel8 = (l_uint8) ((r + g + b) / 3);
            SET_DATA_BYTE(dst_line, x, pixel8);

            // Move to the next pixel
            src_line++;
        }
    }

    AndroidBitmap_unlockPixels(env, bitmap);

    return (jint) pixd;
}
// Converts an RGBA_8888 |bitmap| into a new 8-bit grayscale Leptonica PIX
// (luma = (r+g+b)/3) and returns it as a jint handle.
// Returns JNI_FALSE (0) on any bitmap error.
jint Java_com_googlecode_leptonica_android_ReadFile_nativeReadBitmap(JNIEnv *env, jclass clazz, jobject bitmap) {
    LOGV(__FUNCTION__);

    AndroidBitmapInfo info;
    void* pixels;
    int ret;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return JNI_FALSE;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return JNI_FALSE;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    PIX *pix = pixCreate(info.width, info.height, 8);

    l_uint8 *src = (l_uint8 *) pixels;
    // BUGFIX: the original read pixGetData(pixs)/pixGetWpl(pixs) — an
    // undefined identifier; it must operate on the PIX just created.
    l_uint32 *dst = pixGetData(pix);
    l_int32 srcBpl = info.stride;
    l_int32 dstWpl = pixGetWpl(pix);
    l_uint8 r, g, b;
    // Cleanup: removed the stray "SET_PIXEL = SET_PIXEL_8;" copy-paste
    // remnant and the unused locals (w, h, d, a).

    for (int y = 0; y < info.height; y++) {
        l_uint32 *dst_line = dst + (y * dstWpl);
        l_uint8 *src_line = src + (y * srcBpl);

        for (int x = 0; x < info.width; x++) {
            // Get pixel from RGBA_8888 (bytes are R, G, B, A in memory)
            r = *src_line++;
            g = *src_line++;
            b = *src_line++;
            src_line++; // skip alpha

            // BUGFIX: the destination pointer was never advanced, so every
            // pixel of a row was written to the same byte. Use leptonica's
            // SET_DATA_BYTE, which indexes by pixel and handles the
            // big-endian byte packing inside each 32-bit word.
            SET_DATA_BYTE(dst_line, x, (l_uint8) ((r + g + b) / 3));
        }
    }

    AndroidBitmap_unlockPixels(env, bitmap);

    return (jint) pix;
}
// Runs one step of the ViBE background-subtraction detector over the
// grayscale frame |frameIn| (wid x hei, one byte per pixel) and writes the
// foreground mask into |bitmap| as opaque white / transparent black pixels.
// The detector is lazily created at wid/SCALE x hei/SCALE resolution and
// kept as a singleton for subsequent frames. Returns 0 on success, -1 on
// allocation or bitmap failure.
int VibeUpdateForResult(JNIEnv* env, const unsigned char* frameIn, jobject bitmap, unsigned int wid, unsigned int hei ) {
    if ( detector_ == NULL) {
        detector_ = new bv::MD_ViBE(wid/SCALE, hei/SCALE);
    }
    if ( detector_ == NULL) {
        return -1;
    }

    // Work images are downscaled by SCALE (+1 guards the rounding edge).
    bv::Image inImage(wid/SCALE + 1, hei/SCALE + 1);
    bv::Image outImage(wid/SCALE + 1, hei/SCALE + 1);

    // Box-downsample the input: accumulate each SCALE x SCALE cell...
    inImage.data *= 0;
    for(int y = 0; y < (int)hei; y++) {
        for(int x = 0; x < (int)wid; x++) {
            int xx = x/SCALE;
            int yy = y/SCALE;
            inImage.data(xx, yy) = frameIn[y*wid+x] + inImage.data(xx, yy);
        }
    }
    // ...then divide by the cell area to get the mean.
    inImage.data /= SCALE*SCALE;

    int ret;
    ret = detector_->run(inImage, outImage);

    AndroidBitmapInfo info;
    unsigned int* pixels;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGD("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return -1;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, (void**)&pixels)) < 0) {
        LOGD("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return -1;
    }

    // stride is in bytes; pixels are 4-byte words.
    int lineStride = info.stride / 4;
    // Upsample the mask back to full resolution: white = foreground.
    for(int y = 0; y < (int)hei; y++) {
        for(int x = 0; x < (int)wid; x++) {
            int xx = x/SCALE;
            int yy = y/SCALE;
            if ( outImage.data(xx, yy) ) {
                pixels[y*lineStride+x] = 0xFFFFFFFF;
            } else {
                pixels[y*lineStride+x] = 0x00000000;
            }
        }
    }
    AndroidBitmap_unlockPixels(env, bitmap);
    return 0;
}
// Reconfigures an existing *mutable* Java Bitmap to a new width/height by
// poking the Bitmap's hidden fields via JNI reflection (compat path for
// platforms without Bitmap.reconfigure()). Returns RECONFIGURE_SUCCESS on
// success, or INIT_FAILED / IMMUTABLE_BMP / TRAVERSAL_FAILED / false on the
// various failure paths.
JNIEXPORT jint JNICALL Java_com_badpx_BitmapFactoryCompat_BitmapHelper_nativeReconfigure(
        JNIEnv* env, jobject, jobject javaBitmap, jint width, jint height) {
    if (NULL == javaBitmap || width <= 0 || height <= 0) {
        LOGD("Illegal Arguments(Bitmap=%p, width=%d, height = %d) in nativeChangeBitmapSize!",
             javaBitmap, width, height);
        return false;
    }

    // One-time lazy lookup of Bitmap's hidden fields/methods; gInitFlag is
    // 0 = not tried, 1 = success, -1 = failed (sticky).
    if (0 == gInitFlag) {
        LOGD("NativeBitmapHelper initialize started");
        gInitFlag = -1;
        jclass bitmap_class = env->FindClass("android/graphics/Bitmap");
        if (NULL == bitmap_class) return INIT_FAILED;
        gBitmap_nativeBitmapFieldID = env->GetFieldID(bitmap_class, "mNativeBitmap", "I");
        if (NULL == gBitmap_nativeBitmapFieldID) return INIT_FAILED;
        gBitmap_widthFieldID = env->GetFieldID(bitmap_class, "mWidth", "I");
        if (NULL == gBitmap_widthFieldID) return INIT_FAILED;
        gBitmap_heightFieldID = env->GetFieldID(bitmap_class, "mHeight", "I");
        if (NULL == gBitmap_heightFieldID) return INIT_FAILED;
        gBitmap_isMutableMethodID = env->GetMethodID(bitmap_class, "isMutable", "()Z");
        if (NULL == gBitmap_isMutableMethodID) return INIT_FAILED;
        gInitFlag = 1; // Initialize success
        LOGD("NativeBitmapHelper initialize finished");
    } else if (-1 == gInitFlag) {
        // Initialize failed, may be can't take some java fields or methods by reflection.
        LOGD("NativeBitmapHelper initialize failed!");
        return INIT_FAILED;
    }

    // Only mutable bitmaps may have their backing store reconfigured.
    if (!env->CallBooleanMethod(javaBitmap, gBitmap_isMutableMethodID)) {
        LOGD("Immutable bitmap can't be reused!");
        return IMMUTABLE_BMP;
    }

    // Native SkBitmap handle, read from the hidden mNativeBitmap field.
    int bitmap = env->GetIntField(javaBitmap, gBitmap_nativeBitmapFieldID);
    AndroidBitmapInfo bmpInfo;
    AndroidBitmap_getInfo(env, javaBitmap, &bmpInfo);
    LOGD("Attempt to reconfigure Bitmap@%d(with rowBytes %d) from (%dx%d) to (%dx%d)",
         bitmap, bmpInfo.stride, bmpInfo.width, bmpInfo.height, width, height);
    if (reconfigure(bitmap, bmpInfo, width, height)) {
        // Update width/height fields of java object.
        env->SetIntField(javaBitmap, gBitmap_widthFieldID, width);
        env->SetIntField(javaBitmap, gBitmap_heightFieldID, height);
        return RECONFIGURE_SUCCESS;
    }
    return TRAVERSAL_FAILED;
}
// Returns the size in bytes of the bitmap's pixel buffer
// (width * height * bytes-per-pixel; formats other than the three handled
// below are treated as one byte per pixel, matching the original).
static size_t GetBitmapSize(JNIEnv *env, jobject jbitmap) {
    AndroidBitmapInfo info;
    memset(&info, 0, sizeof(info));
    AndroidBitmap_getInfo(env, jbitmap, &info);

    size_t bytesPerPixel = 1;
    switch (info.format) {
        case ANDROID_BITMAP_FORMAT_RGBA_8888:
            bytesPerPixel = 4;
            break;
        case ANDROID_BITMAP_FORMAT_RGB_565:
        case ANDROID_BITMAP_FORMAT_RGBA_4444:
            bytesPerPixel = 2;
            break;
    }
    return (size_t)info.width * info.height * bytesPerPixel;
}
// Renders the giac expression |operation| as pretty-printed math into a
// newly created ARGB_8888 Java Bitmap via cairo, and returns that bitmap.
// Returns NULL on any bitmap/JNI failure; rendering errors are rethrown as
// java.lang.Exception.
jobject Java_org_giac_xcaspad_Calculator_getBitmap(JNIEnv* env, jobject thiz, jint windowsize, jint fontsize, jdouble r, jdouble g, jdouble b, jstring operation) {
    AndroidBitmapInfo info;
    void* pixels;
    int ret;

    const char *compute = env->GetStringUTFChars(operation, 0);
    giac::gen gen(compute, contextptr);
    // BUGFIX: the UTF chars were never released (JNI leak). The gen
    // constructor parses the string, so releasing here is safe.
    env->ReleaseStringUTFChars(operation, compute);

    xcas::PrettyPrint prettyprint((int)windowsize, (int)fontsize, gen);
    int width = prettyprint.getWidth();
    int height = prettyprint.getHeight();
    jobject bitmap = createBitmap(env, width, height, "ARGB_8888");

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed: error=%d", ret);
        return NULL; // BUGFIX: was a bare `return;` in a jobject-returning function
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565 && info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGB_565 or ARGB_8888");
        return NULL; // BUGFIX: same — must return a value
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed: error=%d", ret);
        // BUGFIX: previously continued with an uninitialized pixel pointer
        return NULL;
    }

    // Wrap the locked pixel buffer in a cairo surface matching the format.
    cairo_surface_t *cs = NULL;
    if (info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
        cs = cairo_image_surface_create_for_data((unsigned char*)pixels, CAIRO_FORMAT_RGB16_565, info.width, info.height, info.stride);
    } else if (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
        cs = cairo_image_surface_create_for_data((unsigned char*)pixels, CAIRO_FORMAT_RGB24, info.width, info.height, info.stride);
    }

    try {
        prettyprint.draw(cs, (double)r, (double)g, (double)b);
    } catch (std::runtime_error & err) {
        env->ThrowNew(env->FindClass("java/lang/Exception"), err.what());
    }

    // BUGFIX: the cairo surface was leaked; destroy it before unlocking.
    cairo_surface_destroy(cs);
    AndroidBitmap_unlockPixels(env, bitmap);
    return bitmap;
}