// Copies an Android Bitmap's pixels into srcMat as CV_8UC4 (RGBA).
// RGBA_8888 bitmaps are deep-copied directly; any other format is assumed to
// be RGB_565 (2 bytes/pixel) and converted. On any failure a
// java.lang.Exception is thrown to the Java caller.
void bitmap_to_mat(JNIEnv *env, jobject &srcBitmap, Mat &srcMat) {
    void *srcPixels = 0;
    AndroidBitmapInfo srcBitmapInfo;
    try {
        // BUG FIX: the return codes were ignored; a failed lock would have
        // wrapped a null pixel pointer in a Mat below.
        if (AndroidBitmap_getInfo(env, srcBitmap, &srcBitmapInfo) < 0 ||
                AndroidBitmap_lockPixels(env, srcBitmap, &srcPixels) < 0 ||
                srcPixels == 0) {
            jclass je = env->FindClass("java/lang/Exception");
            env->ThrowNew(je, "bitmap_to_mat: cannot access bitmap pixels");
            return;
        }
        uint32_t srcHeight = srcBitmapInfo.height;
        uint32_t srcWidth = srcBitmapInfo.width;
        srcMat.create(srcHeight, srcWidth, CV_8UC4);
        if (srcBitmapInfo.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            // Wrap the locked buffer without copying, honoring the bitmap's
            // row stride (may exceed width*4), then deep-copy into srcMat.
            Mat tmp(srcHeight, srcWidth, CV_8UC4, srcPixels, srcBitmapInfo.stride);
            tmp.copyTo(srcMat);
        } else {
            // Assume RGB_565 — NOTE(review): confirm no other formats reach here.
            Mat tmp = Mat(srcHeight, srcWidth, CV_8UC2, srcPixels, srcBitmapInfo.stride);
            cvtColor(tmp, srcMat, COLOR_BGR5652RGBA);
        }
        AndroidBitmap_unlockPixels(env, srcBitmap);
        return;
    } catch (cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, srcBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
        return;
    } catch (...) {
        AndroidBitmap_unlockPixels(env, srcBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "unknown");
        return;
    }
}
// Applies an unsharp-mask filter from `input` into `output` (both must be
// RGBA_8888 and locked for the duration of the native call).
JNIEXPORT void JNICALL Java_com_jiangpeng_android_antrace_Utils_unsharpMask(JNIEnv* env, jobject thiz, jobject input, jobject output)
{
    AndroidBitmapInfo inputInfo;
    AndroidBitmapInfo outputInfo;
    void* src_pixels = 0;
    void* dst_pixels = 0;
    int ret = 0;

    // Validate both bitmaps before taking any lock, so no early return can
    // leave a bitmap locked.
    if ((ret = AndroidBitmap_getInfo(env, input, &inputInfo)) < 0) {
        return;
    }
    if (inputInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        return;
    }
    if ((ret = AndroidBitmap_getInfo(env, output, &outputInfo)) < 0) {
        return;
    }
    if (outputInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, input, &src_pixels)) < 0) {
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, output, &dst_pixels)) < 0) {
        // BUG FIX: the original returned here with `input` still locked.
        AndroidBitmap_unlockPixels(env, input);
        return;
    }

    // Radius 5; output dims drive the filter, input stride gives row length.
    unsharpMask(src_pixels, dst_pixels, outputInfo.width, outputInfo.height, inputInfo.stride, 5);

    AndroidBitmap_unlockPixels(env, input);
    AndroidBitmap_unlockPixels(env, output);
}
// Applies a 3D color lookup table (stored as a lutwidth x lutheight bitmap)
// to the pixel range [start, end) of `bitmap`, with trilinear interpolation
// between the 8 neighboring LUT cells. Logic is unchanged from the original
// except for two stray empty statements (`;;`) that were removed.
void JNIFUNCF(ImageFilterFx, nativeApplyFilter, jobject bitmap, jint width, jint height, jobject lutbitmap, jint lutwidth, jint lutheight, jint start, jint end)
{
    char* destination = 0;
    char* lut = 0;
    AndroidBitmap_lockPixels(env, bitmap, (void**) &destination);
    AndroidBitmap_lockPixels(env, lutbitmap, (void**) &lut);
    unsigned char * rgb = (unsigned char * )destination;
    unsigned char * lutrgb = (unsigned char * )lut;

    // LUT cube dimensions derived from the LUT bitmap's shape.
    // NOTE(review): lutdim_r and lutdim_g are both lutheight; confirm the LUT
    // layout (r fastest, then b, then g per the index expression below).
    int lutdim_r = lutheight;
    int lutdim_g = lutheight;
    int lutdim_b = lutwidth/lutheight;
    int STEP = 4; // bytes per RGBA pixel

    // Byte offsets of the 8 corners of the LUT cell used for interpolation.
    int off[8] = {
            0,
            STEP*1,
            STEP*lutdim_r,
            STEP*(lutdim_r + 1),
            STEP*(lutdim_r*lutdim_b),
            STEP*(lutdim_r*lutdim_b+1),
            STEP*(lutdim_r*lutdim_b+lutdim_r),
            STEP*(lutdim_r*lutdim_b+lutdim_r + 1)
    };

    // Map 0..255 channel values into LUT cell coordinates.
    float scale_R = (lutdim_r-1.f)/256.f;
    float scale_G = (lutdim_g-1.f)/256.f;
    float scale_B = (lutdim_b-1.f)/256.f;

    int i;
    for (i = start; i < end; i+= STEP)
    {
        // RED/GREEN/BLUE index off the loop variable i — TODO confirm macro defs.
        int r = rgb[RED];
        int g = rgb[GREEN];
        int b = rgb[BLUE];

        float fb = b*scale_B;
        float fg = g*scale_G;
        float fr = r*scale_R;
        int lut_b = (int)fb;
        int lut_g = (int)fg;
        int lut_r = (int)fr;
        // Base index of the LUT cell, then fractional position inside it.
        int p = lut_r+lut_b*lutdim_r+lut_g*lutdim_r*lutdim_b;
        p*=STEP;
        float dr = fr-lut_r;
        float dg = fg-lut_g;
        float db = fb-lut_b;
        rgb[RED] = clamp(interp(lutrgb,p ,off,dr,dg,db));
        rgb[GREEN] = clamp(interp(lutrgb,p+1,off,dr,dg,db));
        rgb[BLUE] = clamp(interp(lutrgb,p+2,off,dr,dg,db));
    }

    AndroidBitmap_unlockPixels(env, bitmap);
    AndroidBitmap_unlockPixels(env, lutbitmap);
}
// Crops the (offsetWidth, offsetHeight)-anchored dstWidth x dstHeight window
// of `src` into `dst` via the native crop() helper.
void JNIFUNCF(ImageFilterGeometry, nativeApplyFilterCrop, jobject src, jint srcWidth, jint srcHeight, jobject dst, jint dstWidth, jint dstHeight, jint offsetWidth, jint offsetHeight)
{
    char* destination = 0;
    char* source = 0;
    // (removed unused local `len` — it was computed but never read)
    AndroidBitmap_lockPixels(env, src, (void**) &source);
    AndroidBitmap_lockPixels(env, dst, (void**) &destination);
    crop(source, srcWidth, srcHeight, destination, dstWidth, dstHeight, offsetWidth, offsetHeight);
    AndroidBitmap_unlockPixels(env, dst);
    AndroidBitmap_unlockPixels(env, src);
}
// Mirrors `src` into `dst` according to `flip`; both bitmaps must have
// identical dimensions or the call is a no-op.
void JNIFUNCF(ImageFilterGeometry, nativeApplyFilterFlip, jobject src, jint srcWidth, jint srcHeight, jobject dst, jint dstWidth, jint dstHeight, jint flip)
{
    // Guard: refuse mismatched geometries up front.
    if (srcWidth != dstWidth || srcHeight != dstHeight) {
        return;
    }

    char* srcPixels = 0;
    char* dstPixels = 0;
    AndroidBitmap_lockPixels(env, src, (void**) &srcPixels);
    AndroidBitmap_lockPixels(env, dst, (void**) &dstPixels);

    flip_fun(flip, srcPixels, srcWidth, srcHeight, dstPixels, dstWidth, dstHeight);

    AndroidBitmap_unlockPixels(env, dst);
    AndroidBitmap_unlockPixels(env, src);
}
/* Converts the color bitmap `origbm` to grayscale, writing ARGB words into
 * `graybm`. Both bitmaps are traversed row by row using their own strides.
 * NOTE(review): assumes graybm has at least origbm's dimensions and a
 * 32-bit pixel format — confirm at the call site. */
JNIEXPORT void JNICALL Java_com_leth_convertimage_MainActivity_convertImage
  (JNIEnv *env, jobject obj, jobject origbm, jobject graybm)
{
    AndroidBitmapInfo infocolor;
    void *pixelscolor;
    AndroidBitmapInfo infogray;
    void *pixelsgray;
    int y;
    int x;

    LOGI("Before convert");
    AndroidBitmap_getInfo(env, origbm, &infocolor);
    /* BUG FIX: the format string had four conversions but only three
     * arguments (stride was consumed by %d for "format" and %ld had no
     * argument at all — undefined behavior). */
    LOGI("infocolor :: width-%u height-%u format-%d stride-%u",
         infocolor.width, infocolor.height, infocolor.format, infocolor.stride);
    AndroidBitmap_getInfo(env, graybm, &infogray);
    AndroidBitmap_lockPixels(env, origbm, &pixelscolor);
    AndroidBitmap_lockPixels(env, graybm, &pixelsgray);
    LOGI("Start convert");

    uint8_t tmp;
    for (y = 0; y < infocolor.height; y++) {
        rgba * line = (rgba *) pixelscolor;
        uint32_t * grayline = (uint32_t *) pixelsgray;
        for (x = 0; x < infocolor.width; x++) {
            /* Equal-weight grayscale (0.33/0.33/0.33). NOTE(review): a luma
             * weighting (0.299/0.587/0.114) is more usual — confirm intent. */
            tmp = line[x].red * 0.33 + line[x].green * 0.33 + line[x].blue * 0.33;
            /* Pack as ARGB, replicating the gray value into R, G and B. */
            grayline[x] = (line[x].alpha & 0xff) << 24
                        | (tmp & 0xff) << 16
                        | (tmp & 0xff) << 8
                        | (tmp & 0xff);
        }
        /* Advance by stride (bytes per row), which may exceed width * bpp. */
        pixelscolor = (int8_t *) pixelscolor + infocolor.stride;
        pixelsgray = (int8_t *) pixelsgray + infogray.stride;
    }

    LOGI("Finish convert");
    AndroidBitmap_unlockPixels(env, origbm);
    AndroidBitmap_unlockPixels(env, graybm);
}
// RAII unlock: releases the bitmap's pixel lock if one was taken.
// AndroidBitmap_unlockPixels cannot run while a Java exception is pending,
// so any in-flight exception is stashed, cleared for the unlock, and rethrown.
BitmapPixelsLock::~BitmapPixelsLock() {
  if (!ptr_) {
    return; // lock was never acquired — nothing to release
  }
  jthrowable pending = env_->ExceptionOccurred();
  if (pending) {
    env_->ExceptionClear();
  }
  AndroidBitmap_unlockPixels(env_, bitmap_);
  if (pending) {
    env_->Throw(pending);
  }
}
// Writes Qwe::image into `bitmap` rotated 90 degrees, then frees the native
// scratch buffers used by the detection pipeline.
JNIEXPORT void JNICALL Java_makemachine_android_examples_Nati_getrot(JNIEnv * env, jclass klass, jobject bitmap) {
    int ret;
    uint32_t * pixels;
    void * raw;
    AndroidBitmapInfo info;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) return;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &raw)) < 0) return;
    pixels = (uint32_t*) raw;

    // PERF FIX: the original first copied the whole image straight into the
    // bitmap, then overwrote every one of those pixels in the rotation loop
    // below — the straight copy was dead work and has been removed.
    int h, w, oH, oW, nW;
    oH = info.width;   // source rows span the bitmap's width
    oW = info.height;  // source cols span the bitmap's height
    nW = info.width;   // destination row length in pixels
    for (w = 0; w < oW; w++) {
        for (h = 0; h < oH; h++) {
            *(pixels + nW - h - 1 + nW * w) = *(Qwe::image + oW * h + w);
        }
    }
    AndroidBitmap_unlockPixels(env, bitmap);

    // The pipeline is finished with these buffers; release them.
    free(Qwe::image);
    free(Qwe::integral);
    free(Qwe::mean);
    free(Qwe::dev);
#ifdef USE_DEVIATION
    free(Qwe::intsquare);
#endif
}
/* Stub for letter separation: validates and locks the bitmap, but performs
 * no pixel processing yet before unlocking. */
JNIEXPORT void JNICALL Java_app_ssm_duck_duckapp_CropActivity_seperateLetter(JNIEnv *env, jobject obj, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;

    // Get information for the bitmap object.
    if(0>AndroidBitmap_getInfo(env, bitmap,&info)){
        LOGE("AndroidBitmap_getInfo() failed!");
        return;
    }
    LOGI("imagesize(%d,%d)\n",info.width,info.height);

    // Format check is tolerated, not enforced: the early return is commented
    // out, so non-RGB_565 bitmaps only log an error and continue.
    if(info.format != ANDROID_BITMAP_FORMAT_RGB_565){
        LOGE("Bitmap format is not RGB_565:%d\n",info.format);
        //return;
    }

    // Attempt to lock the pixel address.
    if(0> AndroidBitmap_lockPixels(env,bitmap,&pixels)){
        LOGE("AndroidBitmap_lockPixels() failed!");
        return;
    }

    // Function body (the actual letter-separation work) is not implemented yet.
    AndroidBitmap_unlockPixels(env,bitmap);
}
// Adjusts color saturation of `bitmap` in place. S = 1 leaves colors
// untouched, S = 0 converts to luma grayscale, S > 1 oversaturates.
void JNIFUNCF(ImageFilterSaturated, nativeApplyFilter, jobject bitmap, jint width, jint height, jfloat saturation)
{
    char* destination = 0;
    AndroidBitmap_lockPixels(env, bitmap, (void**) &destination);
    int i;
    int len = width * height * 4;
    // Rec.601-style luma weights (Rf is 0.2999 in the original; kept as-is).
    float Rf = 0.2999f;
    float Gf = 0.587f;
    float Bf = 0.114f;
    float S = saturation; // fixed stray ';;'
    float MS = 1.0f - S;
    float Rt = Rf * MS;
    float Gt = Gf * MS;
    float Bt = Bf * MS;
    float R, G, B;
    for (i = 0; i < len; i += 4) {
        // RED/GREEN/BLUE index off the loop variable i — TODO confirm macro defs.
        int r = destination[RED];
        int g = destination[GREEN];
        int b = destination[BLUE];
        // (removed unused local 'int t = (r + g) / 2;')
        R = r;
        G = g;
        B = b;
        // Blend each channel between its luma projection and itself.
        float Rc = R * (Rt + S) + G * Gt + B * Bt;
        float Gc = R * Rt + G * (Gt + S) + B * Bt;
        float Bc = R * Rt + G * Gt + B * (Bt + S);
        destination[RED] = CLAMP(Rc);
        destination[GREEN] = CLAMP(Gc);
        destination[BLUE] = CLAMP(Bc);
    }
    AndroidBitmap_unlockPixels(env, bitmap);
}
/* Fills `pBitmap` with one decoded video frame for the given ROI.
 * Returns the result of decode_a_frame, or -1 on bitmap access failure. */
JNIEXPORT jint JNICALL Java_feipeng_andzop_render_RenderView_naRenderAFrame(JNIEnv * pEnv, jobject pObj, jobject pBitmap, int _width, int _height, float _roiSh, float _roiSw, float _roiEh, float _roiEw) {
    AndroidBitmapInfo lInfo;
    int lRet;
    LOGI(3, "start of render_a_frame");
    //1. retrieve information about the bitmap
    if ((lRet = AndroidBitmap_getInfo(pEnv, pBitmap, &lInfo)) < 0) {
        LOGE(1, "AndroidBitmap_getInfo failed! error = %d", lRet);
        // BUG FIX: this function returns jint; bare 'return;' was invalid.
        return -1;
    }
    if (lInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE(1, "Bitmap format is not RGBA_8888!");
        return -1;
    }
    //2. lock the pixel buffer and retrieve a pointer to it (stored in gBitmap)
    if ((lRet = AndroidBitmap_lockPixels(pEnv, pBitmap, &gBitmap)) < 0) {
        LOGE(1, "AndroidBitmap_lockPixels() failed! error = %d", lRet);
        // BUG FIX: the original fell through and decoded into an unlocked buffer.
        return -1;
    }
    //3. decode a video frame: the bitmap is filled with decoded pixels
    lRet = decode_a_frame(_width, _height, _roiSh, _roiSw, _roiEh, _roiEw);
    AndroidBitmap_unlockPixels(pEnv, pBitmap);
    LOGI(3, "~~~~~~~~~~end of rendering a frame~~~~~~~~~~~~~~~~~`");
    return lRet;
}
/*
 * Class:     org_opencv_android_Utils
 * Method:    nMatToBitmap(long m, Bitmap b)
 * Signature: (JL)Z
 *
 * Copies the RGBA Mat behind handle `m` into `bitmap`. Returns false when the
 * Mat is missing, the bitmap is not RGBA_8888, dimensions differ, or pixel
 * access fails.
 */
JNIEXPORT jboolean JNICALL Java_org_opencv_android_Utils_nMatToBitmap
  (JNIEnv * env, jclass cls, jlong m, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;
    cv::Mat* mat = (cv::Mat*) m;
    if ( mat == 0 || mat->data == 0)
        return false; // no native Mat behind the handle
    if ( AndroidBitmap_getInfo(env, bitmap, &info) < 0 )
        return false; // can't get info
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return false; // incompatible format
    // BUG FIX: the original memcpy'd height*width*4 bytes with no size check
    // (overflow if the Mat was smaller) and ignored the bitmap's row stride.
    if ((uint32_t) mat->cols != info.width || (uint32_t) mat->rows != info.height)
        return false; // dimension mismatch
    if ( AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0 )
        return false; // can't get pixels
    if (mat->data && pixels) {
        unsigned char* dst = (unsigned char*) pixels;
        // Copy row by row so padded bitmaps (stride > width*4) stay intact.
        for (uint32_t y = 0; y < info.height; y++)
            memcpy(dst + y * info.stride, mat->ptr(y), info.width * 4);
    }
    AndroidBitmap_unlockPixels(env, bitmap);
    return true;
}
/* Loads a save-state slot screenshot and blits it into `bitmap`, honoring the
 * bitmap's row stride. */
void Java_org_yabause_android_YabauseRunnable_stateSlotScreenshot(
    JNIEnv* env, jobject obj, jobject dirpath, jobject itemnum, int slot, jobject bitmap )
{
    int outputwidth, outputheight;
    u32 * buffer, * cur;
    AndroidBitmapInfo info;
    void * pixels;
    unsigned char * row;
    int x, y, status;
    jboolean dummy;

    const char * dp = (*env)->GetStringUTFChars(env, dirpath, &dummy);
    const char * in = (*env)->GetStringUTFChars(env, itemnum, &dummy);
    status = LoadStateSlotScreenshot(dp, in, slot, &outputwidth, &outputheight, &buffer);
    /* BUG FIX: the UTF strings were never released — leaked on every call. */
    (*env)->ReleaseStringUTFChars(env, dirpath, dp);
    (*env)->ReleaseStringUTFChars(env, itemnum, in);
    if (0 != status)
        return;

    AndroidBitmap_getInfo(env, bitmap, &info);
    AndroidBitmap_lockPixels(env, bitmap, &pixels);

    /* BUG FIX: the original advanced a void* by info.stride, which is a GCC
     * extension; use an explicit byte pointer for the row walk. */
    row = (unsigned char *) pixels;
    cur = buffer;
    for (y = 0; y < info.height; y++)
    {
        for (x = 0; x < info.width; x++)
        {
            *((uint32_t *) row + x) = *(cur + x);
        }
        row += info.stride;   /* bitmap rows may be padded */
        cur += outputwidth;   /* screenshot rows are tightly packed */
    }

    free(buffer);
    AndroidBitmap_unlockPixels(env, bitmap);
}
// Blurs `bitmapIn` in place with a stack blur of radius `r`.
// Requires an RGBA_8888 bitmap; silently returns otherwise.
JNIEXPORT void JNICALL Java_com_daemon_aroundcircleviewdemo_ImageBlur_blurBitMap
  (JNIEnv *env, jclass obj, jobject bitmapIn, jint r) {
    AndroidBitmapInfo infoIn;
    void* pixelsIn;
    int ret;

    // Get image info
    if ((ret = AndroidBitmap_getInfo(env, bitmapIn, &infoIn)) < 0)
        return;

    // Check image format
    if (infoIn.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;

    // Lock the pixel buffer
    if ((ret = AndroidBitmap_lockPixels(env, bitmapIn, &pixelsIn)) < 0) {
        // BUG FIX: the original fell through on failure and blurred an
        // invalid pixel pointer.
        return;
    }

    int h = infoIn.height;
    int w = infoIn.width;

    // Blur in place (StackBlur returns the same buffer).
    pixelsIn = StackBlur((int*)pixelsIn, w, h, r);

    // Unlock
    AndroidBitmap_unlockPixels(env, bitmapIn);
}
JNIEXPORT void JNICALL Java_makemachine_android_examples_Nati_greyscale(JNIEnv * env, jclass klass, jobject bitmap,jint wind){ void * raw; uint32_t * pixels; int ret,j; int grey; AndroidBitmapInfo info; if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) return; if ((ret = AndroidBitmap_lockPixels(env, bitmap, &raw)) < 0) return; pixels=(uint32_t*) raw; #define USE_CONTRAST STRETCH #ifdef USE_CONTRAST_STRETCH determine_contrast(&pixels,info.height,info.width); #else Qwe::mingrey=0; Qwe::maxgrey=255; #endif greyscale(&pixels,info.height,info.width); make_mean(info.height,info.width,wind); #ifdef USE_DEVIATION make_dev(info.height,info.width,wind); #endif imtoin(info.height,info.width); thresh(info.height,info.width); close(info.height,info.width,2); open(info.height,info.width,2); AndroidBitmap_unlockPixels(env, bitmap); }
// Blurs `bitmapIn` in place with a stack blur of radius `r`.
// Requires an RGBA_8888 bitmap; silently returns otherwise.
JNIEXPORT void JNICALL Java_net_qiujuer_genius_app_BlurNative_fastBlurBitmap
  (JNIEnv *env, jclass obj, jobject bitmapIn, jint r) {
    AndroidBitmapInfo infoIn;
    void* pixelsIn;
    int ret;

    // Get image info
    if ((ret = AndroidBitmap_getInfo(env, bitmapIn, &infoIn)) < 0)
        return;

    // Check image format
    if (infoIn.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;

    // Lock the pixel buffer
    if ((ret = AndroidBitmap_lockPixels(env, bitmapIn, &pixelsIn)) < 0) {
        // BUG FIX: the original fell through on failure and blurred an
        // invalid pixel pointer.
        return;
    }

    int h = infoIn.height;
    int w = infoIn.width;

    // Blur in place (stackBlur returns the same buffer).
    pixelsIn = stackBlur((int*)pixelsIn, w, h, r);

    // Unlock
    AndroidBitmap_unlockPixels(env, bitmapIn);
}
/* Tears down all native video-decoding state: closes the codec context,
 * frees the swscale context, unlocks and releases the global Bitmap
 * reference, frees the decode frame, and clears the cached bitmap info.
 * Safe to call when some of the globals are already NULL. */
JNIEXPORT void JNICALL Java_com_example_restreaming_StreamingHeadlessCamcorder_nativeCloseVideo(JNIEnv* env, jobject thiz) {
    __android_log_print(ANDROID_LOG_INFO, "com.example.ffmpegav", "nativeCloseVideo()");

    if (gVideoCodecCtx) {
        avcodec_close(gVideoCodecCtx);
        gVideoCodecCtx = NULL;
    }

    /* sws_freeContext tolerates NULL, so no guard is needed here. */
    sws_freeContext(gSwsContext);
    gSwsContext = NULL;

    if (gBitmapRef) {
        /* Pixels must be unlocked before the global reference is dropped. */
        if (gBitmapRefPixelBuffer) {
            AndroidBitmap_unlockPixels(env, gBitmapRef);
            gBitmapRefPixelBuffer = NULL;
        }
        (*env)->DeleteGlobalRef(env, gBitmapRef);
        gBitmapRef = NULL;
    }

    gVideoStreamIdx = -1;

    /* NOTE(review): av_free releases the AVFrame struct but not any data
     * buffers it references; confirm the buffers are owned elsewhere (newer
     * FFmpeg would use av_frame_free here). */
    av_free(gVideoFrame);
    gVideoFrame = NULL;

    memset(&gAbi, 0, sizeof(gAbi));
}
// Snapshots the bitmap's pixels into the global `pixels` int buffer for use
// as a GL texture. Only RGBA_8888 (one 32-bit int per pixel) is accepted.
JNIEXPORT void JNICALL Java_br_odb_nehe_lesson08_GL2JNILib_setTexture(JNIEnv *env, jclass type, jobject bitmap) {
    void *addr;
    AndroidBitmapInfo info;
    int errorCode;

    // Validate before locking so an early return never leaves the bitmap locked.
    if ((errorCode = AndroidBitmap_getInfo(env, bitmap, &info)) != 0) {
        LOGI("error %d", errorCode);
        return;
    }
    // BUG FIX: the original computed size as width * height * info.format —
    // but `format` is an enum tag (RGBA_8888 == 1, RGB_565 == 4), NOT a byte
    // count, so the copy size was wrong for every format.
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGI("unsupported bitmap format %d", info.format);
        return;
    }
    if ((errorCode = AndroidBitmap_lockPixels(env, bitmap, &addr)) != 0) {
        LOGI("error %d", errorCode);
        return;
    }
    LOGI("bitmap info: %d wide, %d tall, %d ints per pixel", info.width, info.height, info.format);

    long size = (long) info.width * info.height; // pixels, one int each
    // BUG FIX: the previous buffer was leaked on every call.
    // NOTE(review): assumes the global `pixels` starts out null or owns a
    // prior new[] allocation — confirm its declaration.
    delete[] pixels;
    pixels = new int[size];
    memcpy(pixels, addr, size * sizeof(int));

    if ((errorCode = AndroidBitmap_unlockPixels(env, bitmap)) != 0) {
        LOGI("error %d", errorCode);
    }
}
// Converts `bitmap` to grayscale in place (luma weights 0.3/0.59/0.11) and
// returns the same bitmap; throws RuntimeException on bitmap access failure.
jobject Java_com_packpublishing_asynchronousandroid_chapter9_GrayImageLoader_convertImageToGray
        (JNIEnv *env, jobject obj, jobject bitmap) {

    AndroidBitmapInfo info;
    void *pixels;
    int ret;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        jclass clazz = env->FindClass("java/lang/RuntimeException");
        env->ThrowNew(clazz, "Failed to get Information from the Bitmap!");
        return 0;
    }

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, (void **) &pixels)) < 0) {
        jclass clazz = env->FindClass("java/lang/RuntimeException");
        env->ThrowNew(clazz, "Failed to lock Bitmap pixels !");
        return 0;
    }

    // Walk every pixel; NOTE(review): assumes stride == width * 4 — confirm.
    rgba *px = (rgba *) pixels;
    const int count = info.width * info.height;
    for (int idx = 0; idx < count; idx++) {
        uint8_t luma = px[idx].red * 0.3 + px[idx].green * 0.59 + px[idx].blue * 0.11;
        px[idx].red = luma;
        px[idx].green = luma;
        px[idx].blue = luma;
    }

    AndroidBitmap_unlockPixels(env, bitmap);
    return bitmap;
}
/** Builds an ARGB_8888 android.graphics.Bitmap from a BGR cv::Mat and
 *  returns it as a local reference. */
jobject setBitmapWrapper(JNIEnv *env, cv::Mat frame) {
    cv::Mat tmp; // CV_8UC4
    cvtColor(frame, tmp, CV_BGR2RGBA);

    // Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888) via JNI.
    jclass bitmapClass = env->FindClass("android/graphics/Bitmap");
    jmethodID create_bitmap = env->GetStaticMethodID(bitmapClass, "createBitmap",
            "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    jclass configClass = env->FindClass("android/graphics/Bitmap$Config");
    jmethodID create_config = env->GetStaticMethodID(configClass, "valueOf",
            "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
    jstring name = env->NewStringUTF("ARGB_8888");
    jobject config_obj = env->CallStaticObjectMethod(configClass, create_config, name);
    jobject bitmap_obj = env->CallStaticObjectMethod(bitmapClass, create_bitmap,
            tmp.cols, tmp.rows, config_obj);

    // BUG FIX: the original staged the data through 'unsigned char buffer[len]'
    // — a variable-length array (non-standard C++, stack overflow for any real
    // image) — and then copied it byte-by-byte. A single memcpy from the
    // converted Mat suffices (cvtColor output is continuous).
    int len = tmp.rows * tmp.cols * tmp.channels();
    void *pixels = 0;
    CV_Assert( AndroidBitmap_lockPixels(env, bitmap_obj, &pixels) >= 0 );
    // NOTE(review): assumes the bitmap's stride equals cols*4 — confirm for
    // padded bitmaps.
    memcpy(pixels, tmp.data, len);
    AndroidBitmap_unlockPixels(env, bitmap_obj);
    return bitmap_obj;
}
/* Converts `bitmap` to sepia tone in place. Supports RGBA_8888 and RGB_565;
 * any other format aborts with a log message. */
void Java_com_example_nativebitmap_MainActivity_sepiaImage(JNIEnv* env, jobject thiz, jobject bitmap) {
    // Fetch the bitmap metadata (size, format, stride).
    AndroidBitmapInfo info;
    if (0 > AndroidBitmap_getInfo(env, bitmap, &info)) { /////-----(2)
        LOGE("AndroidBitmap_getInfo() failed !");
        return;
    }
    LOGI("imagesize(%d,%d)\n", info.width, info.height);

    // Abort for any format other than RGBA_8888 or RGB_565.
    // NOTE(review): format_msg is indexed by the raw format enum value
    // (RGB_565 == 4) — confirm the table covers all enum values.
    if (!(info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 || info.format == ANDROID_BITMAP_FORMAT_RGB_565)){ /////-----(3)
        LOGE("Can't convert : format=%s",format_msg[info.format]);
        return;
    }
    LOGI("ImageFormat=%s",format_msg[info.format]);

    // Lock the pixel buffer to exclude concurrent access to the bitmap.
    void* pixels;
    if (0 > AndroidBitmap_lockPixels(env, bitmap, &pixels)) { /////-----(4)
        LOGE("AndroidBitmap_lockPixels() failed !");
        return;
    }

    // Convert to sepia, dispatching on the pixel format.
    switch(info.format){ /////-----(5)
    case ANDROID_BITMAP_FORMAT_RGBA_8888:
        convert_to_sepia_rgba8888(&info, pixels);
        break;
    case ANDROID_BITMAP_FORMAT_RGB_565:
        convert_to_sepia_rgb565(&info, pixels);
        break;
    }

    // Unlock the pixel buffer, allowing access to the bitmap again.
    AndroidBitmap_unlockPixels(env, bitmap); /////-----(6)
}
// Decodes frame `frameNr` of the sequence into `bitmap` and returns its
// display delay in milliseconds; throws IllegalStateException and returns 0
// if the bitmap cannot be inspected or locked.
static jlong JNICALL nativeGetFrame(
        JNIEnv* env, jobject clazz, jlong frameSequenceStateLong, jint frameNr,
        jobject bitmap, jint previousFrameNr) {
    FrameSequenceState* state =
            reinterpret_cast<FrameSequenceState*>(frameSequenceStateLong);

    AndroidBitmapInfo info;
    int status;
    if ((status = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        jniThrowException(env, ILLEGAL_STATE_EXEPTION,
                "Couldn't get info from Bitmap ");
        return 0;
    }

    void* pixels;
    if ((status = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        jniThrowException(env, ILLEGAL_STATE_EXEPTION,
                "Bitmap pixels couldn't be locked");
        return 0;
    }

    // stride is in bytes; Color8888 pixels are 4 bytes, hence the >> 2.
    const int pixelStride = info.stride >> 2;
    jlong delayMs = state->drawFrame(frameNr,
            (Color8888*) pixels, pixelStride, previousFrameNr);

    AndroidBitmap_unlockPixels(env, bitmap);
    return delayMs;
}
/* Copies the (x, y, width, height) rectangle of the SPICE framebuffer into
 * the Java bitmap, swapping the first and third bytes of each pixel
 * (BGRA -> RGBA). The alpha byte of the destination is left untouched. */
JNIEXPORT void JNICALL Java_com_iiordanov_aSPICE_SpiceCommunicator_UpdateBitmap (JNIEnv* env, jobject obj, jobject bitmap, gint x, gint y, gint width, gint height) {
    uchar* pixels;
    SpiceDisplayPrivate *d = SPICE_DISPLAY_GET_PRIVATE(global_display);

    if (AndroidBitmap_lockPixels(env, bitmap, (void**)&pixels) < 0) {
        __android_log_write(6, "android-io", "AndroidBitmap_lockPixels() failed!");
        return;
    }

    /* Row length in bytes (4 bytes/pixel). NOTE(review): the same offset and
     * row length are used for both buffers, i.e. the bitmap's stride is
     * assumed to equal d->width * 4 — confirm for padded bitmaps. */
    int slen = d->width * 4;
    int offset = (slen * y) + (x * 4);
    uchar *source = d->data;
    uchar *sourcepix = (uchar*) &source[offset];
    uchar *destpix = (uchar*) &pixels[offset];

    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width * 4; j += 4) {
            /* Swap channels 0 and 2; byte 3 (alpha) is deliberately skipped. */
            destpix[j + 0] = sourcepix[j + 2];
            destpix[j + 1] = sourcepix[j + 1];
            destpix[j + 2] = sourcepix[j + 0];
        }
        /* Advance both pointers one full framebuffer row. */
        sourcepix = sourcepix + slen;
        destpix = destpix + slen;
    }

    AndroidBitmap_unlockPixels(env, bitmap);
}
/* Blits the updated (x, y, width, height) region of the FreeRDP GDI primary
 * buffer into the Java bitmap. Returns JNI_TRUE on success, JNI_FALSE when
 * the bitmap cannot be inspected or locked. */
JNIEXPORT jboolean JNICALL jni_freerdp_update_graphics(
    JNIEnv *env, jclass cls, jint instance, jobject bitmap, jint x, jint y, jint width, jint height)
{
    int ret;
    void* pixels;
    AndroidBitmapInfo info;
    /* NOTE(review): a 32-bit jint is reinterpreted as a native pointer here;
     * this truncates addresses on 64-bit ABIs — the instance handle should be
     * carried as a jlong. Flagged, not changed, since the Java signature
     * would have to change too. */
    freerdp* inst = (freerdp*)instance;
    rdpGdi *gdi = inst->context->gdi;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0)
    {
        DEBUG_ANDROID("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0)
    {
        DEBUG_ANDROID("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    /* Copy only the dirty rectangle out of the full GDI surface. */
    copy_pixel_buffer(pixels, gdi->primary_buffer, x, y, width, height, gdi->width, gdi->height, gdi->bytesPerPixel);

    AndroidBitmap_unlockPixels(env, bitmap);

    return JNI_TRUE;
}
// Applies effect `effectNumber` to `bitmap` in place. Requires RGB_565.
JNIEXPORT void JNICALL Java_si_majcn_frameoffame_MainActivity_applyEffect(JNIEnv *env, jobject obj, jobject bitmap, jint effectNumber)
{
    AndroidBitmapInfo info;
    int ret;
    void *pixels;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    // BUG FIX: the message said "RGBA_565" although the check is for RGB_565.
    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565) {
        LOGE("Bitmap format is not RGB_565 !");
        return;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        // BUG FIX: the original fell through and used an invalid pixel pointer.
        return;
    }

    applyEffect(&info, pixels, effectNumber);

    AndroidBitmap_unlockPixels(env, bitmap);
}
// Copies a raw native pixel buffer (opaque handle `data`) into `bitmap`.
// Accepts RGBA_8888 (4 bytes/pixel) or A_8 (1 byte/pixel) bitmaps only.
void Java_com_zspace_Android_updateBitmap(JNIEnv* env, jobject that, jobject bitmap, jlong data) {
    // timestamp: 122..305 microseconds mean=152. Contested lock up to 5800 mcs
    // timestamp("updateBitmap");
    void* buffer = ll2p(data);

    AndroidBitmapInfo info = {0};
    int rc = AndroidBitmap_getInfo(env, bitmap, &info);
    if (rc != 0) {
        trace("AndroidBitmap_getInfo() failed ! error=%d", rc);
        return;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
        info.format != ANDROID_BITMAP_FORMAT_A_8) {
        trace("Bitmap format is not RGBA_8888 or A_8");
        return;
    }

    void* pixels = null;
    rc = AndroidBitmap_lockPixels(env, bitmap, &pixels);
    if (rc != 0) {
        trace("AndroidBitmap_lockPixels() failed ! error=%d", rc);
        return;
    }

    // RGBA_8888 carries sizeof(int) bytes per pixel, A_8 carries one.
    memcpy(pixels, buffer,
           info.width * info.height *
           (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ? sizeof(int) : 1));

    AndroidBitmap_unlockPixels(env, bitmap);
    // timestamp("updateBitmap");
}
/* Encodes one bitmap frame with libvpx and serializes the resulting packets
 * into `buffer` as [int frame_size][frame bytes]... records.
 * Returns the total number of bytes written (0 if nothing fit or encoding
 * failed). `deadline` selects realtime/good/best quality. */
JNIEXPORT jint Java_ryulib_VideoZip_VPX_EncodeBitmap(JNIEnv* env, jclass clazz, jint handle,
        jobject bitmap, jbyteArray buffer, jint bufferSize,
        jint deadline)
{
    /* NOTE(review): a 32-bit jint is reinterpreted as a native pointer — this
     * truncates on 64-bit ABIs; the handle should be a jlong. */
    RyuVPX *pHandle = (RyuVPX *) handle;

    jbyte *pByteBuffer = (*env)->GetByteArrayElements(env, buffer, 0);

    int packet_size = 0;
    /* frame_cnt is always 0 here: it is passed to vpx_codec_encode as the
     * presentation timestamp of this single frame. */
    int frame_cnt = 0;
    int flags = 0;

    /* Map the Java-side deadline code onto libvpx deadline constants. */
    unsigned long ulDeadline = VPX_DL_GOOD_QUALITY;
    switch (deadline) {
        case 0: ulDeadline = VPX_DL_REALTIME; break;
        case 1: ulDeadline = VPX_DL_GOOD_QUALITY; break;
        case 2: ulDeadline = VPX_DL_BEST_QUALITY; break;
    }

    void *pixelBitmap;
    if (AndroidBitmap_lockPixels(env, bitmap, &pixelBitmap) >= 0) {
        /* Convert the locked RGB pixels into the encoder's YUV420 image plane. */
        RGBtoYUV420((unsigned char*) pixelBitmap, pHandle->img.planes[0],
                pHandle->cfgEnc.g_w, pHandle->cfgEnc.g_h, _PixelSize);

        int encodeResult = vpx_codec_encode(&pHandle->codec, &pHandle->img, frame_cnt, 1, flags, ulDeadline);

        AndroidBitmap_unlockPixels(env, bitmap);

        /* Non-zero means the encode failed; bail out with packet_size == 0. */
        if (encodeResult) goto EXIT;
    }

    /* Drain all packets produced for this frame into the output buffer. */
    const vpx_codec_cx_pkt_t *pPacket;
    vpx_codec_iter_t iter = NULL;
    unsigned char *pFrame = (unsigned char *) pByteBuffer;
    int *pFrameSize;
    while ( (pPacket = (vpx_codec_get_cx_data(&pHandle->codec, &iter))) ) {
        /* Stop (discarding the rest) if the next record would overflow. */
        if ((packet_size + sizeof(int) + pPacket->data.frame.sz) >= bufferSize) goto EXIT;

        switch (pPacket->kind) {
            case VPX_CODEC_CX_FRAME_PKT: {
                /* Record layout: 4-byte size prefix, then the frame bytes. */
                pFrameSize = (int *) pFrame;
                *pFrameSize = pPacket->data.frame.sz;
                pFrame = pFrame + sizeof(int);

                memcpy(pFrame, pPacket->data.frame.buf, pPacket->data.frame.sz);
                pFrame = pFrame + pPacket->data.frame.sz;

                packet_size = packet_size + sizeof(int) + pPacket->data.frame.sz;
            }
            break;

            default:
            break;
        }
    }

EXIT:
    /* Mode 0 copies any changes back into the Java array and releases it. */
    (*env)->ReleaseByteArrayElements(env, buffer, pByteBuffer, 0);

    return packet_size;
}
/* Decodes the MJPEG byte array `jp` (length l) via processimage() and copies
 * the resulting ARGB words from the global `rgb` buffer into `bmp`.
 * Returns 0 on success, -1 on any failure. */
int Java_com_example_android_wearable_mjpegviewwear_MjpegInputStream_pixeltobmp(
        JNIEnv* env, jobject thiz, jbyteArray jp, jint l, jobject bmp)
{
    jboolean b;
    jbyte *p = (*env)->GetByteArrayElements(env, jp, &b);

    /* Decode the compressed frame into the global rgb[] buffer. */
    processimage((const void *) p, l);

    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int *colors;
    int width = IMG_WIDTH;
    int height = IMG_HEIGHT;

    if (bmp == NULL) {
        /* BUG FIX: the original used a bare 'return;' in an int function and
         * leaked the pinned byte array. */
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if ((ret = AndroidBitmap_getInfo(env, bmp, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if (info.width != IMG_WIDTH || info.height != IMG_HEIGHT) {
        LOGE("Bitmap size differs !");
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }
    if ((ret = AndroidBitmap_lockPixels(env, bmp, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* BUG FIX: the original fell through and wrote to an unlocked buffer. */
        (*env)->ReleaseByteArrayElements(env, jp, p, 0);
        return -1;
    }

    /* Copy the decoded words into the bitmap. */
    colors = (int*) pixels;
    int *lrgb = &rgb[0];
    for (i = 0; i < width * height; i++) {
        *colors++ = *lrgb++;
    }

    AndroidBitmap_unlockPixels(env, bmp);
    (*env)->ReleaseByteArrayElements(env, jp, p, 0);
    return 0;
}
/* Converts an RGBA_8888 bitmap into an 8-bit Leptonica PIX (simple RGB
 * average, alpha discarded) and returns the PIX pointer as a jint.
 * Returns JNI_FALSE (0) on any bitmap access/format failure.
 * NOTE(review): returning a native pointer as jint truncates on 64-bit ABIs —
 * the handle should be a jlong. */
jint Java_com_googlecode_leptonica_android_ReadFile_nativeReadBitmap(JNIEnv *env, jclass clazz, jobject bitmap) {
    //LOGV(__FUNCTION__);
    l_int32 w, h, d;
    AndroidBitmapInfo info;
    void* pixels;
    int ret;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return JNI_FALSE;
    }

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return JNI_FALSE;
    }

    // 8-bit grayscale destination PIX of the same dimensions.
    PIX *pixd = pixCreate(info.width, info.height, 8);

    l_uint32 *src = (l_uint32 *) pixels;
    l_int32 srcWpl = (info.stride / 4);  // stride is bytes; words-per-line
    l_uint32 *dst = pixGetData(pixd);
    l_int32 dstWpl = pixGetWpl(pixd);
    l_uint8 a, r, g, b, pixel8;

    for (int y = 0; y < info.height; y++) {
        l_uint32 *dst_line = dst + (y * dstWpl);
        l_uint32 *src_line = src + (y * srcWpl);

        for (int x = 0; x < info.width; x++) {
            // Get pixel from RGBA_8888 (shifts defined by Skia's SK_*32_SHIFT).
            r = *src_line >> SK_R32_SHIFT;
            g = *src_line >> SK_G32_SHIFT;
            b = *src_line >> SK_B32_SHIFT;
            a = *src_line >> SK_A32_SHIFT;

            // Unweighted average; alpha is read but intentionally unused.
            pixel8 = (l_uint8) ((r + g + b) / 3);

            // Set pixel to LUMA_8
            SET_DATA_BYTE(dst_line, x, pixel8);

            // Move to the next pixel
            src_line++;
        }
    }

    AndroidBitmap_unlockPixels(env, bitmap);

    return (jint) pixd;
}
void JNIFUNCF(ImageFilterGeometry, nativeApplyFilterStraighten, jobject src, jint srcWidth, jint srcHeight, jobject dst, jint dstWidth, jint dstHeight, jfloat straightenAngle) { char* destination = 0; char* source = 0; int len = dstWidth * dstHeight * 4; AndroidBitmap_lockPixels(env, src, (void**) &source); AndroidBitmap_lockPixels(env, dst, (void**) &destination); // TODO: implement straighten int i = 0; for (; i < len; i += 4) { int r = source[RED]; int g = source[GREEN]; int b = source[BLUE]; destination[RED] = 128; destination[GREEN] = g; destination[BLUE] = 128; } AndroidBitmap_unlockPixels(env, dst); AndroidBitmap_unlockPixels(env, src); }