static int
nAllocationCubeCreateFromBitmap(JNIEnv *_env, jobject _this, RsContext con, jint type, jint mip, jobject jbitmap, jint usage)
{
    void *pixels = NULL;
    AndroidBitmap_lockPixels(_env, jbitmap, &pixels);

    jint id = 0;
    if (pixels != NULL) {
        id = (jint)dispatchTab.AllocationCubeCreateFromBitmap(con,
                                                    (RsType)type, (RsAllocationMipmapControl)mip,
                                                    pixels, GetBitmapSize(_env, jbitmap), usage);
        AndroidBitmap_unlockPixels(_env, jbitmap);
    }
    return id;
}
static int
nAllocationCreateBitmapBackedAllocation(JNIEnv *_env, jobject _this, RsContext con, jint type, jint mip, jobject jbitmap, jint usage)
{
    jint id = 0;
    void *pixels = NULL;
    AndroidBitmap_lockPixels(_env, jbitmap, &pixels);

    if (pixels != NULL) {
        id = (jint)dispatchTab.AllocationCreateTyped(con,
                                          (RsType)type, (RsAllocationMipmapControl)mip,
                                          (uint32_t)usage, (uintptr_t)pixels);
        AndroidBitmap_unlockPixels(_env, jbitmap);
    }
    return id;
}
/** LibVisualBitmapView.renderVisual() */
JNIEXPORT void JNICALL Java_org_libvisual_android_LibVisualBitmapView_renderVisual(JNIEnv * env, 
                                                                                   jobject  obj, 
                                                                                   jobject bitmap,
                                                                                   jint bin,
                                                                                   jint video)
{
    VisBin *b = (VisBin *) bin;
    VisVideo *bvideo = (VisVideo *) video;
        
    if(!visual_is_initialized() ||
       !bvideo ||
       !b || 
       !b->input || 
       !b->actor ||
       !b->actvideo)
    {
        LOGE("Not initialized properly");    
        return;
    }
        
    /* start fps timing */
    fps_startFrame(&_v.fps);

    /* run libvisual pipeline */
    visual_bin_run(b);

        
    /* lock bitmap for drawing */
    int ret;
    void *pixels = NULL;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0)
    {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return;
    }

    /* set buffer to pixels */
    visual_video_set_buffer(bvideo, pixels);
    
    /* depth transform */
    visual_video_convert_depth(bvideo, b->actvideo);

        
    /* unlock bitmap */
    AndroidBitmap_unlockPixels(env, bitmap);

    /* stop fps timing */
    fps_endFrame(&_v.fps);
}
static void
nAllocationCopyToBitmap(JNIEnv *_env, jobject _this, RsContext con, jint alloc, jobject jbitmap)
{
    AndroidBitmapInfo info;
    memset(&info, 0, sizeof(info));
    AndroidBitmap_getInfo(_env, jbitmap, &info);

    void *pixels = NULL;
    AndroidBitmap_lockPixels(_env, jbitmap, &pixels);

    if (pixels != NULL) {
        dispatchTab.AllocationCopyToBitmap(con, (RsAllocation)alloc, pixels, GetBitmapSize(_env, jbitmap));
        AndroidBitmap_unlockPixels(_env, jbitmap);
    }
    //bitmap.notifyPixelsChanged();
}
JNIEXPORT jfloatArray JNICALL Java_com_lge_ccdevs_tracker_CameraPreview_native_1cv_1match
(JNIEnv *env, jobject thiz, jobject srcimg) {
    AndroidBitmapInfo bInfo;
    char *bPixs;
    int bRet;

    // convert img
    if ((bRet = AndroidBitmap_getInfo(env, srcimg, &bInfo)) < 0) {
        LOGE("AndroidBitmap_getInfo failed(src)! error = %d", bRet);
        return 0;
    }
    if (bInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap(src) format is not RGBA_8888!");
        return 0;
    }

    if ((bRet = AndroidBitmap_lockPixels(env, srcimg, (void**)&bPixs)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed(src)! error = %d", bRet);
        return 0;
    }

    IplImage* bimg = cvCreateImage(cvSize(bInfo.width, bInfo.height), IPL_DEPTH_8U, 4);
    memcpy(bimg->imageData, bPixs, bimg->imageSize);
    AndroidBitmap_unlockPixels(env, srcimg);
    IplImage* img = cvCreateImage(cvSize(bInfo.width, bInfo.height), IPL_DEPTH_8U, 3);
    cvCvtColor(bimg, img, CV_RGBA2BGR);

    
    res_pts = matcher->match(img);

    // test capture
    stringstream ss;
    ss << "/sdcard/tracker/tracker_match" << ncount_match++ << ".jpg";
    Converter::saveJPG(ss.str().c_str(), img);

    LOGE("#### matched pts = (%f,%f), (%f,%f), (%f,%f), (%f,%f)",
            res_pts[0], res_pts[1], res_pts[2], res_pts[3],
            res_pts[4], res_pts[5], res_pts[6], res_pts[7] );
    cvReleaseImage( &bimg );
    cvReleaseImage( &img );

    jfloatArray result;
    result = env->NewFloatArray(8);
    env->SetFloatArrayRegion(result,0,8,res_pts);

    return result;
}
static void readBitmapPixels(JNIEnv* env, jclass /* clazz */, jobject jbitmap, jint fd) {
    // Read the info.
    AndroidBitmapInfo readInfo;
    bool read = readAllBytes(fd, (void*) &readInfo, sizeof(AndroidBitmapInfo));
    if (!read) {
        throwIllegalStateException(env, (char*) "Cannot read bitmap info");
        return;
    }

    // Get the info of the target bitmap.
    AndroidBitmapInfo targetInfo;
    int result = AndroidBitmap_getInfo(env, jbitmap, &targetInfo);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot get bitmap info");
        return;
    }

    // Enforce we can reuse the bitmap.
    if (readInfo.width != targetInfo.width || readInfo.height != targetInfo.height
            || readInfo.stride != targetInfo.stride || readInfo.format != targetInfo.format
            || readInfo.flags != targetInfo.flags) {
        throwIllegalArgumentException(env, (char*) "Cannot reuse bitmap");
        return;
    }

    // Lock the pixels.
    void* pixels;
    result = AndroidBitmap_lockPixels(env, jbitmap, &pixels);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot lock bitmap pixels");
        return;
    }

    // Read the pixels.
    size_t byteCount = readInfo.stride * readInfo.height;
    read = readAllBytes(fd, (void*) pixels, byteCount);
    if (!read) {
        throwIllegalStateException(env, (char*) "Cannot read bitmap pixels");
        return;
    }

    // Unlock the pixels.
    result = AndroidBitmap_unlockPixels(env, jbitmap);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot unlock bitmap pixels");
    }
}
/** Program having access to java Bitmap through JNI */
cv::Mat getMatWrapper(JNIEnv *env, jobject bmap)
{
  AndroidBitmapInfo info;
  CV_Assert( AndroidBitmap_getInfo(env, bmap, &info) >= 0 ); // Get info
  CV_Assert( info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ); // Compatible format

  void *pixels = 0;
  CV_Assert( AndroidBitmap_lockPixels(env, bmap, &pixels) >= 0 ); // Get pixels
  CV_Assert( pixels );

  cv::Mat m(info.height, info.width, CV_8UC4, pixels);
  cv::Mat frame; // CV_8UC3
  cvtColor(m, frame, CV_RGBA2BGR);
  AndroidBitmap_unlockPixels(env, bmap);

  return frame;
}
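/*
 * Usage sketch (added for illustration; the JNI class and method names below are
 * assumptions, not part of the original example, and the OpenCV/JNI headers are
 * assumed to be included by the surrounding snippet). It shows why getMatWrapper()
 * is safe to call: cvtColor() writes into a newly allocated Mat ("frame"), so the
 * returned data no longer points at the bitmap's pixels once they are unlocked.
 * Returning the wrapping Mat "m" instead would leave a dangling reference.
 */
extern "C" JNIEXPORT void JNICALL
Java_com_example_vision_NativeBridge_processBitmap(JNIEnv *env, jobject /* thiz */, jobject bmap)
{
  cv::Mat frame = getMatWrapper(env, bmap);           // deep BGR copy of the bitmap
  if (frame.empty())
    return;
  cv::GaussianBlur(frame, frame, cv::Size(5, 5), 0);  // placeholder processing step
}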
static void
nAllocationCopyFromBitmap(JNIEnv *_env, jobject _this, RsContext con, jint alloc, jobject jbitmap)
{
    AndroidBitmapInfo info;
    memset(&info, 0, sizeof(info));
    AndroidBitmap_getInfo(_env, jbitmap, &info);

    void *pixels = NULL;
    AndroidBitmap_lockPixels(_env, jbitmap, &pixels);

    if (pixels != NULL) {
        dispatchTab.Allocation2DData(con, (RsAllocation)alloc, 0, 0,
                           0, RS_ALLOCATION_CUBEMAP_FACE_POSITIVE_X,
                           info.width, info.height, pixels, GetBitmapSize(_env, jbitmap), 0);
        AndroidBitmap_unlockPixels(_env, jbitmap);
    }
}
void MyRenderer::Render(JNIEnv *env, jobject bitmap) noexcept
{
	int ret = 0;
	AndroidBitmapInfo bitmapInfo;

	if ((ret = AndroidBitmap_getInfo(env, bitmap, &bitmapInfo)) < 0)
	{
		info("MyRenderer", "AndroidBitmap_getInfo failed!");
		return;
	}
	else {
		info("MyRenderer", "bitmapInfo width = %d, height = %d", bitmapInfo.width, bitmapInfo.height);
	}

	GLuint textureId;
	GLint location = glGetUniformLocation(m_Program->GetProgramId(), "s_texture");

	glGenTextures(1, &textureId);
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, textureId);
	glUniform1i(location, 0);

	void *pixels = nullptr;
	if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0 || pixels == nullptr)
	{
		info("MyRenderer", "AndroidBitmap_lockPixels failed! error = %d", ret);
		glDeleteTextures(1, &textureId);
		return;
	}
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bitmapInfo.width, bitmapInfo.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

	this->OnRender(m_Width, m_Height);
	AndroidBitmap_unlockPixels(env, bitmap);
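	// Caveat: textureId is generated on every call and never deleted, so repeated
	// Render() calls leak GL texture objects. If the texture is only needed for this
	// draw, glDeleteTextures(1, &textureId); after the draw would avoid that.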
	//GLenum format = GL_RGB;
	//if (bitmapInfo.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
	//	info("MyRenderer", "bitmapInfo format = ANDROID_BITMAP_FORMAT_RGBA_8888");
	//}
	//else if (bitmapInfo.format ==  ANDROID_BITMAP_FORMAT_RGB_565) {
	//	info("MyRenderer", "bitmapInfo format = ANDROID_BITMAP_FORMAT_RGB_565");
	//}
	//else if (bitmapInfo.format ==  ANDROID_BITMAP_FORMAT_RGBA_4444) {
	//	info("MyRenderer", "bitmapInfo format = ANDROID_BITMAP_FORMAT_RGBA_4444");
	//}
	//else if (bitmapInfo.format ==  ANDROID_BITMAP_FORMAT_A_8) {
	//	info("MyRenderer", "bitmapInfo format = ANDROID_BITMAP_FORMAT_A_8");
	//}
}
void
Java_ch_nuicell_ecam_CameraPreview_pixeltobmp( JNIEnv* env,jobject thiz,jobject bitmap) {

    AndroidBitmapInfo  info;
    void*              pixels;
    int                ret;
    int i;
    int *colors;

    int width=0;
    int height=0;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }

    width = info.width;
    height = info.height;

    if(!rgb || !ybuf) return;

    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Bitmap format is not RGBA_8888 !");
        return;
    }

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return;
    }

    colors = (int*)pixels;
    int *lrgb =NULL;
    lrgb = &rgb[0];

    for(i=0 ; i<width*height ; i++) {
        *colors++ = *lrgb++;
    }

    AndroidBitmap_unlockPixels(env, bitmap);

}
JNIEXPORT jobject JNICALL Java_com_jni_bitmap_1operations_JniBitmapHolder_jniGetBitmapFromStoredBitmapData(
	JNIEnv * env, jobject obj, jobject handle)
    {
    JniBitmap* jniBitmap = (JniBitmap*) env->GetDirectBufferAddress(handle);
    if (jniBitmap == NULL || jniBitmap->_storedBitmapPixels == NULL)
	{
	LOGD("no bitmap data was stored. returning null...");
	return NULL;
	}
    //
    //creating a new bitmap to put the pixels into it - using Bitmap Bitmap.createBitmap (int width, int height, Bitmap.Config config) :
    //
    jclass bitmapCls = env->FindClass("android/graphics/Bitmap");
    jmethodID createBitmapFunction = env->GetStaticMethodID(bitmapCls,
	    "createBitmap",
	    "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    jstring configName = env->NewStringUTF("ARGB_8888");
    jclass bitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config");
    jmethodID valueOfBitmapConfigFunction = env->GetStaticMethodID(
	    bitmapConfigClass, "valueOf",
	    "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
    jobject bitmapConfig = env->CallStaticObjectMethod(bitmapConfigClass,
	    valueOfBitmapConfigFunction, configName);
    jobject newBitmap = env->CallStaticObjectMethod(bitmapCls,
	    createBitmapFunction, jniBitmap->_bitmapInfo.width,
	    jniBitmap->_bitmapInfo.height, bitmapConfig);
    //
    // putting the pixels into the new bitmap:
    //
    int ret;
    void* bitmapPixels;
    if ((ret = AndroidBitmap_lockPixels(env, newBitmap, &bitmapPixels)) < 0)
	{
	LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
	return NULL;
	}
    uint32_t* newBitmapPixels = (uint32_t*) bitmapPixels;
    int pixelsCount = jniBitmap->_bitmapInfo.height
	    * jniBitmap->_bitmapInfo.width;
    memcpy(newBitmapPixels, jniBitmap->_storedBitmapPixels,
	    sizeof(uint32_t) * pixelsCount);
    AndroidBitmap_unlockPixels(env, newBitmap);
    //LOGD("returning the new bitmap");
    return newBitmap;
    }
jlong Java_com_googlecode_leptonica_android_ReadFile_nativeReadBitmap(JNIEnv *env, jclass clazz,
                                                                      jobject bitmap) {
	LOGV(__FUNCTION__);

	l_int32 w, h, d;
	AndroidBitmapInfo info;
	void* pixels;
	int ret;

	if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
		LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
		return JNI_FALSE;
	}

	if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
		LOGE("Bitmap format is not RGBA_8888 !");
		return JNI_FALSE;
	}

	if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
		LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
		return JNI_FALSE;
	}

	PIX *pixd = pixCreate(info.width, info.height, 32);
	l_uint8 *src = (l_uint8 *) pixels;
	l_uint8 *dst = (l_uint8 *) pixGetData(pixd);
	l_int32 srcBpl = (info.stride);
	l_int32 dstBpl = pixGetWpl(pixd)*4;

	for (int dy = 0; dy < info.height; dy++) {
		memcpy(dst, src, 4 * info.width);
		dst += dstBpl;
		src += srcBpl;

	}
	pixEndianByteSwap(pixd);

	AndroidBitmap_unlockPixels(env, bitmap);

	return (jlong) pixd;
}
    virtual LVDrawBuf * lock(JNIEnv* env, jobject jbitmap) {
		AndroidBitmapInfo info;
		if ( ANDROID_BITMAP_RESUT_SUCCESS!=AndroidBitmap_getInfo(env, jbitmap, &info) ) {
			return NULL;
		}
		int width = info.width;
		int height = info.height;
		int stride = info.stride;
		int format = info.format;
		if ( format!=ANDROID_BITMAP_FORMAT_RGBA_8888 && format!=ANDROID_BITMAP_FORMAT_RGB_565 && format!=ANDROID_BITMAP_FORMAT_A_8 ) {
			return NULL;
		}
		int bpp = (format==ANDROID_BITMAP_FORMAT_RGBA_8888) ? 32 : 16;
		lUInt8 * pixels = NULL; 
		if ( ANDROID_BITMAP_RESUT_SUCCESS!=AndroidBitmap_lockPixels(env, jbitmap, (void**)&pixels) ) {
		    pixels = NULL;
		}
		return new LVColorDrawBufEx( width, height, pixels, bpp );
    } 
void Java_org_opentalking_drawskill_SurfaceDiff_applyAndSwap(JNIEnv *env, jobject obj, jobject dest, jint boundTop, jint boundBottom, jint boundLeft, jint boundRight, jbooleanArray bitmask, jintArray pixels) {

    AndroidBitmapInfo  info;
    uint32_t*          destPixels;
    int                ret;

    if ((ret = AndroidBitmap_getInfo(env, dest, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        LOGE("Destination bitmap format is not RGBA_8888!");
        return;
    }

    if ((ret = AndroidBitmap_lockPixels(env, dest, (void**)&destPixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return;
    }

    jint* pixelsPtr = (*env)->GetIntArrayElements(env, pixels, 0);
    jboolean* bitmaskPtr = (*env)->GetBooleanArrayElements(env, bitmask, 0);

    uint32_t* destPixelsPtr;

    for (int y = boundTop; y <= boundBottom; y += 1) {
        destPixelsPtr = destPixels + y * info.width + boundLeft;

        for (int x = boundLeft; x <= boundRight; x += 1) {
            if (*bitmaskPtr) {
                int swapPixel = *destPixelsPtr;
                *destPixelsPtr = *pixelsPtr;
                *pixelsPtr = swapPixel;
                pixelsPtr += 1;
            }
            destPixelsPtr += 1;
            bitmaskPtr += 1;
        }
    }

    (*env)->ReleaseIntArrayElements(env, pixels, pixelsPtr, 0);
    (*env)->ReleaseBooleanArrayElements(env, bitmask, bitmaskPtr, 0);

    AndroidBitmap_unlockPixels(env, dest);
}
static void filter_bitmap(JNIEnv* env, jobject obj, jobject bitmap,
    float intensity, Filter t)
{
    AndroidBitmapInfo info;
    int ret = checkInfo(env, bitmap, &info);

    if (ret < 0) {
        return;
    }

    void* pixels;

    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return;
    }

    do_filter(&info, pixels, intensity, t);

    AndroidBitmap_unlockPixels(env, bitmap);
}
JNIEXPORT jint JNICALL Java_com_jazzros_ffmpegtest_AVThread_nativeSetBitmapBuffer(JNIEnv* env, jobject thiz, jobject bitmap)
{
    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "nativeSetBitmapBuffer()");

    if ((*env)->IsSameObject(env, bitmap, NULL))
    {
        __android_log_print(ANDROID_LOG_ERROR, "com.jazzros.ffmpegtest", "invalid arguments");
        return -2;
    }
    // Do not forget release previous instance of resources
    releaseBitmap (env, thiz);

    gBitmapRef = (*env)->NewGlobalRef(env, bitmap); //lock the bitmap preventing the garbage collector from destructing it
    if (gBitmapRef == NULL)
    {
        __android_log_print(ANDROID_LOG_ERROR, "com.jazzros.ffmpegtest", "NewGlobalRef() failed");
        return -4;
    }

    int result = AndroidBitmap_lockPixels(env, gBitmapRef, &gBitmapRefPixelBuffer);
    if (result != 0)
    {
        __android_log_print(ANDROID_LOG_ERROR, "com.jazzros.ffmpegtest", "AndroidBitmap_lockPixels() failed with %d", result);
        gBitmapRefPixelBuffer = NULL;
        return -5;
    }
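    // The pixels stay locked (and gBitmapRef alive) until releaseBitmap() is called,
    // presumably so that decoded frames can be written straight into
    // gBitmapRefPixelBuffer without re-locking the bitmap on every frame.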

    if (AndroidBitmap_getInfo(env, gBitmapRef, &gAbi) != 0)
    {
        __android_log_print(ANDROID_LOG_ERROR, "com.jazzros.ffmpegtest", "AndroidBitmap_getInfo() failed");
        return -6;
    }

    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "bitmap width: %d", gAbi.width);
    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "bitmap height: %d", gAbi.height);
    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "bitmap stride: %d", gAbi.stride);
    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "bitmap format: %d", gAbi.format);
    __android_log_print(ANDROID_LOG_INFO, "com.jazzros.ffmpegtest", "bitmap flags: %d", gAbi.flags);

    return 0;
}
    jobject createBitmap(QImage img, JNIEnv *env)
    {
        if (img.format() != QImage::Format_ARGB32 && img.format() != QImage::Format_RGB16)
            img = img.convertToFormat(QImage::Format_ARGB32);

        jobject bitmap = env->CallStaticObjectMethod(m_bitmapClass,
                                                     m_createBitmapMethodID,
                                                     img.width(),
                                                     img.height(),
                                                     img.format() == QImage::Format_ARGB32
                                                        ? m_ARGB_8888_BitmapConfigValue
                                                        : m_RGB_565_BitmapConfigValue);
        if (!bitmap)
            return 0;

        AndroidBitmapInfo info;
        if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
            env->DeleteLocalRef(bitmap);
            return 0;
        }

        void *pixels;
        if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
            env->DeleteLocalRef(bitmap);
            return 0;
        }

        if (info.stride == uint(img.bytesPerLine())
                && info.width == uint(img.width())
                && info.height == uint(img.height())) {
            memcpy(pixels, img.constBits(), info.stride * info.height);
        } else {
            uchar *bmpPtr = static_cast<uchar *>(pixels);
            const unsigned width = qMin(info.width, (uint)img.width()); //should be the same
            const unsigned height = qMin(info.height, (uint)img.height()); //should be the same
            // width/height count pixels; copy each row by its length in bytes
            const unsigned bytesPerLine = qMin(uint(info.stride), uint(img.bytesPerLine()));
            for (unsigned y = 0; y < height; y++, bmpPtr += info.stride)
                memcpy(bmpPtr, img.constScanLine(y), bytesPerLine);
        }
        AndroidBitmap_unlockPixels(env, bitmap);
        return bitmap;
    }
JNIEXPORT jboolean JNICALL Java_com_ly_widget_GifDrawable_updateFrame(JNIEnv * env, jobject obj, jlong jgif_handle,  jobject bitmap, jint level)
{
  GifFileType* gif_handle = (GifFileType *)jgif_handle;

  int frame = getFrame(gif_handle, level);
  if (((GifAnimInfo *)gif_handle->UserData)->current_frame == frame) {
    return JNI_FALSE;
  }

  AndroidBitmapInfo  info;
  void*              pixels;
  int                ret;

  if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
    LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
    return JNI_FALSE;
  }

  if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
    LOGE("Bitmap format is not RGBA_8888 !");
    return JNI_FALSE;
  }

  if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
    LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
    return JNI_FALSE;
  }

  //double now = now_ms();
  drawFrame(gif_handle, &info, (int*)pixels, frame, false);
  //now = now_ms() - now;
  //LOGE("Gif Time Taken : %.2f", now);
  AndroidBitmap_unlockPixels(env, bitmap);

  ((GifAnimInfo *)gif_handle->UserData)->current_frame = frame;

  /*
   * TODO : Check if you actually need to draw the frame, if not return JNI_FALSE
   */

  return JNI_TRUE;
}
JniBitmap* getJniBitmap(JNIEnv *env, jobject bitmap){
	AndroidBitmapInfo bitmapInfo;
	uint32_t* storedBitmapPixels = NULL;
	uint32_t* src = NULL;
	void* bitmapPixels;

	//LOGD("reading bitmap info...");
	int ret;
	if ((ret = AndroidBitmap_getInfo(env, bitmap, &bitmapInfo)) < 0){
		LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
		return NULL;
	}

	//LOGD("width:%d height:%d stride:%d", bitmapInfo.width, bitmapInfo.height, bitmapInfo.stride);
	if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888){
		LOGE("Bitmap format is not RGBA_8888!");
		return NULL;
	}

	//LOGD("reading bitmap pixels...");
	if ((ret = AndroidBitmap_lockPixels(env, bitmap, &bitmapPixels)) < 0){
		LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
		return NULL;
	}

	src = (uint32_t*) bitmapPixels;

	int pixelsCount = bitmapInfo.height * bitmapInfo.width;
	storedBitmapPixels = new uint32_t[pixelsCount];
	memcpy(storedBitmapPixels, src, sizeof(uint32_t) * pixelsCount);
	AndroidBitmap_unlockPixels(env, bitmap);

	JniBitmap* jniBitmap = new JniBitmap();
	jniBitmap->_bitmapInfo = bitmapInfo;
	jniBitmap->_storedBitmapPixels = storedBitmapPixels;

	//delete(storedBitmapPixels);

	return jniBitmap;
}
void mat_to_bitmap(JNIEnv *env, Mat &srcMat, jobject &dstBitmap) {
    void *dstPixels = 0;
    AndroidBitmapInfo dstBitmapInfo;
    try {
        AndroidBitmap_getInfo(env, dstBitmap, &dstBitmapInfo);
        AndroidBitmap_lockPixels(env, dstBitmap, &dstPixels);
        uint32_t dstHeight = dstBitmapInfo.height;
        uint32_t dstWidth = dstBitmapInfo.width;
        if (dstBitmapInfo.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            Mat tmp(dstHeight, dstWidth, CV_8UC4, dstPixels);
            if(srcMat.type() == CV_8UC1) {
                cvtColor(srcMat, tmp, COLOR_GRAY2RGBA);
            } else if (srcMat.type() == CV_8UC3) {
                cvtColor(srcMat, tmp, COLOR_RGB2RGBA);
            } else if (srcMat.type() == CV_8UC4) {
                srcMat.copyTo(tmp);
            }
        } else {
            Mat tmp = Mat(dstHeight, dstWidth, CV_8UC2, dstPixels);
            if(srcMat.type() == CV_8UC1) {
                cvtColor(srcMat, tmp, COLOR_GRAY2BGR565);
            } else if (srcMat.type() == CV_8UC3) {
                cvtColor(srcMat, tmp, COLOR_RGB2BGR565);
            } else if (srcMat.type() == CV_8UC4) {
                cvtColor(srcMat, tmp, COLOR_RGBA2BGR565);
            }
        }
        AndroidBitmap_unlockPixels(env, dstBitmap);
    }catch (cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, dstBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env -> ThrowNew(je, e.what());
        return;
    } catch (...) {
        AndroidBitmap_unlockPixels(env, dstBitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env -> ThrowNew(je, "unknown");
        return;
    }
}
jboolean Java_com_mcxiaoke_ndk_Native_renderBitmap(
    JNIEnv* env,
    jclass clazz,
    jlong avi,
    jobject bitmap)
{
    jboolean isFrameRead = JNI_FALSE;

    char* frameBuffer = 0;
    long frameSize = 0;
    int keyFrame = 0;

    // Lock bitmap and get the raw bytes
    if (0 > AndroidBitmap_lockPixels(env, bitmap, (void**) &frameBuffer))
    {
        ThrowException(env, "java/io/IOException",
                       "Unable to lock pixels.");
        goto exit;
    }

    // Read AVI frame bytes to bitmap
    frameSize = AVI_read_frame((avi_t*) avi, frameBuffer, &keyFrame);
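    // Caveat: AVI_read_frame() does not know the bitmap's size; the caller must
    // ensure the bitmap buffer (stride * height) is large enough for the largest
    // frame the AVI can produce, or decoded data will overrun the locked pixels.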

    // Unlock bitmap
    if (0 > AndroidBitmap_unlockPixels(env, bitmap))
    {
        ThrowException(env, "java/io/IOException",
                       "Unable to unlock pixels.");
        goto exit;
    }

    // Check if frame is successfully read
    if (0 < frameSize)
    {
        isFrameRead = JNI_TRUE;
    }

exit:
    return isFrameRead;
}
static void* create_buffer(BufferContainer* container, uint32_t width, uint32_t height, int32_t config) {
  BitmapContainerData* data = container->data;
  void *pixels = NULL;

  if (data->bitmap != NULL) {
    LOGE("Can't call create_buffer twice.");
    return NULL;
  }

  data->bitmap = (*data->env)->CallStaticObjectMethod(data->env, data->clazz, data->method, width, height, config);
  if (data->bitmap == NULL) {
    LOGE("Can't create bitmap.");
    return NULL;
  }

  AndroidBitmap_lockPixels(data->env, data->bitmap, &pixels);
  if (pixels == NULL) {
    LOGE("Can't lock pixels of Bitmap.");
  }

  return pixels;
}
void JNIFUNCF(ImageFilterWBalance, nativeApplyFilter, jobject bitmap, jint width, jint height, int locX,int locY)
{
    char* destination = 0;
    AndroidBitmap_lockPixels(env, bitmap, (void**) &destination);
    int i;
    int len = width * height * 4;
    unsigned char * rgb = (unsigned char * )destination;
    int wr;
    int wg;
    int wb;

    if (locX==-1)
        estmateWhite(rgb,len,&wr,&wg,&wb);
    else
        estmateWhiteBox(rgb, width, height,locX,locY,&wr,&wg,&wb);

    int min = MIN(wr, MIN(wg, wb));
    int max = MAX(wr, MAX(wg, wb));
    float avg = (min+max)/2.f;
    float scaleR =  avg/wr;
    float scaleG =  avg/wg;
    float scaleB =  avg/wb;

    for (i = 0; i < len; i+=4)
    {
        int r = rgb[RED];
        int g = rgb[GREEN];
        int b = rgb[BLUE];

        float Rc =  r*scaleR;
        float Gc =  g*scaleG;
        float Bc =  b*scaleB;

        rgb[RED]   = clamp(Rc);
        rgb[GREEN] = clamp(Gc);
        rgb[BLUE]  = clamp(Bc);
    }
    AndroidBitmap_unlockPixels(env, bitmap);
}
JNIEXPORT void JNICALL Java_jp_dego_sample_ipcv_MainActivity_toGrayScale(JNIEnv *env, jobject obj, jobject bmp)
{
    AndroidBitmapInfo info;
    void* pixels;
    int ret, i, j;

    // Get the bitmap info
    if ((ret = AndroidBitmap_getInfo(env, bmp, &info)) < 0) {
        return;
    }
    // Check the bitmap format
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        return;
    }
    // Lock the bitmap pixels
    if ((ret = AndroidBitmap_lockPixels(env, bmp, &pixels)) < 0) {
        return;
    }

    unsigned int *p = (unsigned int *) pixels;
    int h = info.height;
    int w = info.width;
    unsigned int r, g, b;
    unsigned char gray;
    for (j = 0; j < h; j++) {
        for (i = 0; i < w; i++) {
            // r = (p[j*w + i] & 0x000000FF);
            // g = (p[j*w + i] & 0x0000FF00) >> 8;
            // b = (p[j*w + i] & 0x00FF0000) >> 16;
            r = getR(p[j * w + i]);
            g = getG(p[j * w + i]);
            b = getB(p[j * w + i]);
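            // Note: the weights below (0.287/0.599/0.114) sum to 1.0 but differ slightly
            // from the standard Rec.601 luma weights 0.299/0.587/0.114.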
            gray = (unsigned char)((float)r * 0.287f + (float)g * 0.599f + (float)b * 0.114);
            *(p + j * w + i) = 0xFF000000 | (gray << 16) | (gray << 8) | gray;
        }
    }

    // Unlock the bitmap pixels
    AndroidBitmap_unlockPixels(env, bmp);
}
void setDetectedCardImage(JNIEnv* env, jobject jCardResultBitmap, IplImage* cardY, IplImage* cb, IplImage* cr,
                          dmz_corner_points corner_points, int orientation) {

  char* pixels = NULL;

  AndroidBitmapInfo  bmInfo;
  int bmRes = AndroidBitmap_getInfo(env, jCardResultBitmap, &bmInfo);
  // Yes, it really is defined as _RESUT_ ... figures. <sigh>
  bool validCardInfo = (bmRes == ANDROID_BITMAP_RESUT_SUCCESS);
  if (!validCardInfo) {
    dmz_error_log("AndroidBitmap_getInfo() failed! error=%i", bmRes);
  }
  if (validCardInfo && bmInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
    dmz_error_log("the dmz was given a bitmap that is not RGBA_8888");
    validCardInfo = false;
  }

  bmRes = validCardInfo ? AndroidBitmap_lockPixels(env, jCardResultBitmap, (void**) &pixels)
                        : ANDROID_BITMAP_RESULT_BAD_PARAMETER;
  if (bmRes != ANDROID_BITMAP_RESUT_SUCCESS) {
    dmz_error_log("couldn't lock bitmap:%i", bmRes);
  }
  else {
    IplImage* bigCb = NULL;
    dmz_transform_card(NULL, cb, corner_points, orientation, true, &bigCb);

    IplImage* bigCr = NULL;
    dmz_transform_card(NULL, cr, corner_points, orientation, true, &bigCr);

    IplImage* cardResult = cvCreateImageHeader(cvSize(bmInfo.width, bmInfo.height), IPL_DEPTH_8U, 4);
    cvSetData(cardResult, pixels, bmInfo.stride);

    dmz_YCbCr_to_RGB(cardY, bigCb, bigCr, &cardResult);
    AndroidBitmap_unlockPixels(env, jCardResultBitmap);

    cvReleaseImageHeader(&cardResult);
    cvReleaseImage(&bigCb);
    cvReleaseImage(&bigCr);
  }
}
JNIEXPORT void JNICALL Java_com_shang_blurjni_blur_BlurUtil_blurBitmap
    (JNIEnv * env,jclass obj, jobject bitmapIn,jint r) {
        AndroidBitmapInfo infoIn;
        void *pixels;

        // Get image info
        if (AndroidBitmap_getInfo(env, bitmapIn, &infoIn) != ANDROID_BITMAP_RESULT_SUCCESS) {
            LOG_D("AndroidBitmap_getInfo failed!");
            return;
        }

        // Check image
        if (infoIn.format != ANDROID_BITMAP_FORMAT_RGBA_8888 &&
                infoIn.format != ANDROID_BITMAP_FORMAT_RGB_565) {
            LOG_D("Only support ANDROID_BITMAP_FORMAT_RGBA_8888 and ANDROID_BITMAP_FORMAT_RGB_565");
            return;
        }

        // Lock all images
        if (AndroidBitmap_lockPixels(env, bitmapIn, &pixels) != ANDROID_BITMAP_RESULT_SUCCESS) {
            LOG_D("AndroidBitmap_lockPixels failed!");
            return;
        }
        // height width
        int h = infoIn.height;
        int w = infoIn.width;

        // Start
        if (infoIn.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            pixels = blur_ARGB_8888((int *) pixels, w, h, r);
        } else if (infoIn.format == ANDROID_BITMAP_FORMAT_RGB_565) {
            pixels = blur_RGB_565((short *) pixels, w, h, r);
        }

        // End

        // Unlocks everything
        AndroidBitmap_unlockPixels(env, bitmapIn);
}
JNIEXPORT jobject JNICALL Java_com_jni_bitmap_1operations_JniBitmapHolder_jniStoreBitmapData(
	JNIEnv * env, jobject obj, jobject bitmap)
    {
    AndroidBitmapInfo bitmapInfo;
    uint32_t* storedBitmapPixels = NULL;
    //LOGD("reading bitmap info...");
    int ret;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &bitmapInfo)) < 0)
	{
	LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
	return NULL;
	}
    //LOGD("width:%d height:%d stride:%d", bitmapInfo.width, bitmapInfo.height, bitmapInfo.stride);
    if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
	{
	LOGE("Bitmap format is not RGBA_8888!");
	return NULL;
	}
    //
    //read pixels of bitmap into native memory :
    //
    //LOGD("reading bitmap pixels...");
    void* bitmapPixels;
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &bitmapPixels)) < 0)
	{
	LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
	return NULL;
	}
    uint32_t* src = (uint32_t*) bitmapPixels;
    storedBitmapPixels = new uint32_t[bitmapInfo.height * bitmapInfo.width];
    int pixelsCount = bitmapInfo.height * bitmapInfo.width;
    memcpy(storedBitmapPixels, src, sizeof(uint32_t) * pixelsCount);
    AndroidBitmap_unlockPixels(env, bitmap);
    JniBitmap *jniBitmap = new JniBitmap();
    jniBitmap->_bitmapInfo = bitmapInfo;
    jniBitmap->_storedBitmapPixels = storedBitmapPixels;
    return env->NewDirectByteBuffer(jniBitmap, 0);
    }
static void writeBitmapPixels(JNIEnv* env, jclass /* clazz */, jobject jbitmap, jint fd) {
    // Get the info.
    AndroidBitmapInfo info;
    int result = AndroidBitmap_getInfo(env, jbitmap, &info);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot get bitmap info");
        return;
    }

    // Write the info.
    bool written = writeAllBytes(fd, (void*) &info, sizeof(AndroidBitmapInfo));
    if (!written) {
        throwIllegalStateException(env, (char*) "Cannot write bitmap info");
        return;
    }

    // Lock the pixels.
    void* pixels;
    result = AndroidBitmap_lockPixels(env, jbitmap, &pixels);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot lock bitmap pixels");
        return;
    }

    // Write the pixels.
    size_t byteCount = info.stride * info.height;
    written = writeAllBytes(fd, (void*) pixels, byteCount);
    if (!written) {
        throwIllegalStateException(env, (char*) "Cannot write bitmap pixels");
        return;
    }

    // Unlock the pixels.
    result = AndroidBitmap_unlockPixels(env, jbitmap);
    if (result < 0) {
        throwIllegalStateException(env, (char*) "Cannot unlock bitmap pixels");
    }
}
JNIEXPORT void JNICALL Java_jp_dego_sample_ipcv_MainActivity_checkColorSpace(JNIEnv *env, jobject obj, jobject bmp)
{
    AndroidBitmapInfo info;
    void* pixels;

    // Get the bitmap info
    if (AndroidBitmap_getInfo(env, bmp, &info) < 0)
        return;
    // Check the bitmap format
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return;
    // Lock the bitmap pixels
    if (AndroidBitmap_lockPixels(env, bmp, &pixels) < 0)
        return;

    int i, j;
    jint *p = (jint *) pixels;
    for (j = 0; j < info.height; j++) {
        for (i = 0; i < info.width; i++) {
            ARGB argb = {0, 0, 0, 0};
            argb.red = getR(p[j * info.width + i]);
            argb.green = getG(p[j * info.width + i]);
            argb.blue = getB(p[j * info.width + i]);
            XYZ xyz;
            // LUV luv;
            // RGBtoXYZ(&argb, &xyz);
            // XYZtoLUV(&xyz, &luv);
            // LUVtoXYZ(&luv, &xyz);
            // XYZtoRGB(&xyz, &argb);
            LAB lab;
            RGBtoXYZ(&argb, &xyz);
            XYZtoLAB(&xyz, &lab);
            LABtoXYZ(&lab, &xyz);
            XYZtoRGB(&xyz, &argb);
            p[j * info.width + i] = createColorFromARGB(argb);
        }
    }

    // Unlock the bitmap pixels
    AndroidBitmap_unlockPixels(env, bmp);
}
bool Java_se_forskningsavd_automatonbrain_Decoder_decode(JNIEnv *env, jobject thiz, jbyteArray frame, jobject bitmap) {
    AndroidBitmapInfo bitmapInfo;
    if (AndroidBitmap_getInfo(env, bitmap, &bitmapInfo) != 0) {
        return false;
    }

    uint8_t *dest_data = 0; //TODO
    avpkt.data = (uint8_t *) (*env)->GetByteArrayElements(env, frame, 0);
    avpkt.size = (*env)->GetArrayLength(env, frame);
    avpkt.flags = AV_PKT_FLAG_KEY;
    int len = avcodec_decode_video2( pCodecCtx, pFrame, &temp, &avpkt );
    (*env)->ReleaseByteArrayElements(env, frame, (jbyte *) avpkt.data, JNI_ABORT);

    if (len < 0 ) {
        return false;
        //printf( "RoboCortex [info]: Decoding error (packet loss)\n" );
    } else {
        void *bitmapData;
        AndroidBitmap_lockPixels(env, bitmap, &bitmapData);
        const uint8_t * data[1] = { bitmapData };
        int linesize[1] = { bitmapInfo.stride };

        // Create scaling & color-space conversion context
        convertCtx = sws_getContext( pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            bitmapInfo.width, bitmapInfo.height, PIX_FMT_BGRA, SWS_AREA, NULL, NULL, NULL);

        // Scale and convert the frame
        sws_scale( convertCtx, (const uint8_t**) pFrame->data, pFrame->linesize, 0,
        pCodecCtx->height, (uint8_t * const*) data, linesize );

        // Cleanup
        sws_freeContext( convertCtx );

        //
        AndroidBitmap_unlockPixels(env, bitmap);
    }
    return true;
}
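/*
 * Consolidated reference sketch (not taken from any single example above): the
 * canonical AndroidBitmap lock/process/unlock sequence with every return code
 * checked. processRow() is a hypothetical per-row callback supplied by the caller;
 * everything else uses only the public <android/bitmap.h> API.
 */
#include <jni.h>
#include <stdint.h>
#include <android/bitmap.h>

static int forEachBitmapRow(JNIEnv *env, jobject bitmap,
                            void (*processRow)(uint8_t *row, uint32_t width))
{
    AndroidBitmapInfo info;
    if (AndroidBitmap_getInfo(env, bitmap, &info) != ANDROID_BITMAP_RESULT_SUCCESS)
        return -1;                                  /* could not query the bitmap */
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return -2;                                  /* only handle 32-bit RGBA here */

    void *pixels = NULL;
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) != ANDROID_BITMAP_RESULT_SUCCESS
            || pixels == NULL)
        return -3;                                  /* pixels are not accessible */

    uint8_t *row = (uint8_t *) pixels;
    for (uint32_t y = 0; y < info.height; y++, row += info.stride)
        processRow(row, info.width);                /* stride may exceed width * 4 */

    AndroidBitmap_unlockPixels(env, bitmap);        /* always pair lock with unlock */
    return 0;
}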