static SkTypeface* Typeface_createFromAsset(JNIEnv* env, jobject, jobject jassetMgr, jstring jpath) {
    NPE_CHECK_RETURN_ZERO(env, jassetMgr);
    NPE_CHECK_RETURN_ZERO(env, jpath);

    AssetManager* mgr = assetManagerForJavaObject(env, jassetMgr);
    if (NULL == mgr) {
        return NULL;
    }

    AutoJavaStringToUTF8 str(env, jpath);
    Asset* asset = mgr->open(str.c_str(), Asset::ACCESS_BUFFER);
    if (NULL == asset) {
        return NULL;
    }

    SkStream* stream = new AssetStreamAdaptor(asset,
                                              AssetStreamAdaptor::kYes_OwnAsset,
                                              AssetStreamAdaptor::kYes_HasMemoryBase);
    SkTypeface* face = SkTypeface::CreateFromStream(stream);
    // SkTypeface::CreateFromStream calls ref() on the stream, so we
    // need to unref it here or it won't be freed later on.
    stream->unref();
    return face;
}
AAsset* AAssetManager_open(AAssetManager* amgr, const char* filename, int mode) {
    Asset::AccessMode amMode;
    switch (mode) {
    case AASSET_MODE_UNKNOWN:
        amMode = Asset::ACCESS_UNKNOWN;
        break;
    case AASSET_MODE_RANDOM:
        amMode = Asset::ACCESS_RANDOM;
        break;
    case AASSET_MODE_STREAMING:
        amMode = Asset::ACCESS_STREAMING;
        break;
    case AASSET_MODE_BUFFER:
        amMode = Asset::ACCESS_BUFFER;
        break;
    default:
        return NULL;
    }

    AssetManager* mgr = static_cast<AssetManager*>(amgr);
    Asset* asset = mgr->open(filename, amMode);
    if (asset == NULL) {
        return NULL;
    }

    return new AAsset(asset);
}
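// For reference: a minimal caller-side sketch of the NDK asset C API that the
// wrapper above implements. The asset name "config.txt", the helper name
// readAssetIntoBuffer, and the choice of AASSET_MODE_BUFFER are illustrative
// assumptions, not taken from the examples in this listing.
#include <jni.h>
#include <vector>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

static std::vector<char> readAssetIntoBuffer(JNIEnv* env, jobject javaAssetManager) {
    // Bridge the Java-side AssetManager to the native AAssetManager handle.
    AAssetManager* mgr = AAssetManager_fromJava(env, javaAssetManager);
    std::vector<char> contents;

    AAsset* asset = AAssetManager_open(mgr, "config.txt", AASSET_MODE_BUFFER);
    if (asset == NULL) {
        return contents;  // asset not found; return an empty buffer
    }

    contents.resize(AAsset_getLength(asset));
    AAsset_read(asset, contents.data(), contents.size());
    AAsset_close(asset);
    return contents;
}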
/**
 * Loads the specified sound file into OpenAL.
 * @param fname file name
 * @return true on success
 */
bool OpenALPlayer::load(const char *fname) {
    AssetManager mgr = AssetManager::getInstance();
    std::vector<char> *fdata = mgr.open(fname);
    if (fdata) {
        bool result = load(fdata);
        delete fdata;
        return result;
    }
    return false;
}
status_t BootAnimation::initTexture(Texture* texture, AssetManager& assets, const char* name) {
    Asset* asset = assets.open(name, Asset::ACCESS_BUFFER);
    if (!asset)
        return NO_INIT;

    SkBitmap bitmap;
    SkImageDecoder::DecodeMemory(asset->getBuffer(false), asset->getLength(),
                                 &bitmap, SkBitmap::kNo_Config,
                                 SkImageDecoder::kDecodePixels_Mode);
    asset->close();
    delete asset;

    // ensure we can call getPixels(). No need to call unlock, since the
    // bitmap will go out of scope when we return from this method.
    bitmap.lockPixels();

    const int w = bitmap.width();
    const int h = bitmap.height();
    const void* p = bitmap.getPixels();

    GLint crop[4] = { 0, h, w, -h };
    texture->w = w;
    texture->h = h;

    glGenTextures(1, &texture->name);
    glBindTexture(GL_TEXTURE_2D, texture->name);

    switch (bitmap.getConfig()) {
    case SkBitmap::kA8_Config:
        glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w, h, 0, GL_ALPHA,
                     GL_UNSIGNED_BYTE, p);
        break;
    case SkBitmap::kARGB_4444_Config:
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
                     GL_UNSIGNED_SHORT_4_4_4_4, p);
        break;
    case SkBitmap::kARGB_8888_Config:
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA,
                     GL_UNSIGNED_BYTE, p);
        break;
    case SkBitmap::kRGB_565_Config:
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB,
                     GL_UNSIGNED_SHORT_5_6_5, p);
        break;
    default:
        break;
    }

    glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, crop);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);

    return NO_ERROR;
}
void WebRequest::handleBrowserURL(GURL url) {
    std::string data("data:text/html;charset=utf-8,");
    if (url.spec() == "browser:incognito") {
        AssetManager* assetManager = globalAssetManager();
        Asset* asset = assetManager->open("webkit/incognito_mode_start_page.html",
                                          Asset::ACCESS_BUFFER);
        if (asset) {
            data.append((const char*)asset->getBuffer(false), asset->getLength());
            delete asset;
        }
    }
    GURL dataURL(data.c_str());
    handleDataURL(dataURL);
}
void Page::initDictionary() {
#if ENABLE(SPELLCHECK)
    if (!m_dictionary) {
        AssetManager* assetManager = globalAssetManager();
        m_asset = assetManager->open("webkit/main.dict", Asset::ACCESS_BUFFER);
        void* dict;
        if (m_asset) {
            dict = (void*) m_asset->getBuffer(false);
            m_dictionary = new Dictionary(dict, typedLetterMultiplier, fullWordMultiplier);
            m_dictionary->setAsset(m_asset);
            android_printLog(ANDROID_LOG_DEBUG, "Page",
                             "Page::initDictionary Dictionary initialised successfully");
        }
    }
#endif
}
int LoadTexture(JNIEnv* env, AssetManager& assetManager, const BitmapFactory::Options& options,
                const char* texturePath, int textureId) {
    String strController = String(texturePath);
    InputStream stream = assetManager.open(strController);
    Bitmap bitmap = BitmapFactory::decodeStream(stream, 0, options);
    stream.close();

    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG,
                        "Loaded %s bitmap width=%d height=%d", texturePath, width, height);

    AndroidBitmapInfo info = AndroidBitmapInfo();
    AndroidBitmap_getInfo(env, bitmap.GetInstance(), &info);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "info.width=%d", info.width);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "info.height=%d", info.height);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "info.stride=%d", info.stride);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "info.format=%d", info.format);
    __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, "info.flags=%d", info.flags);
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
                            "Bitmap format is not RGBA_8888! %s", texturePath);
        return -1;
    }

    glBindTexture(GL_TEXTURE_2D, textures[textureId]);
    __android_log_print(ANDROID_LOG_INFO, LOG_TAG, "glBindTexture=%d", textures[textureId]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    android_opengl_GLUtils::GLUtils::texImage2D(GL_TEXTURE_2D, 0, bitmap.GetInstance(), 0);
    bitmap.recycle();

    return textureId;
}
/**
 * Initialize an EGL context for the current display.
 */
static int engine_init_display(struct engine* engine) {
    // initialize OpenGL ES and EGL
    if (JNI_ERR == RegisterClasses(engine->app->activity)) {
        return JNI_ERR;
    }
    g_model = Build::MODEL();

    {
        Activity activity = Activity(engine->app->activity->clazz);
        Context context = activity.getApplicationContext();
        AssetManager assetManager = context.getAssets();
        InputStream inputStream = assetManager.open("input.json", AssetManager::ACCESS_BUFFER());
        int length = inputStream.available();
        jbyte* configurationBytes = new jbyte[length];
        inputStream.read(configurationBytes, length);
        String json = String(configurationBytes, length);
        std::string strJson = json.ToString();
        inputStream.close();
        delete[] configurationBytes;  // allocated with new[], so release with delete[]
        g_parser.parse(strJson);
    }

    /*
     * Here specify the attributes of the desired configuration.
     * Below, we select an EGLConfig with at least 8 bits per color
     * component compatible with on-screen windows.
     */
    const EGLint attribs[] = {
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_NONE
    };
    EGLint w, h, dummy, format;
    EGLint numConfigs;
    EGLConfig config;
    EGLSurface surface;
    EGLContext context;

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(display, 0, 0);

    /* Here, the application chooses the configuration it desires. In this
     * sample, we have a very simplified selection process, where we pick
     * the first EGLConfig that matches our criteria. */
    eglChooseConfig(display, attribs, &config, 1, &numConfigs);

    /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
     * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
     * As soon as we pick an EGLConfig, we can safely reconfigure the
     * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

    ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);

    surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
    context = eglCreateContext(display, config, NULL, NULL);

    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
        LOGW("Unable to eglMakeCurrent");
        return -1;
    }

    eglQuerySurface(display, surface, EGL_WIDTH, &w);
    eglQuerySurface(display, surface, EGL_HEIGHT, &h);

    engine->display = display;
    engine->context = context;
    engine->surface = surface;
    engine->width = w;
    engine->height = h;
    engine->state.angle = 0;

    // Initialize GL state.
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
    glEnable(GL_CULL_FACE);
    glEnable(GL_TEXTURE_2D);
    glShadeModel(GL_SMOOTH);
    glDisable(GL_DEPTH_TEST);

    LoadBitmaps(engine->app->activity->vm, engine->app->activity->env, engine->app->activity->clazz);

    glGenBuffers(3, g_vbo);
    glBindBuffer(GL_ARRAY_BUFFER, g_vbo[0]);
    glBufferData(GL_ARRAY_BUFFER, 4 * 12, g_positions, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, g_vbo[1]);
    glBufferData(GL_ARRAY_BUFFER, 4 * 8, g_textureCoords, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, g_vbo[2]);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, 2 * 4, g_indices, GL_STATIC_DRAW);

    return 0;
}