// Opens `path` from the APK through the Android asset manager and caches its
// length. _eos/_pos start cleared.
//
// BUG FIX: the original called AAsset_getLength() unconditionally, which
// dereferences a NULL handle when the asset does not exist in the APK.
// A missing asset now yields _len == 0 so reads hit end-of-stream instead
// of crashing.
AssetInputStream::AssetInputStream(AAssetManager *as, const Common::String &path) : _eos(false), _pos(0) {
	_asset = AAssetManager_open(as, path.c_str(), AASSET_MODE_RANDOM);
	_len = (_asset != NULL) ? AAsset_getLength(_asset) : 0;
}
bool Audio::createChannelFromAsset(const char* fname, int index) { SLresult result; Channel* channel = &this->channels[index]; if (channel->loaded == SL_BOOLEAN_TRUE) { this->closeChannel(index); } AAssetManager* mgr = engine->app->activity->assetManager; if (mgr == NULL) { engine->setLastError(ERR_ASSET_LOAD); LOGE("emo_audio: failed to load AAssetManager"); return false; } AAsset* asset = AAssetManager_open(mgr, fname, AASSET_MODE_UNKNOWN); if (asset == NULL) { engine->setLastError(ERR_ASSET_OPEN); LOGE("emo_audio: failed to open an audio file"); LOGE(fname); return false; } // open asset as file descriptor off_t start, length; int fd = AAsset_openFileDescriptor(asset, &start, &length); if (fd < 0) { engine->setLastError(ERR_ASSET_OPEN); LOGE("emo_audio: failed to open an audio file"); LOGE(fname); return false; } AAsset_close(asset); // configure audio source SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length}; SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED}; SLDataSource audioSrc = {&loc_fd, &format_mime}; // configure audio sink SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, this->outputMixObject}; SLDataSink audioSnk = {&loc_outmix, NULL}; // create audio player const SLInterfaceID player_ids[3] = {SL_IID_PLAY, SL_IID_VOLUME, SL_IID_SEEK}; const SLboolean player_req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; result = (*engineEngine)->CreateAudioPlayer(this->engineEngine, &channel->playerObject, &audioSrc, &audioSnk, 3, player_ids, player_req); if (SL_RESULT_SUCCESS != result) { engine->setLastError(ERR_AUDIO_ASSET_INIT); LOGE("emo_audio: failed to create an audio player"); return false; } // realize the player result = (*channel->playerObject)->Realize(channel->playerObject, SL_BOOLEAN_FALSE); if (SL_RESULT_SUCCESS != result) { engine->setLastError(ERR_AUDIO_ASSET_INIT); LOGE("emo_audio: failed to realize an audio player"); return false; } channel->loaded = 
SL_BOOLEAN_TRUE; // get the play interface result = (*channel->playerObject)->GetInterface(channel->playerObject, SL_IID_PLAY, &channel->playerPlay); if (SL_RESULT_SUCCESS != result) { engine->setLastError(ERR_AUDIO_ASSET_INIT); LOGE("emo_audio: failed to get an audio player interface"); return false; } // get the seek interface result = (*channel->playerObject)->GetInterface(channel->playerObject, SL_IID_SEEK, &channel->playerSeek); if (SL_RESULT_SUCCESS != result) { engine->setLastError(ERR_AUDIO_ASSET_INIT); LOGE("emo_audio: failed to get an audio seek interface"); return false; } // the volume interface result = (*channel->playerObject)->GetInterface(channel->playerObject, SL_IID_VOLUME, &channel->playerVolume); if (SL_RESULT_SUCCESS != result) { engine->setLastError(ERR_AUDIO_ASSET_INIT); LOGE("emo_audio: failed to create an audio volume interface"); return false; } return true; }
/* Play a buffer */ bool SoundPlayer::playAsset(const char *assetname) { SLresult result; // destroy file descriptor audio player object, and invalidate all associated interfaces if (fdPlayerObject != NULL) { (*fdPlayerObject)->Destroy(fdPlayerObject); fdPlayerObject = NULL; fdPlayerPlay = NULL; fdPlayerSeek = NULL; fdPlayerMuteSolo = NULL; fdPlayerVolume = NULL; } assert(NULL != mgr); AAsset* asset = AAssetManager_open(mgr, assetname, AASSET_MODE_UNKNOWN); // the asset might not be found if (NULL == asset) { return false; } // open asset as file descriptor off_t start, length; int fd = AAsset_openFileDescriptor(asset, &start, &length); assert(0 <= fd); AAsset_close(asset); // configure audio source SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length}; SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED}; SLDataSource audioSrc = {&loc_fd, &format_mime}; // configure audio sink SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject}; SLDataSink audioSnk = {&loc_outmix, NULL}; // create audio player const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME}; const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk, 3, ids, req); assert(SL_RESULT_SUCCESS == result); // realize the player result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE); assert(SL_RESULT_SUCCESS == result); // get the play interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay); assert(SL_RESULT_SUCCESS == result); // get the seek interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_SEEK, &fdPlayerSeek); assert(SL_RESULT_SUCCESS == result); // get the mute/solo interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_MUTESOLO, &fdPlayerMuteSolo); assert(SL_RESULT_SUCCESS == result); // get 
the volume interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume); assert(SL_RESULT_SUCCESS == result); // set the player's state result = (*fdPlayerPlay)->SetPlayState(fdPlayerPlay, SL_PLAYSTATE_PLAYING); assert(SL_RESULT_SUCCESS == result); return (result == SL_RESULT_SUCCESS); }
// Creates an in-memory APKFile for `filePath`.
//
// Lookup order: (1) any attached resource archive whose attachPath prefixes
// the absolute path, (2) the APK's asset manager. Directories are rejected.
// Returns a new APKFile instance, or 0/NULL when the file cannot be found
// or is a directory. The temporary read buffer is always released;
// ownership of the data lies with the APKFile created by CreateFromData.
File * APKFile::CreateFromAssets(const FilePath &filePath, uint32 attributes)
{
    // Logger::Debug("[APKFile::CreateFromAssets] wan't to create file %s", filePath.c_str());

    FileSystem * fileSystem = FileSystem::Instance();

    // First, try every attached resource archive: a match is an archive whose
    // attachPath is a prefix (at position 0) of the requested absolute path.
    for (List<FileSystem::ResourceArchiveItem>::iterator ai = fileSystem->resourceArchiveList.begin();
         ai != fileSystem->resourceArchiveList.end(); ++ai)
    {
        FileSystem::ResourceArchiveItem & item = *ai;

        String filenamecpp = filePath.GetAbsolutePathname();

        String::size_type pos = filenamecpp.find(item.attachPath);
        if (pos == 0)
        {
            // Path relative to the archive's attach point.
            String relfilename = filenamecpp.substr(item.attachPath.length());
            // First call with a null buffer only queries the resource size;
            // -1 means the resource does not exist in this archive.
            int32 size = item.archive->LoadResource(relfilename, 0);
            if ( size == -1 )
            {
                return 0;
            }

            uint8 * buffer = new uint8[size];
            item.archive->LoadResource(relfilename, buffer);
            APKFile *fileInstance = CreateFromData(relfilename, buffer, size, attributes);
            SafeDeleteArray(buffer);
            return fileInstance;
        }
    }

    // Directories cannot be opened as asset files.
    bool isDirectory = FileSystem::Instance()->IsDirectory(filePath);
    if(isDirectory)
    {
        Logger::Error("[APKFile::CreateFromAssets] Can't create file because of it is directory (%s)", filePath.GetAbsolutePathname().c_str());
        return NULL;
    }

    // Fall back to the APK asset manager owned by the Android core.
    CorePlatformAndroid *core = (CorePlatformAndroid *)Core::Instance();
    DVASSERT_MSG(core, "Need create core before loading of files");

    AAssetManager *assetManager = core->GetAssetManager();
    DVASSERT_MSG(assetManager, "Need setup assetManager on core creation");

    AAsset * asset = AAssetManager_open(assetManager, filePath.GetAbsolutePathname().c_str(), AASSET_MODE_UNKNOWN);
    if(!asset)
    {
        Logger::Error("[APKFile::CreateFromAssets] Can't load asset for path %s", filePath.GetAbsolutePathname().c_str());
        return NULL;
    }

    uint32 dataSize = AAsset_getLength(asset);
    // Logger::Debug("[APKFile::CreateFromAssets] fileSize is %d (%s)", dataSize, filePath.c_str());

    // Read the whole asset into a temporary buffer, then wrap it in an
    // APKFile (which copies/owns the data via CreateFromData).
    uint8 *data = new uint8[dataSize];

    uint32 readSize = AAsset_read(asset, data, dataSize * sizeof(uint8));
    AAsset_close(asset);
    // NOTE(review): a short read only trips DVASSERT (no-op in release);
    // readSize is then passed on as the file size — confirm this is intended.
    DVASSERT_MSG(readSize == dataSize * sizeof(uint8), "Can't read full file");

    APKFile *fileInstance = CreateFromData(filePath, data, readSize, attributes);
    DVASSERT_MSG(fileInstance, "Can't create dynamic file from memory");
    SafeDeleteArray(data);
    return fileInstance;
}
/* Loads `file_name` as the background-music track and leaves it STOPPED.
 *
 * If the same file is already loaded, the player is merely stopped and the
 * call succeeds without reloading. Otherwise the previous BGM state is
 * freed, the asset is opened from the APK, and an OpenSL ES player is built
 * over its file descriptor (play/seek/volume interfaces acquired).
 *
 * Returns true on success; on any failure the BGM state is freed and false
 * is returned (behavior of _check_result on SL errors is defined elsewhere).
 */
bool sl_bgm_load(const char* file_name) {
    if(!file_name) {
        return false;
    }
    struct sl_bgm* bgm = &(ENV.bgm);
    // Same track already loaded: just stop it, do not rebuild the player.
    if(bgm->file_name && strcmp(file_name, bgm->file_name) == 0) {
        SLresult result = (*bgm->fdPlayerPlay)->SetPlayState(bgm->fdPlayerPlay, SL_PLAYSTATE_STOPPED);
        return result == SL_RESULT_SUCCESS;
    }
    // Different track: tear down previous state and remember the new name.
    _bgm_free(bgm);
    bgm->file_name = strdup(file_name);
    if(bgm->file_name == NULL) {
        return false;
    }
    AAsset* asset = AAssetManager_open(ENV.asset_mgr, file_name, AASSET_MODE_UNKNOWN);
    if(asset == NULL) {
        goto ERROR;
    }
    // open asset as file descriptor; the AAsset handle is closed immediately,
    // OpenSL ES only needs the (fd, start, length) triple
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    AAsset_close(asset);
    if(fd < 0) {
        goto ERROR;
    }
    // NOTE(review): if _check_result bails out on error below, `fd` is
    // presumably leaked — confirm the macro's failure behavior.
    // configure audio source
    SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_fd, &format_mime};
    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, ENV.outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};
    // create audio player, requiring seek/mute-solo/volume interfaces
    SLresult result;
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*ENV.engineEngine)->CreateAudioPlayer(ENV.engineEngine, &bgm->fdPlayerObject, &audioSrc, &audioSnk, 3, ids, req);
    _check_result(result);
    // realize the player (SL_BOOLEAN_FALSE = synchronous)
    result = (*bgm->fdPlayerObject)->Realize(bgm->fdPlayerObject, SL_BOOLEAN_FALSE);
    _check_result(result);
    // get the play interface
    result = (*bgm->fdPlayerObject)->GetInterface(bgm->fdPlayerObject, SL_IID_PLAY, &bgm->fdPlayerPlay);
    _check_result(result);
    // get the seek interface
    result = (*bgm->fdPlayerObject)->GetInterface(bgm->fdPlayerObject, SL_IID_SEEK, &bgm->fdPlayerSeek);
    _check_result(result);
    // get the volume interface
    result = (*bgm->fdPlayerObject)->GetInterface(bgm->fdPlayerObject, SL_IID_VOLUME, &bgm->fdPlayerVolume);
    _check_result(result);
    // Looping is explicitly DISABLED here (SL_BOOLEAN_FALSE); the original
    // comment claimed the opposite. Callers enable looping separately.
    result = (*bgm->fdPlayerSeek)->SetLoop(bgm->fdPlayerSeek, SL_BOOLEAN_FALSE, 0, SL_TIME_UNKNOWN);
    _check_result(result);
    // the new track starts stopped; playback is triggered elsewhere
    result = (*bgm->fdPlayerPlay)->SetPlayState(bgm->fdPlayerPlay, SL_PLAYSTATE_STOPPED);
    _check_result(result);
    return true;
ERROR:
    _bgm_free(bgm);
    return false;
}
// Loads the resource `id` into the memory buffer `m`.
//
// On Android the file is read straight from the APK via the asset manager.
// On other platforms each registered search path is tried in order:
// type 2 entries are plain directories (path + delim + id), type 1 entries
// are archives queried through archive.copy(). Returns true when the data
// was loaded into `m`, false otherwise.
bool kgmGameResources::getFile(const char* id, kgmMemory<u8>& m)
{
    kgmString path;
    int i = 0;

    if(!id)
        return false;

// Platform-specific path separator for the directory search below.
#ifdef WIN32
    const kgmString delim((const char*)"\\", 1);
#else
    const kgmString delim((const char*)"/", 1);
#endif

#ifdef ANDROID
#ifdef DEBUG
    kgm_log() << "\nkgmEngine android loading file " << id << "\n";
#endif
    AAsset* asset = AAssetManager_open(kgm_android_getAssetManager(), (const char *) id, AASSET_MODE_UNKNOWN);

    if (NULL == asset)
    {
#ifdef DEBUG
        kgmLog::log("_ASSET_NOT_FOUND_");
#endif
        return false;
    }

    // Read the whole asset into the caller's buffer in one shot.
    // NOTE(review): the AAsset_read return value is not checked — a short
    // read would leave `m` partially filled; confirm this is acceptable.
    long size = AAsset_getLength(asset);
    m.alloc(size);
    AAsset_read (asset, m.data(), size);
#ifdef DEBUG
    kgm_log() << "\nkgmEngine android file size: " << (s32)size << "\n";
#endif
    AAsset_close(asset);
    return true;
#else
    for(i = 0; i < m_paths.size(); i++)
    {
        kgmFile file;

        // type 2 = directory search path: build an absolute path and read
        // the file directly from disk.
        if(m_paths[i]->type == 2)
        {
            path = m_paths[i]->path + delim + kgmString(id, strlen(id));

            if(kgmIGame::getGame()->getSystem()->isFile(path) && file.open(path, kgmFile::Read))
            {
                m.alloc(file.length());
                file.read(m.data(), file.length());
                file.close();
                return true;
            }
        }
        // type 1 = archive: let the archive copy the resource into `m`.
        else if(m_paths[i]->type == 1)
        {
            if(m_paths[i]->archive.copy(id, m))
            {
                return true;
            }
        }
    }
#endif

#ifdef DEBUG
    kgm_log() << "kgmGameResources::getFile Cannot load file: " << id << "\n";
#endif
    return false;
}
/* Parses a RIFF/WAVE asset from the APK and extracts its PCM payload.
 *
 * env, assetManager - JNI handles used to obtain the AAssetManager
 * pathToWav         - asset path of the .wav file
 * audioDesc         - receives channels / bitsPerSample / samplerate /
 *                     blockAlign / samplesFormat from the 'fmt ' chunk
 * audioData         - receives a malloc'd buffer with the raw 'data' chunk;
 *                     any previous *audioData buffer is free'd first
 * audioDataBytes    - receives the size of that buffer in bytes
 *
 * Returns 1 when both a valid 'fmt ' chunk (PCM or IEEE-float) and a 'data'
 * chunk were read without error, 0 otherwise.
 *
 * NOTE(review): multi-byte fields are read directly into integers, i.e. the
 * parser assumes a little-endian host (true for all current Android ABIs).
 */
NixUI8 loadDataFromWavFile(JNIEnv *env, jobject assetManager, const char* pathToWav, STNix_audioDesc* audioDesc, NixUI8** audioData, NixUI32* audioDataBytes){
	NixUI8 success = 0;
	AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
	AAsset* wavFile = AAssetManager_open(mgr, pathToWav, AASSET_MODE_UNKNOWN);
	//FILE* wavFile = fopen(pathToWav, "rb");
	if(wavFile==NULL){
		PRINTF_ERROR("WAV fopen failed: '%s'\n", pathToWav);
	} else {
		// RIFF header: "RIFF" tag, total size, "WAVE" tag.
		char chunckID[4];
		AAsset_read(wavFile, chunckID, sizeof(char) * 4);
		if(chunckID[0]!='R' || chunckID[1]!='I' || chunckID[2]!='F' || chunckID[3]!='F'){
			PRINTF_ERROR("WAV chunckID not valid: '%s'\n", pathToWav);
		} else {
			NixUI8 continuarLectura = 1;     // keep-reading flag
			NixUI8 errorOpeningFile = 0;     // hard-error flag
			NixUI32 chunckSize;
			AAsset_read(wavFile, &chunckSize, sizeof(chunckSize) * 1);
			char waveID[4];
			AAsset_read(wavFile, waveID, sizeof(char) * 4);
			if(waveID[0]!='W' || waveID[1]!='A' || waveID[2]!='V' || waveID[3]!='E'){
				PRINTF_ERROR("WAV::WAVE chunckID not valid: '%s'\n", pathToWav);
			} else {
				// Read the WAVE sub-chunks.
				char bufferPadding[64]; NixSI32 tamBufferPadding = 64; // scratch buffer: skip small gaps by reading instead of seeking
				char subchunckID[4];
				NixUI32 bytesReadedID = 0;
				NixUI8 formatChunckPresent = 0, chunckDataReaded = 0;
				do {
					bytesReadedID = (NixUI32)AAsset_read(wavFile, subchunckID, sizeof(char) * 4);
					if(bytesReadedID==4){
						NixUI32 subchunckBytesReaded = 0;   // payload bytes consumed from this sub-chunk
						NixUI32 subchunckSize;
						AAsset_read(wavFile, &subchunckSize, sizeof(subchunckSize) * 1); //subchunckBytesReaded += sizeof(subchunckSize);
						// odd-sized chunks are padded to an even boundary
						NixUI8 tamanoChunckEsImpar = ((subchunckSize % 2)!=0);
						if(subchunckID[0]=='f' && subchunckID[1]=='m' && subchunckID[2]=='t' && subchunckID[3]==' '){
							// 'fmt ' sub-chunk: audio format description (first occurrence only).
							if(!formatChunckPresent){
								NixUI16 formato;
								AAsset_read(wavFile, &formato, sizeof(formato) * 1); subchunckBytesReaded += sizeof(formato);
								if(formato!=1 && formato!=3){ //WAVE_FORMAT_PCM=1 WAVE_FORMAT_IEEE_FLOAT=3
									errorOpeningFile = 1;
									PRINTF_ERROR("Wav format(%d) is not WAVE_FORMAT_PCM(1) or WAVE_FORMAT_IEEE_FLOAT(3)\n", formato);
								} else {
									NixUI16 canales;               // channel count
									AAsset_read(wavFile, &canales, sizeof(canales) * 1); subchunckBytesReaded += sizeof(canales);
									NixUI32 muestrasPorSegundo;    // sample rate
									AAsset_read(wavFile, &muestrasPorSegundo, sizeof(muestrasPorSegundo) * 1); subchunckBytesReaded += sizeof(muestrasPorSegundo);
									NixUI32 bytesPromedioPorSegundo; // average bytes per second (unused)
									AAsset_read(wavFile, &bytesPromedioPorSegundo, sizeof(bytesPromedioPorSegundo) * 1); subchunckBytesReaded += sizeof(bytesPromedioPorSegundo);
									NixUI16 alineacionBloques;     // block alignment
									AAsset_read(wavFile, &alineacionBloques, sizeof(alineacionBloques) * 1); subchunckBytesReaded += sizeof(alineacionBloques);
									NixUI16 bitsPorMuestra;        // bits per sample
									AAsset_read(wavFile, &bitsPorMuestra, sizeof(bitsPorMuestra) * 1); subchunckBytesReaded += sizeof(bitsPorMuestra);
									//if((canales!=1 && canales!=2) || (bitsPorMuestra!=8 && bitsPorMuestra!=16 && bitsPorMuestra!=32) || (muestrasPorSegundo!=8000 && muestrasPorSegundo!=11025 && muestrasPorSegundo!=22050 && muestrasPorSegundo!=44100)){
									//	errorOpeningFile = 1;
									//	PRINTF_ERROR("Wav format not supported\n");
									//} else {
									audioDesc->samplesFormat = (formato==3 ? ENNix_sampleFormat_float : ENNix_sampleFormat_int);
									audioDesc->channels = canales;
									audioDesc->bitsPerSample = bitsPorMuestra;
									audioDesc->samplerate = muestrasPorSegundo;
									audioDesc->blockAlign = alineacionBloques;
									//}
									formatChunckPresent = 1;
								}
							}
						} else if(subchunckID[0]=='d' && subchunckID[1]=='a' && subchunckID[2]=='t' && subchunckID[3]=='a') {
							// 'data' sub-chunk: the raw PCM payload (first occurrence only,
							// and only after 'fmt ' has been seen).
							if(!formatChunckPresent){
								//WARNING
							} else if(chunckDataReaded){
								//WARNING
							} else {
								NixUI32 pcmDataBytes = subchunckSize;
								*audioDataBytes = pcmDataBytes;
								//printf("PCM chunk size: %u\n", pcmDataBytes);
								if(*audioData!=NULL) free(*audioData);
								*audioData = (NixUI8*)malloc(pcmDataBytes);
								NixUI32 bytesReaded = (NixUI32)AAsset_read(wavFile, *audioData, sizeof(NixUI8) * pcmDataBytes);
								subchunckBytesReaded += bytesReaded;
								if(bytesReaded!=pcmDataBytes){
									//WARNING
								}
								chunckDataReaded = 1;
							}
						} else {
							// Unknown sub-chunk: once 'data' has been read we can stop,
							// otherwise skip over it.
							if(chunckDataReaded){
								continuarLectura = 0;
							} else {
								AAsset_seek(wavFile, subchunckSize, SEEK_CUR);
								subchunckBytesReaded += subchunckSize;
							}
						}
						// Reconcile bytes consumed against the declared sub-chunk size.
						if(!errorOpeningFile && continuarLectura && subchunckBytesReaded!=subchunckSize){
							if(subchunckBytesReaded<subchunckSize){
								NixSI32 bytesPaddear = (subchunckSize-subchunckBytesReaded);
								if(bytesPaddear<=tamBufferPadding){
									AAsset_read(wavFile, bufferPadding, sizeof(char) * bytesPaddear); // read into scratch to avoid a seek
								} else {
									AAsset_seek(wavFile, subchunckSize-subchunckBytesReaded, SEEK_CUR);
								}
							} else {
								// consumed more than declared: the file is inconsistent
								errorOpeningFile = 1;
							}
						}
						// Consume the pad byte that follows an odd-sized sub-chunk.
						if(!errorOpeningFile && continuarLectura && tamanoChunckEsImpar) {
							char charPadding;
							AAsset_read(wavFile, &charPadding, sizeof(char) * 1);
						}
					}
				} while(bytesReadedID==4 && !errorOpeningFile && continuarLectura);
				success = (formatChunckPresent && chunckDataReaded && !errorOpeningFile) ? 1 : 0;
				if(!formatChunckPresent) PRINTF_WARNING("formatChunckPresent no leido\n");
				if(!chunckDataReaded) PRINTF_WARNING("chunckDataReaded no leido\n");
				if(errorOpeningFile) PRINTF_WARNING("errorOpeningFile error presente\n");
			}
		}
		AAsset_close(wavFile);
	}
	return success;
}
//--------------------------------------- int _OpenFileFileSystem( const char* fname, char*& _out_file, unsigned int& _out_len ) { #ifdef ANDROID assertion( gAssetManager != NULL, "AssetManaget is NULL! Make sure to call InitializeAssetManager( AAssetManager* assetManager )", "" ); AAsset* asset; asset = AAssetManager_open( gAssetManager, fname, AASSET_MODE_UNKNOWN ); if ( asset == NULL ) { WarnFail( "Failed to load file: '%s'\n", fname ); return FSE_FILE_NOT_FOUND; } _out_len = AAsset_getLength(asset); _out_file = new char[ _out_len+1 ]; _out_file[ _out_len ] = 0; _out_len = AAsset_read( asset, _out_file, _out_len ); AAsset_close( asset ); return FSE_NO_ERROR; #else char dataPath[ FILESYSTEM_MAX_PATH + 1 ]; char filenameInArchive[ MAX_FILENAME ]; int err; FILE* fpFile=NULL; // Check each data path for file for ( int i = CurrentNumerOfDataArchives - 1; i >= 0; --i ) { // Try to open the file strncpy_s( filenameInArchive, fname, MAX_FILENAME ); fopen_s( &fpFile, filenameInArchive, "rb" ); if ( !fpFile ) { // Build archive name strncpy_s( dataPath, PathToData[ i ], FILESYSTEM_MAX_PATH - 1 ); dataPath[ FILESYSTEM_MAX_PATH ] = '\0'; strcat_s( dataPath, "/" ); strcpy_s( filenameInArchive, dataPath ); strcat_s( filenameInArchive, fname ); // Try to open the file fopen_s( &fpFile, filenameInArchive, "rb" ); if ( !fpFile ) { // ConsolePrintf( CONSOLE_INFO, "File '%s' not found. 
Searching additional files...\n", filenameInArchive ); err = FSE_FILE_NOT_FOUND; continue; } } // Get file size fseek( fpFile, 0, SEEK_END ); _out_len = ftell( fpFile ); rewind( fpFile ); // Allocate memory for data _out_file = new char[ _out_len+1 ]; _out_file[ _out_len ] = 0; // Copy file into memory err = fread( _out_file, 1, _out_len, fpFile ); if ( (unsigned int) err != _out_len ) { ConsolePrintf( CONSOLE_WARNING, "Warning : File '%s' failed to read.\n", filenameInArchive ); err = FSE_CORRUPT_FILE; } else { err = FSE_NO_ERROR; } if ( err == FSE_NO_ERROR ) { ConsolePrintf( CONSOLE_SUCCESS, "Loaded file '%s'\n", filenameInArchive ); } else { WarnFail( "Failed to load file: '%s'\n", filenameInArchive ); } fclose( fpFile ); return err; } return err; #endif }
// create asset audio player int createAssetAudioPlayer(AAssetManager *assetManager, char *filename) { SLresult result; // convert Java string to UTF-8 // use asset manager to open asset by filename AAssetManager* mgr = assetManager; assert(NULL != mgr); AAsset* asset = AAssetManager_open(mgr, filename, AASSET_MODE_UNKNOWN); // the asset might not be found if (NULL == asset) { return JNI_FALSE; } // open asset as file descriptor off_t start, length; int fd = AAsset_openFileDescriptor(asset, &start, &length); assert(0 <= fd); AAsset_close(asset); // configure audio source SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length}; SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED}; SLDataSource audioSrc = {&loc_fd, &format_mime}; // configure audio sink SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject}; SLDataSink audioSnk = {&loc_outmix, NULL}; // create audio player const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME}; const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE}; result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk, 3, ids, req); assert(SL_RESULT_SUCCESS == result); (void)result; // realize the player result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE); assert(SL_RESULT_SUCCESS == result); (void)result; // get the play interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay); assert(SL_RESULT_SUCCESS == result); (void)result; // get the seek interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_SEEK, &fdPlayerSeek); assert(SL_RESULT_SUCCESS == result); (void)result; // get the mute/solo interface result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_MUTESOLO, &fdPlayerMuteSolo); assert(SL_RESULT_SUCCESS == result); (void)result; // get the volume interface result = 
(*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume); assert(SL_RESULT_SUCCESS == result); (void)result; // enable whole file looping result = (*fdPlayerSeek)->SetLoop(fdPlayerSeek, SL_BOOLEAN_TRUE, 0, SL_TIME_UNKNOWN); assert(SL_RESULT_SUCCESS == result); (void)result; return JNI_TRUE; }
// #include <android/asset_manager.h> #include <unistd.h> #include <sys/stat.h> #include <core/cxUtil.h> #include <streams/cxAssetsStream.h> #include <streams/cxAssetsLuaImp.c> #include "cxAndroid.h" static cxBool cxAssetsStreamOpen(cxAny this) { cxAssetsStream asserts = this; CX_ASSERT(asserts->super.isOpen == false,"stream repeat open"); cxConstChars path = cxStringBody(asserts->super.path); asserts->asset = AAssetManager_open(cxEngineGetAssetManager(), path, AASSET_MODE_UNKNOWN); if(asserts->asset == NULL){ CX_ERROR("open asset file %s failed",path); return false; } asserts->super.length = (cxInt)AAsset_getLength(asserts->asset); asserts->super.canRead = true; asserts->super.canSeek = true; asserts->super.canWrite = false; asserts->super.isOpen = true; return true; } static cxInt cxAssetsStreamRead(cxAny this,cxPointer buffer,cxInt size) { cxAssetsStream asserts = this;
// Resolves this InputSource into a concrete Cinder DataSource.
//
// Dispatches on `type`:
//  - TYPE_RESOURCE:     bundled resource (Android asset on CHR_COMPLEX
//                       builds, Cinder resource otherwise, plain file on
//                       desktop)
//  - TYPE_RESOURCE_MSW: Windows resource identified by (mswID, mswType)
//  - TYPE_FILE:         file on disk at `filePath`
//  - TYPE_ASSET:        asset addressed by `relativePath` (prefixed with
//                       "assets" on Android)
//
// Throws Exception when the target cannot be found; each existence check is
// done up-front because the create() calls below do not throw on a missing
// file. On the Android asset paths, AAssetManager_open is used purely as an
// existence probe — the handle is closed immediately and DataSourceAsset
// re-opens the asset itself.
DataSourceRef InputSource::loadDataSource()
{
    switch (type)
    {
        case TYPE_RESOURCE:
        {
#if defined(CHR_COMPLEX) && defined(CINDER_ANDROID)
            AAsset* asset = AAssetManager_open(FileSystem::getAndroidAssetManager(), filePathHint.c_str(), AASSET_MODE_STREAMING);
            if (asset)
            {
                AAsset_close(asset);
                return DataSourceAsset::create(FileSystem::getAndroidAssetManager(), filePathHint);
            }
            else
            {
                throw Exception("RESOURCE NOT FOUND: " + filePathHint);
            }
#elif defined(CINDER_ANDROID)
            try
            {
                return app::loadResource(filePathHint); // TODO: TEST IF IT REALLY THROWS UPON ERROR
            }
            catch (exception &e)
            {
                throw Exception("RESOURCE NOT FOUND: " + filePathHint);
            }
#else
            if (fs::exists(filePath)) // NECESSARY, BECAUSE THE FOLLOWING WON'T THROW IF FILE DOESN'T EXIST
            {
                return DataSourcePath::create(filePath);
            }
            else
            {
                throw Exception("RESOURCE NOT FOUND: " + relativePath.string());
            }
#endif
        }

        case TYPE_RESOURCE_MSW:
        {
            try
            {
                return app::loadResource(filePathHint, mswID, mswType); // TODO: TEST IF IT REALLY THROWS UPON ERROR
            }
            catch (exception &e)
            {
                throw Exception("RESOURCE NOT FOUND: " + filePathHint);
            }
        }

        case TYPE_FILE:
        {
            if (fs::exists(filePath)) // NECESSARY, BECAUSE THE FOLLOWING WON'T THROW IF FILE DOESN'T EXIST
            {
                return DataSourcePath::create(filePath);
            }
            else
            {
                throw Exception("FILE NOT FOUND: " + filePath.string());
            }
        }

        case TYPE_ASSET:
        {
#if defined(CHR_COMPLEX) && defined(CINDER_ANDROID)
            string resourcePath = ("assets" / relativePath).string();
            AAsset* asset = AAssetManager_open(FileSystem::getAndroidAssetManager(), resourcePath.c_str(), AASSET_MODE_STREAMING);
            if (asset)
            {
                AAsset_close(asset);
                return DataSourceAsset::create(FileSystem::getAndroidAssetManager(), resourcePath);
            }
            else
            {
                throw Exception("ASSET NOT FOUND: " + relativePath.string());
            }
#elif defined(CINDER_ANDROID)
            try
            {
                return app::loadResource(("assets" / relativePath).string()); // TODO: TEST IF IT REALLY THROWS UPON ERROR
            }
            catch (exception &e)
            {
                throw Exception("ASSET NOT FOUND: " + relativePath.string());
            }
#else
            if (!filePath.empty() && fs::exists(filePath)) // NECESSARY, BECAUSE THE FOLLOWING WON'T THROW IF FILE DOESN'T EXIST
            {
                return DataSourcePath::create(filePath);
            }
            else
            {
                throw Exception("ASSET NOT FOUND: " + relativePath.string());
            }
#endif
        }
    }

    // unreachable for known `type` values; keeps the compiler satisfied
    return DataSourceRef();
}
void android_main(struct android_app* state) { lua_State *L; AAsset* luaCode; const void *buf; off_t bufsize; int status; // Suppress link-time optimization that removes unreferenced code // to make sure glue isn't stripped. app_dummy(); // wait until everything is initialized before launching LuaJIT assets state->onAppCmd = handle_cmd; LOGI("Waiting for app ready..."); int events; struct android_poll_source* source; // we block forever waiting for events. while (ALooper_pollAll(-1, NULL, &events, (void**)&source) >= 0) { // Process this event. if (source != NULL) { source->process(state, source); } if (window_ready && gained_focus) { break; } // Check if we are exiting. if (state->destroyRequested != 0) { return; } } LOGI("Launching LuaJIT assets..."); luaCode = AAssetManager_open(state->activity->assetManager, LOADER_ASSET, AASSET_MODE_BUFFER); if (luaCode == NULL) { LOGE("error loading loader asset"); goto quit; } bufsize = AAsset_getLength(luaCode); buf = AAsset_getBuffer(luaCode); if (buf == NULL) { LOGE("error getting loader asset buffer"); goto quit; } // Load initial Lua loader from our asset store: L = luaL_newstate(); luaL_openlibs(L); status = luaL_loadbuffer(L, (const char*) buf, (size_t) bufsize, LOADER_ASSET); AAsset_close(luaCode); if (status) { LOGE("error loading file: %s", lua_tostring(L, -1)); goto quit; } // pass the android_app state to Lua land: lua_pushlightuserdata(L, state); status = lua_pcall(L, 1, LUA_MULTRET, 0); if (status) { LOGE("Failed to run script: %s", lua_tostring(L, -1)); goto quit; } lua_close(L); quit: ANativeActivity_finish(state->activity); }
// Load a model from file using the ASSIMP model loader and generate all resources required to render the model void loadModel(std::string filename) { // Load the model from file using ASSIMP const aiScene* scene; Assimp::Importer Importer; // Flags for loading the mesh static const int assimpFlags = aiProcess_FlipWindingOrder | aiProcess_Triangulate | aiProcess_PreTransformVertices; #if defined(__ANDROID__) // Meshes are stored inside the apk on Android (compressed) // So they need to be loaded via the asset manager AAsset* asset = AAssetManager_open(androidApp->activity->assetManager, filename.c_str(), AASSET_MODE_STREAMING); assert(asset); size_t size = AAsset_getLength(asset); assert(size > 0); void *meshData = malloc(size); AAsset_read(asset, meshData, size); AAsset_close(asset); scene = Importer.ReadFileFromMemory(meshData, size, assimpFlags); free(meshData); #else scene = Importer.ReadFile(filename.c_str(), assimpFlags); #endif // Generate vertex buffer from ASSIMP scene data float scale = 1.0f; std::vector<Vertex> vertexBuffer; // Iterate through all meshes in the file and extract the vertex components for (uint32_t m = 0; m < scene->mNumMeshes; m++) { for (uint32_t v = 0; v < scene->mMeshes[m]->mNumVertices; v++) { Vertex vertex; // Use glm make_* functions to convert ASSIMP vectors to glm vectors vertex.pos = glm::make_vec3(&scene->mMeshes[m]->mVertices[v].x) * scale; vertex.normal = glm::make_vec3(&scene->mMeshes[m]->mNormals[v].x); // Texture coordinates and colors may have multiple channels, we only use the first [0] one vertex.uv = glm::make_vec2(&scene->mMeshes[m]->mTextureCoords[0][v].x); // Mesh may not have vertex colors vertex.color = (scene->mMeshes[m]->HasVertexColors(0)) ? 
glm::make_vec3(&scene->mMeshes[m]->mColors[0][v].r) : glm::vec3(1.0f); // Vulkan uses a right-handed NDC (contrary to OpenGL), so simply flip Y-Axis vertex.pos.y *= -1.0f; vertexBuffer.push_back(vertex); } } size_t vertexBufferSize = vertexBuffer.size() * sizeof(Vertex); // Generate index buffer from ASSIMP scene data std::vector<uint32_t> indexBuffer; for (uint32_t m = 0; m < scene->mNumMeshes; m++) { uint32_t indexBase = static_cast<uint32_t>(indexBuffer.size()); for (uint32_t f = 0; f < scene->mMeshes[m]->mNumFaces; f++) { // We assume that all faces are triangulated for (uint32_t i = 0; i < 3; i++) { indexBuffer.push_back(scene->mMeshes[m]->mFaces[f].mIndices[i] + indexBase); } } } size_t indexBufferSize = indexBuffer.size() * sizeof(uint32_t); model.indices.count = static_cast<uint32_t>(indexBuffer.size()); // Static mesh should always be device local bool useStaging = true; if (useStaging) { struct { VkBuffer buffer; VkDeviceMemory memory; } vertexStaging, indexStaging; // Create staging buffers // Vertex data VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, vertexBufferSize, &vertexStaging.buffer, &vertexStaging.memory, vertexBuffer.data())); // Index data VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, indexBufferSize, &indexStaging.buffer, &indexStaging.memory, indexBuffer.data())); // Create device local buffers // Vertex buffer VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, vertexBufferSize, &model.vertices.buffer, &model.vertices.memory)); // Index buffer VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, indexBufferSize, &model.indices.buffer, 
&model.indices.memory)); // Copy from staging buffers VkCommandBuffer copyCmd = VulkanExampleBase::createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, true); VkBufferCopy copyRegion = {}; copyRegion.size = vertexBufferSize; vkCmdCopyBuffer( copyCmd, vertexStaging.buffer, model.vertices.buffer, 1, ©Region); copyRegion.size = indexBufferSize; vkCmdCopyBuffer( copyCmd, indexStaging.buffer, model.indices.buffer, 1, ©Region); VulkanExampleBase::flushCommandBuffer(copyCmd, queue, true); vkDestroyBuffer(device, vertexStaging.buffer, nullptr); vkFreeMemory(device, vertexStaging.memory, nullptr); vkDestroyBuffer(device, indexStaging.buffer, nullptr); vkFreeMemory(device, indexStaging.memory, nullptr); } else { // Vertex buffer VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, vertexBufferSize, &model.vertices.buffer, &model.vertices.memory, vertexBuffer.data())); // Index buffer VK_CHECK_RESULT(vulkanDevice->createBuffer( VK_BUFFER_USAGE_INDEX_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, indexBufferSize, &model.indices.buffer, &model.indices.memory, indexBuffer.data())); } }
// Decodes a JPEG image from the asset manager (global `mgr`) with libjpeg and
// uploads it as a GL_RGBA texture. Writes the decoded dimensions into
// width/height and returns the GL texture name, or 0 on failure.
//
// Fixes vs. previous version:
//  - returns 0 (GLuint "no texture") instead of NULL on failure
//  - removed the "height = height = ..." double-assignment typo
//  - jpeg_destroy_decompress() is now called on every early-exit path
//  - pTexUint is freed after glTexImage2D (GL owns the pixels by then; it was
//    previously leaked because the free() was commented out)
//  - dropped unused locals (tmp, lpbtBits, rectWidth, rectHeight)
GLuint loadTextureFromJPEG(const char* filename, int &width, int &height){
    struct jpeg_decompress_struct cInfo;
    struct jpeg_error_mgr jError;

    cInfo.err = jpeg_std_error(&jError); // register error handler 1
    jError.error_exit = _JpegError;      // register error handler 2
    jpeg_create_decompress(&cInfo);      // create a decompresser

    // load from asset
    AAsset* pAsset = AAssetManager_open(mgr, filename, AASSET_MODE_UNKNOWN);
    if (!pAsset) {
        LOGD("!pAsset");
        jpeg_destroy_decompress(&cInfo);
        return 0;
    }
    unsigned char* ucharRawData = (unsigned char*)AAsset_getBuffer(pAsset);
    long myAssetLength = (long)AAsset_getLength(pAsset);

    // the jpeg_stdio_src alternative func, which is also included in IJG's lib.
    jpeg_mem_src(&cInfo, ucharRawData, myAssetLength);

    jpeg_read_header(&cInfo, TRUE); // read header
    jpeg_start_decompress(&cInfo);  // start decompression

    width = cInfo.output_width;
    height = cInfo.output_height;
    int pixelSize = cInfo.output_components;
    int lineSize = width * pixelSize;

    // one 32-bit RGBA word per output pixel
    uint32_t* pTexUint = (uint32_t*)calloc(sizeof(uint32_t), width * height);
    if (pTexUint == NULL){
        jpeg_destroy_decompress(&cInfo);
        AAsset_close(pAsset);
        return 0;
    }
    // scratch buffer for one decoded scanline (small slack kept from original)
    JSAMPLE* pSample = (JSAMPLE*)calloc(sizeof(JSAMPLE), lineSize + 10);
    if (!pSample){
        LOGE("Jpeg Lib","cannot alloc pSample");
        free(pTexUint);
        jpeg_destroy_decompress(&cInfo);
        AAsset_close(pAsset);
        return 0; //error
    }

    JSAMPROW buffer[1];
    buffer[0] = pSample;
    uint32_t* pPixelsUint = pTexUint;
    int yy = 0;
    while(cInfo.output_scanline < cInfo.output_height){
        if(yy >= cInfo.output_height) break;
        jpeg_read_scanlines(&cInfo, buffer, 1);
        // expand 3-component RGB scanline into packed RGBA (alpha forced opaque)
        for(int xx = 0, x3 = 0; xx < width; xx++, x3 += 3)
            pPixelsUint[xx] = make8888(buffer[0][x3], buffer[0][x3 + 1], buffer[0][x3 + 2], 0xff);
        pPixelsUint += width; // advance one row in the destination image
        yy++;
    }

    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*)pTexUint);

    jpeg_finish_decompress(&cInfo);
    jpeg_destroy_decompress(&cInfo);

    free(pSample);
    AAsset_close(pAsset);
    free(pTexUint); // GL has copied the pixels during glTexImage2D
    return texture;
}
// Builds the scene from the "resources.txt" configuration file: sets up the
// perspective projection, then parses the file line by line creating scene
// objects, shader attributes/uniforms, meshes, textures, skyboxes and terrain.
// NOTE(review): the bare #endif below closes a platform #if whose Android
// branch (an alternate constructor signature) lies above this chunk — confirm
// against the full file before restructuring.
SceneManager::SceneManager(GLint width, GLint height)
#endif
{
    Log << Function << endl;

    // The projection matrix is represented by the perspective matrix given by glm, assign it to each one of the objects
    GLfloat aspect = static_cast<GLfloat>(width) / static_cast<GLfloat>(height);
    projectionMatrix = glm::perspective(
        45.0f,   // Field of view, is the amount of zoom. A wide angle is 90 and a narrow angle is 30
        aspect,  // Depends on the size of the window
        0.1f,    // Near clipping plane
        500.0f   // Far clipping plane
    );

    // Read the resources.txt file to obtain the valid configuration for the engine
    string resourcesFileName = "resources.txt";
#if defined(__ANDROID__)
    // assumes `env` and `assetManager` are JNI-provided globals/members — TODO confirm
    AAssetManager* mgr = AAssetManager_fromJava(*env, assetManager);
    AAsset* pFile = AAssetManager_open(mgr, resourcesFileName.c_str(), AASSET_MODE_UNKNOWN);
    if (!pFile)
#else
    ifstream resourcesFile(resourcesFileName, ios::in);
    if (!resourcesFile.is_open())
#endif
    {
        Log << Error << "Unable to read the resources file: " << resourcesFileName << endl;
        terminate();
    }
#if defined(__ANDROID__)
    // Get the file size
    size_t fileSize = AAsset_getLength(pFile);
    // Read data from the file
    char* pData = (char*)calloc(fileSize + 1, sizeof(char));
    AAsset_read(pFile, pData, fileSize);
    // fix the string to be zero-terminated
    pData[fileSize] = 0;
    // Copy the data to a stringstream so both platforms share the parse loop below
    stringstream resourcesFile(pData);
    AAsset_close(pFile);
    free(pData);
#endif
    Log << Debug << "Parsing the resources.txt file." << endl;

    string line, name, vertex, fragment, object, texture, projection, modelview;
    // brace-init with an integer count: NumCubeFaces default-constructed strings
    vector<string> cubeTextures{ NumCubeFaces };
    GLuint size, bufferType;
    vec3 pos, scl, rot;
    char token;
    bool finished = true;

    // Each non-empty line starts with a one-character token selecting what to parse
    while (getline(resourcesFile, line))
    {
        stringstream ssLine(line);

        // Ignore empty lines on the configuration file
        if(line.size() == 0)
            continue;

        ssLine >> token;
        switch (token)
        {
        // If the line is a comment get the next token
        case '#':
            continue;
        // Start of object definition
        case '.':
            // Create a new scene object
            Log << Debug << "Starting an object definition." << endl;
            sceneobjects.push_back(make_unique<SceneObject>());
            finished = false;
            break;
        // End of an object definition
        case '-':
            Log << Debug << "End an object definition." << endl;
            finished = true;
            break;
        // Attributes used on the shaders
        case 'A':
            ssLine >> name >> size >> bufferType;
            Log << Debug << "Adding the attribute: " << name << endl;
            attributes.push_back(make_unique<Variable>(name, size, (BufferType)bufferType));
            break;
        // Uniforms used on the shaders
        case 'U':
            ssLine >> name;
            Log << Debug << "Adding the uniform: " << name << endl;
            uniforms.push_back(make_unique<Variable>(name));
            break;
        // Shaders creation
        case 'S':
            ssLine >> vertex >> fragment;
            Log << Debug << "Creating the shaders." << endl;
#if defined (__ANDROID__)
            sceneobjects.back()->SetShader(make_shared<Shader>(&mgr, vertex, fragment, attributes, uniforms));
#else
            sceneobjects.back()->SetShader(make_shared<Shader>(vertex, fragment, attributes, uniforms));
#endif
            break;
        // Object definitions
        case 'O':
            ssLine >> object;
            Log << Debug << "Loading a model." << endl;
#if defined (__ANDROID__)
            sceneobjects.back()->SetMesh(make_unique<Mesh>(&mgr, object, sceneobjects.back()->GetShader()));
#else
            sceneobjects.back()->SetMesh(make_unique<Mesh>(object, sceneobjects.back()->GetShader()));
#endif
            break;
        // Textures
        case 'T':
            ssLine >> texture;
            Log << Debug << "Loading a texture." << endl;
#if defined(__ANDROID__)
            sceneobjects.back()->SetTexture(make_unique<Texture>(&mgr, texture, sceneobjects.back()->GetShader()));
#else
            sceneobjects.back()->SetTexture(make_unique<Texture>(texture, sceneobjects.back()->GetShader()));
#endif
            break;
        // Initial coordinates
        case 'C':
            Log << Debug << "Adding coordinates to the object." << endl;
            // position, scale, rotation axis and rotation angle (reads the `angle` member)
            ssLine >> pos.x >> pos.y >> pos.z >> scl.x >> scl.y >> scl.z >> rot.x >> rot.y >> rot.z >> angle;
            sceneobjects.back()->SetCoordinates(pos, scl, rot, angle);
            break;
        // Projection matrix
        case 'P':
            ssLine >> projection;
            break;
        // Modelview matrix
        case 'M':
            ssLine >> modelview;
            break;
        // Skybox
        case 'B':
            Log << Debug << "Adding a skybox." << endl;
            // One texture per cube face
            ssLine >> cubeTextures[0] >> cubeTextures[1] >> cubeTextures[2] >> cubeTextures[3] >> cubeTextures[4] >> cubeTextures[5];
            sceneobjects.back()->SetSkymap();
#if defined(__ANDROID__)
            sceneobjects.back()->SetMesh(make_unique<Mesh>(&mgr, string(""), sceneobjects.back()->GetShader()));
            sceneobjects.back()->SetTexture(make_unique<Texture>(&mgr, cubeTextures, sceneobjects.back()->GetShader()));
#else
            sceneobjects.back()->SetMesh(make_unique<Mesh>(string(""), sceneobjects.back()->GetShader()));
            sceneobjects.back()->SetTexture(make_unique<Texture>(cubeTextures, sceneobjects.back()->GetShader()));
#endif
            break;
        // Terrain Heightmap
        case 'H':
            Log << Debug << "Loading the terrain." << endl;
            ssLine >> texture >> object;
#if defined(__ANDROID__)
            sceneobjects.back()->SetTexture(make_unique<Texture>(&mgr, texture, sceneobjects.back()->GetShader()));
            sceneobjects.back()->SetMesh(make_unique<Mesh>(&mgr, object, sceneobjects.back()->GetShader(), &sceneobjects.back()->GetTexture()));
#else
            sceneobjects.back()->SetTexture(make_unique<Texture>(texture, sceneobjects.back()->GetShader()));
            sceneobjects.back()->SetMesh(make_unique<Mesh>(object, sceneobjects.back()->GetShader(), &sceneobjects.back()->GetTexture()));
#endif
            break;
        default:
            continue;
        }

        // Check if the definition of an object is complete or if more lines are needed
        if (finished)
        {
            // Get the projection and modelview uniforms
            if (!projection.empty())
            {
                sceneobjects.back()->SetProjectionUni(glGetUniformLocation(sceneobjects.back()->GetShader()->getProgramObject(), "Projection"));
                projection.clear();
            }
            if (!modelview.empty())
            {
                sceneobjects.back()->SetModelviewUni(glGetUniformLocation(sceneobjects.back()->GetShader()->getProgramObject(), "Modelview"));
                modelview.clear();
            }
            // Clear the attributes and uniforms in order to load the next object
            attributes.clear();
            uniforms.clear();
        }
    }
#if !defined(__ANDROID__)
    // Close the resources file
    resourcesFile.close();
#endif
    // Set the initial position of the camera
    camera = vec3(2.5f, -1.0f, -5.0f);
    // Initial value of the rotation angle
    angle = 0.0f;
}