Example #1
void JSONDataParser::parseTextureAtlasData(const char* rawData, TextureAtlasData& textureAtlasData, float scale)
{
    if (rawData)
    {
        rapidjson::Document document;
        document.Parse(rawData);

        textureAtlasData.format = _getTextureFormat(_getString(document, FORMAT, ""));
        textureAtlasData.name = _getString(document, NAME, "");
        textureAtlasData.imagePath = _getString(document, IMAGE_PATH, "");

        if (scale > 0.f)
        {
            textureAtlasData.scale = scale;
        }
        else
        {
            scale = textureAtlasData.scale = _getNumber(document, SCALE, textureAtlasData.scale);
        }

        // Invert so the atlas pixel values below can be multiplied back up to the original (design) resolution.
        scale = 1.f / scale;

        if (document.HasMember(SUB_TEXTURE))
        {
            for (const auto& textureObject : document[SUB_TEXTURE].GetArray())
            {
                const auto textureData = textureAtlasData.generateTexture();
                textureData->name = _getString(textureObject, NAME, "");
                textureData->rotated = _getBoolean(textureObject, ROTATED, false);
                textureData->region.x = _getNumber(textureObject, X, 0.f) * scale;
                textureData->region.y = _getNumber(textureObject, Y, 0.f) * scale;
                textureData->region.width = _getNumber(textureObject, WIDTH, 0.f) * scale;
                textureData->region.height = _getNumber(textureObject, HEIGHT, 0.f) * scale;

                const auto frameWidth = _getNumber(textureObject, FRAME_WIDTH, -1.f);
                const auto frameHeight = _getNumber(textureObject, FRAME_HEIGHT, -1.f);
                if (frameWidth > 0.f && frameHeight > 0.f)
                {
                    textureData->frame = TextureData::generateRectangle();
                    textureData->frame->x = _getNumber(textureObject, FRAME_X, 0.f) * scale;
                    textureData->frame->y = _getNumber(textureObject, FRAME_Y, 0.f) * scale;
                    textureData->frame->width = frameWidth * scale;
                    textureData->frame->height = frameHeight * scale;
                }

                textureAtlasData.addTexture(textureData);
            }
        }
    }
    else
    {
        DRAGONBONES_ASSERT(false, "Argument error.");
    }
}
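
The _getString, _getNumber and _getBoolean helpers referenced above are not part of this listing. A minimal sketch of what such defaults-aware rapidjson accessors could look like follows; getString, getNumber and getBoolean are hypothetical stand-ins under that assumption, not the actual DragonBones implementations.

#include <string>
#include "rapidjson/document.h"

// Hypothetical stand-ins for the _getString/_getNumber/_getBoolean helpers used
// above: read a field from a rapidjson value, falling back to a default when the
// field is missing or has the wrong type.
static std::string getString(const rapidjson::Value& v, const char* key, const std::string& defaultValue)
{
    return (v.HasMember(key) && v[key].IsString()) ? v[key].GetString() : defaultValue;
}

static float getNumber(const rapidjson::Value& v, const char* key, float defaultValue)
{
    return (v.HasMember(key) && v[key].IsNumber()) ? static_cast<float>(v[key].GetDouble()) : defaultValue;
}

static bool getBoolean(const rapidjson::Value& v, const char* key, bool defaultValue)
{
    return (v.HasMember(key) && v[key].IsBool()) ? v[key].GetBool() : defaultValue;
}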
Example #2
	void VideoObject::_createClip(bool waitForCache)
	{
		hstr path = getFullPath();
		april::Image::Format textureFormat = _getTextureFormat();
		destroyResources();
		
		if (path.endsWith(".mp4"))
		{
			hstr archive = hresource::getArchive();
			if (archive != "")
			{
				path = hrdir::joinPath(archive, path);
			}
		}
		
		try
		{
			TheoraOutputMode mode = TH_RGBA;

			if (textureFormat == april::Image::FORMAT_RGBA)				mode = TH_RGBA;
			else if (textureFormat == april::Image::FORMAT_RGBX)		mode = TH_RGBX;
			else if (textureFormat == april::Image::FORMAT_BGRA)		mode = TH_BGRA;
			else if (textureFormat == april::Image::FORMAT_BGRX)		mode = TH_BGRX;
			else if (textureFormat == april::Image::FORMAT_ARGB)		mode = TH_ARGB;
			else if (textureFormat == april::Image::FORMAT_XRGB)		mode = TH_XRGB;
			else if (textureFormat == april::Image::FORMAT_ABGR)		mode = TH_ABGR;
			else if (textureFormat == april::Image::FORMAT_XBGR)		mode = TH_XBGR;
			else if (textureFormat == april::Image::FORMAT_RGB)			mode = TH_RGBX;
			else if (textureFormat == april::Image::FORMAT_BGR)			mode = TH_BGRX;
			else if (textureFormat == april::Image::FORMAT_GRAYSCALE)	mode = TH_GREY;
			int ram = april::getSystemInfo().ram;
			int precached = 16;
#if defined(_ANDROID) || (defined(_WINRT) && !defined(_WINP8))
			// The Android and WinRT builds of libtheoraplayer use an optimized libtheora which is faster, but still slower than
			// a native hardware-accelerated codec, so (for now) we use a larger precache to compensate. WinP8 can't handle this memory-wise, though.
			if (ram > 512) precached = 32;
#else
			if      (ram < 384) precached = 6;
			else if (ram < 512) precached = 8;
			else if (ram < 1024)
			{
				if (path.contains("lowres")) precached = 16;
				else precached = 8;
			}
#endif
			
			if (path.endsWith("mp4"))
			{
				try
				{
					if (april::window->getName() == "OpenKODE") // because MP4s are opened via Apple's API, which doesn't play nicely with the OpenKODE directory structure
						mClip = gVideoManager->createVideoClip(hrdir::joinPath("res", path).cStr(), mode, precached);
					else
						mClip = gVideoManager->createVideoClip(path.cStr(), mode, precached);
				}
				catch (_TheoraGenericException& e)
				{
					// pass the exception on as an hexception so the rest of the system can handle it
					throw Exception(e.getErrorText().c_str());
				}
			}
			else
			{
				if (!path.endsWith(".mp4") && ram > 256)
				{
					hresource r;
					r.open(path);
					unsigned long size = (unsigned long) r.size();
					TheoraDataSource* source;

					// Additional performance optimization: preload the file into RAM to speed up decoding; every bit counts on Android/WinRT ARM.
					// Only do this for "reasonably" sized files, though.
					if (size < 64 * 1024 * 1024)
					{
						hlog::write(logTag, "Preloading video file to memory: " + path);
						unsigned char* data = new unsigned char[size];
						r.readRaw(data, (int) size);
						source = new TheoraMemoryFileDataSource(data, size, path.cStr());
					}
					else
					{
						source = new AprilVideoDataSource(path);
					}
					
					mClip = gVideoManager->createVideoClip(source, mode, precached);
					r.close();
					hlog::write(logTag, "Created video clip.");
				}
				else
				{
					mClip = gVideoManager->createVideoClip(new AprilVideoDataSource(path), mode, precached);
				}
			}
		}
		catch (_TheoraGenericException& e)
		{
			throw Exception(e.getErrorText().c_str());
		}
		if (mClip->getWidth() == 0) throw Exception("Failed to load video file: " + path);
		mClip->setAutoRestart(mLoop);
		
		int tw = mClip->getWidth();
		int th = mClip->getHeight();
		april::RenderSystem::Caps caps = april::rendersys->getCaps();
		if (!caps.npotTexturesLimited && !caps.npotTextures)
		{
			tw = hpotceil(tw);
			th = hpotceil(th);
		}

		hlog::write(logTag, "Creating video textures for " + mClipName);
		april::Texture* tex;
		for (int i = 0; i < 2; i++)
		{
			tex = april::rendersys->createTexture(tw, th, april::Color::Clear, textureFormat, april::Texture::TYPE_VOLATILE);
			tex->setAddressMode(april::Texture::ADDRESS_CLAMP);
			mTexture = new aprilui::Texture(tex->getFilename() + "_" + hstr(i + 1), tex);

			mVideoImage = new aprilui::Image(mTexture, "video_img_" + hstr(i + 1), grect(mClip->getSubFrameOffsetX(), mClip->getSubFrameOffsetY(), mClip->getSubFrameWidth(), mClip->getSubFrameHeight()));
			mVideoImage->setBlendMode(mBlendMode);

			mTextures += mTexture;
			mVideoImages += mVideoImage;
		}

		if (waitForCache && mInitialPrecacheFactor > 0.0f)
		{
			float factor = hmax(2.0f / mClip->getNumPrecachedFrames(), mInitialPrecacheFactor);
			float precached = (float) mClip->getNumReadyFrames() / mClip->getNumPrecachedFrames();
			if (precached < factor)
			{
				hlog::writef(logTag, "Waiting for cache (%.1f%% / %.1f%%): %s", precached * 100.0f, factor * 100.0f, path.cStr());
				if (factor > 0)
				{
					precached = mClip->waitForCache(factor, mInitialPrecacheTimeout); // better to wait a while than to display an empty image
				}
				if (precached < factor)
				{
					hlog::writef(logTag, "Initial precache cached %.1f%% frames, target precache factor was %.1f%%", precached * 100.0f, factor * 100.0f);
				}
			}
		}

		if (mAudioName != "")
		{
			hstr category = "video";
			if (mAudioName.contains("/"))
			{
				harray<hstr> folders = hrdir::splitPath(mAudioName);
				hstr path_category = folders[folders.size() - 2];
				if (xal::manager->hasCategory(path_category)) category = path_category;
			}
			if (category == "video" && !xal::manager->hasCategory("video"))
			{
#if defined(_WINRT) || defined(_ANDROID)
				xal::manager->createCategory("video", xal::ON_DEMAND, xal::DISK);
#else
				if (april::getSystemInfo().ram >= 512)
				{
					xal::manager->createCategory("video", xal::STREAMED, xal::RAM);
				}
				else
				{
					xal::manager->createCategory("video", xal::STREAMED, xal::DISK);
				}
#endif
			}
			mSound = xal::manager->createSound(hrdir::joinPath(hrdir::joinPath(this->dataset->getFilePath(), "video"), mAudioName), category);
			if (mSound != NULL)
			{
				mAudioPlayer = xal::manager->createPlayer(mSound->getName());
				mTimer = new AudioVideoTimer(this, mAudioPlayer, mAudioSyncOffset);
			}
		}
		if (mTimer == NULL)
		{
			mTimer = new VideoTimer(this);
		}
		mClip->setTimer(mTimer);
		mClip->setPlaybackSpeed(mSpeed);
		update(0); // to grab the first frame.
	}
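
One detail worth calling out from Example #2: when the render system reports no NPOT texture support, the clip dimensions are rounded up to the next power of two with hpotceil before the video textures are created. A minimal sketch of such a rounding helper is shown below; potCeil is a hypothetical stand-in, not the hltypes hpotceil implementation.

#include <cstdio>

// Hypothetical equivalent of the hpotceil call used above: round a texture
// dimension up to the nearest power of two for hardware without NPOT support.
static int potCeil(int value)
{
	int pot = 1;
	while (pot < value)
	{
		pot <<= 1;
	}
	return pot;
}

int main()
{
	// e.g. an 800x450 video frame would be backed by a 1024x512 texture
	printf("%d x %d\n", potCeil(800), potCeil(450));
	return 0;
}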