Example #1
/*!
  Starts the camera with the given device. The available devices
  can be queried with the availableDevices method. Starting will release
  and destroy all earlier camera resources before creating new ones.
*/
void CustomCamera::start(const QString &device)
{
    destroyResources();
    
    m_camera = new QCamera(device.toLatin1(), this);
    
    m_videoSurface = new VideoSurface(this, m_camera);
    m_camera->setViewfinder(m_videoSurface);
    
    // Set the image capturing objects.
    m_cameraImageCapture = new QCameraImageCapture(m_camera);
    m_cameraImageCapture->setCaptureDestination(
                QCameraImageCapture::CaptureToFile);
    
    // The following code finds a 16:9 resolution and sets it as the capture
    // resolution. The viewfinder image should also change to the
    // corresponding aspect ratio.
    
    // Find a resolution that matches the device's full screen, 16:9
    QImageEncoderSettings imageSettings;
    imageSettings.setCodec("image/jpeg");
    
    QList<QSize> resolutions = m_cameraImageCapture->supportedResolutions();
    QSize resolution;
    
    foreach (const QSize &size, resolutions) {
        float a = size.width() * 1.0f / (size.height() * 1.0f);
        float b = 640.0f / 360.0f;
        
        if (qAbs(a - b) <= 0.1f * qMin(qAbs(a), qAbs(b))) {
            resolution = size;
            break;
        }
    }
    
    // Apply the matched resolution, if one was found, and start the camera.
    if (resolution.isValid()) {
        imageSettings.setResolution(resolution);
        m_cameraImageCapture->setEncodingSettings(imageSettings);
    }
    
    m_camera->start();
}
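The examples in this list call destroyResources() without showing its body. For this class, a minimal sketch of the counterpart, assuming m_camera, m_videoSurface, and m_cameraImageCapture are the only resources start() creates (hypothetical, not the original implementation):

void CustomCamera::destroyResources()
{
    // Hypothetical teardown, in reverse creation order. Guarded deletes
    // keep it safe to call before the first start().
    delete m_cameraImageCapture;
    m_cameraImageCapture = 0;
    
    delete m_videoSurface;
    m_videoSurface = 0;
    
    if (m_camera) {
        m_camera->stop();
        delete m_camera;
        m_camera = 0;
    }
}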
Example #2
void EMDOgre::rebuild() {
	cleanNodes();
	destroyResources();

	for (list<Ogre::SceneNode *>::iterator it = scene_nodes.begin(); it != scene_nodes.end(); it++) {
		createOgreSceneNodeEMD(*it, (*it)->getCreator());
		mesh_resources_created = true;
	}

	to_rebuild = false;
}
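rebuild() clears to_rebuild on the way out, which suggests callers only set the flag and defer the actual work; a hypothetical call site (update() is an assumption, not shown in the source):

// Hypothetical per-frame hook: coalesce many edits into a single rebuild.
void EMDOgre::update() {
	if (to_rebuild) {
		rebuild(); // tears down and recreates the scene nodes, then clears the flag
	}
}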
Example #3
//-----------------------------------------------------------------------
void CompositorChain::viewportRemoved(const RenderTargetViewportEvent& evt)
{
	// check this is the viewport we're attached to (multi-viewport targets)
	if (evt.source == mViewport) 
	{
		// this chain is now orphaned
		// can't delete it since held from outside, but release all resources being used
		destroyResources();
	}

}
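viewportRemoved() is only delivered if the chain registered itself with the viewport's render target; in OGRE, CompositorChain is a RenderTargetListener, and the registration would look roughly like this (a sketch, not the full original constructor):

CompositorChain::CompositorChain(Ogre::Viewport* vp)
	: mViewport(vp)
{
	assert(mViewport);
	// Subscribe to the target so the viewportRemoved() callback above fires.
	mViewport->getTarget()->addListener(this);
}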
Example #4
void cleanup(void)
{
	SDL_LogMessage(SDL_LOG_CATEGORY_APPLICATION, SDL_LOG_PRIORITY_INFO, "Cleaning up ...");

	SDL_DestroyRenderer(app.renderer);
	SDL_DestroyWindow(app.window);

	destroyLookups();

	destroyTextures();

	expireTexts(1);

	destroyFonts();

	destroySounds();

	destroyGame();

	destroyFighterDefs();

	destroyCapitalShipDefs();

	destroyBulletDefs();

	destroyItemDefs();

	destroyStarSystems();

	destroyBattle();

	destroyGalacticMap();

	destroyWidgets();

	destroyResources();
	
	destroyCredits();
	
	SDL_LogMessage(SDL_LOG_CATEGORY_APPLICATION, SDL_LOG_PRIORITY_INFO, "Done");

	TTF_Quit();

	SDL_Quit();
}
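A teardown function shaped like this is typically registered once at startup so it runs on any normal exit; a minimal sketch, where init() and the main loop are assumptions:

#include <stdlib.h>

int main(int argc, char *argv[])
{
	init();          /* hypothetical startup counterpart to cleanup() */
	atexit(cleanup); /* run the teardown above on any normal exit path */

	/* ... main loop ... */

	return 0;
}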
Example #5
// Destructor
MpFlowGraphBase::~MpFlowGraphBase()
{
   int      msecsPerFrame;
   OsStatus res;

   // release the flow graph and any resources it contains
   res = destroyResources();
   assert(res == OS_SUCCESS);

   // since the destroyResources() call may not take effect until the start
   // of the next frame processing interval, we loop until this flow graph is
   // stopped and contains no resources
   msecsPerFrame = (mSamplesPerFrame * 1000) / mSamplesPerSec;
   while (mCurState != STOPPED || mResourceCnt != 0)
   {
      res = OsTask::delay(msecsPerFrame);
      assert(res == OS_SUCCESS);
   }
}
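For example, with mSamplesPerFrame = 80 and mSamplesPerSec = 8000 (a common 10 ms telephony frame), msecsPerFrame works out to (80 * 1000) / 8000 = 10, so the destructor re-checks the flow graph every 10 ms until teardown completes.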
Example #6
EMDOgre::~EMDOgre() {
	destroyResources();
	delete material_pack;
	cleanNodes(true);
}
Example #7
//-----------------------------------------------------------------------
CompositorChain::~CompositorChain()
{
	destroyResources();
}
Example #8
ESKOgre::~ESKOgre() {
	destroyResources();
}
Example #9
/*!
  Destructor.
*/
CustomCamera::~CustomCamera()
{
    destroyResources();
}
Example #10
	void VideoObject::_createClip(bool waitForCache)
	{
		hstr path = getFullPath();
		april::Image::Format textureFormat = _getTextureFormat();
		destroyResources();
		
		if (path.endsWith(".mp4"))
		{
			hstr archive = hresource::getArchive();
			if (archive != "")
			{
				path = hrdir::joinPath(archive, path);
			}
		}
		
		try
		{
			TheoraOutputMode mode = TH_RGBA;

			if (textureFormat == april::Image::FORMAT_RGBA)				mode = TH_RGBA;
			else if (textureFormat == april::Image::FORMAT_RGBX)		mode = TH_RGBX;
			else if (textureFormat == april::Image::FORMAT_BGRA)		mode = TH_BGRA;
			else if (textureFormat == april::Image::FORMAT_BGRX)		mode = TH_BGRX;
			else if (textureFormat == april::Image::FORMAT_ARGB)		mode = TH_ARGB;
			else if (textureFormat == april::Image::FORMAT_XRGB)		mode = TH_XRGB;
			else if (textureFormat == april::Image::FORMAT_ABGR)		mode = TH_ABGR;
			else if (textureFormat == april::Image::FORMAT_XBGR)		mode = TH_XBGR;
			else if (textureFormat == april::Image::FORMAT_RGB)			mode = TH_RGBX;
			else if (textureFormat == april::Image::FORMAT_BGR)			mode = TH_BGRX;
			else if (textureFormat == april::Image::FORMAT_GRAYSCALE)	mode = TH_GREY;
			int ram = april::getSystemInfo().ram;
			int precached = 16;
#if (defined(_ANDROID) || defined(_WINRT)) && !defined(_WINP8)
			// Android and WinRT builds of libtheoraplayer use an optimized libtheora which is faster, but still
			// slower than a native hardware-accelerated codec. So (for now) we use a larger precache to compensate;
			// WinP8 can't handle this memory-wise, hence its exclusion above.
			if (ram > 512) precached = 32;
#else
			if      (ram < 384) precached = 6;
			else if (ram < 512) precached = 8;
			else if (ram < 1024)
			{
				if (path.contains("lowres")) precached = 16;
				else precached = 8;
			}
#endif
			
			if (path.endsWith("mp4"))
			{
				try
				{
					if (april::window->getName() == "OpenKODE") // MP4s are opened via Apple's API, which doesn't play nice with the OpenKODE directory structure
						mClip = gVideoManager->createVideoClip(hrdir::joinPath("res", path).cStr(), mode, precached);
					else
						mClip = gVideoManager->createVideoClip(path.cStr(), mode, precached);
				}
				catch (_TheoraGenericException& e)
				{
					// pass the exception further as a hexception so the general system can understand it
					throw Exception(e.getErrorText().c_str());
				}
			}
			else
			{
				if (!path.endsWith(".mp4") && ram > 256)
				{
					hresource r;
					r.open(path);
					unsigned long size = (unsigned long) r.size();
					TheoraDataSource* source;

					// additional performance optimization: preload file in RAM to speed up decoding, every bit counts on Android/WinRT ARM
					// but only for "reasonably" sized files
					if (size < 64 * 1024 * 1024)
					{
						hlog::write(logTag, "Preloading video file to memory: " + path);
						unsigned char* data = new unsigned char[size];
						r.readRaw(data, (int) size);
						source = new TheoraMemoryFileDataSource(data, size, path.cStr());
					}
					else
					{
						source = new AprilVideoDataSource(path);
					}
					
					mClip = gVideoManager->createVideoClip(source, mode, precached);
					r.close();
					hlog::write(logTag, "Created video clip.");
				}
				else
				{
					mClip = gVideoManager->createVideoClip(new AprilVideoDataSource(path), mode, precached);
				}
			}
		}
		catch (_TheoraGenericException& e)
		{
			throw Exception(e.getErrorText().c_str());
		}
		if (mClip->getWidth() == 0) throw Exception("Failed to load video file: " + path);
		mClip->setAutoRestart(mLoop);
		
		int tw = mClip->getWidth();
		int th = mClip->getHeight();
		april::RenderSystem::Caps caps = april::rendersys->getCaps();
		if (!caps.npotTexturesLimited && !caps.npotTextures)
		{
			tw = hpotceil(tw);
			th = hpotceil(th);
		}

		hlog::write(logTag, "Creating video textures for " + mClipName);
		april::Texture* tex;
		for (int i = 0; i < 2; i++)
		{
			tex = april::rendersys->createTexture(tw, th, april::Color::Clear, textureFormat, april::Texture::TYPE_VOLATILE);
			tex->setAddressMode(april::Texture::ADDRESS_CLAMP);
			mTexture = new aprilui::Texture(tex->getFilename() + "_" + hstr(i + 1), tex);

			mVideoImage = new aprilui::Image(mTexture, "video_img_" + hstr(i + 1), grect(mClip->getSubFrameOffsetX(), mClip->getSubFrameOffsetY(), mClip->getSubFrameWidth(), mClip->getSubFrameHeight()));
			mVideoImage->setBlendMode(mBlendMode);

			mTextures += mTexture;
			mVideoImages += mVideoImage;
		}

		if (waitForCache && mInitialPrecacheFactor > 0.0f)
		{
			float factor = hmax(2.0f / mClip->getNumPrecachedFrames(), mInitialPrecacheFactor);
			float precached = (float) mClip->getNumReadyFrames() / mClip->getNumPrecachedFrames();
			if (precached < factor)
			{
				hlog::writef(logTag, "Waiting for cache (%.1f%% / %.1f%%): %s", precached * 100.0f, factor * 100.0f, path.cStr());
				if (factor > 0)
				{
					precached = mClip->waitForCache(factor, mInitialPrecacheTimeout); // better to wait a while than to display an empty image
				}
				if (precached < factor)
				{
					hlog::writef(logTag, "Initial precache cached %.1f%% frames, target precache factor was %.1f%%", precached * 100.0f, factor * 100.0f);
				}
			}
		}

		if (mAudioName != "")
		{
			hstr category = "video";
			if (mAudioName.contains("/"))
			{
				harray<hstr> folders = hrdir::splitPath(mAudioName);
				hstr path_category = folders[folders.size() - 2];
				if (xal::manager->hasCategory(path_category)) category = path_category;
			}
			if (category == "video" && !xal::manager->hasCategory("video"))
			{
#if defined(_WINRT) || defined(_ANDROID)
				xal::manager->createCategory("video", xal::ON_DEMAND, xal::DISK);
#else
				if (april::getSystemInfo().ram >= 512)
				{
					xal::manager->createCategory("video", xal::STREAMED, xal::RAM);
				}
				else
				{
					xal::manager->createCategory("video", xal::STREAMED, xal::DISK);
				}
#endif
			}
			mSound = xal::manager->createSound(hrdir::joinPath(hrdir::joinPath(this->dataset->getFilePath(), "video"), mAudioName), category);
			if (mSound != NULL)
			{
				mAudioPlayer = xal::manager->createPlayer(mSound->getName());
				mTimer = new AudioVideoTimer(this, mAudioPlayer, mAudioSyncOffset);
			}
		}
		if (mTimer == NULL)
		{
			mTimer = new VideoTimer(this);
		}
		mClip->setTimer(mTimer);
		mClip->setPlaybackSpeed(mSpeed);
		update(0); // to grab the first frame.
	}
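_createClip() begins by calling destroyResources(), whose body is not part of this result; a hedged sketch of what that counterpart would have to release, assuming the members created above and the usual manager calls (destroyVideoClip, destroyPlayer, destroySound are assumptions here, not verified against this codebase):

	void VideoObject::destroyResources()
	{
		// Hypothetical teardown: release everything _createClip() allocates.
		for (int i = 0; i < mVideoImages.size(); ++i)
		{
			delete mVideoImages[i];
		}
		mVideoImages.clear();
		mVideoImage = NULL;
		for (int i = 0; i < mTextures.size(); ++i)
		{
			delete mTextures[i];
		}
		mTextures.clear();
		mTexture = NULL;
		if (mClip != NULL)
		{
			gVideoManager->destroyVideoClip(mClip); // assumed manager API
			mClip = NULL;
		}
		delete mTimer;
		mTimer = NULL;
		if (mAudioPlayer != NULL)
		{
			xal::manager->destroyPlayer(mAudioPlayer); // assumed XAL API
			mAudioPlayer = NULL;
		}
		if (mSound != NULL)
		{
			xal::manager->destroySound(mSound); // assumed XAL API
			mSound = NULL;
		}
	}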