Пример #1
0
// Application entry point: builds the engine singletons (video, input, audio),
// runs the event/update/render loop until the backend reports APP_QUIT, then
// tears the application down.
int main()
{
	VideoPtr video;
	BaseApplicationPtr application = CreateBaseApplication();

	// 480x854 window titled "GS2D"; boolean flag meanings are assumed from
	// CreateVideo's signature elsewhere -- confirm against the gs2d headers
	if ((video = CreateVideo(480, 854, L"GS2D", true, true, L"assets/fonts/")))
	{
		InputPtr input = CreateInput(0, true);
		AudioPtr audio = CreateAudio(0);

		application->Start(video, input, audio);

		Video::APP_STATUS status;
		while ((status = video->HandleEvents()) != Video::APP_QUIT)
		{
			// APP_SKIP: don't update/render this frame
			if (status == Video::APP_SKIP)
				continue;

			input->Update();
			// clamp the frame delta to 1000 ms so a long stall can't produce
			// a huge simulation step
			application->Update(Min(static_cast<unsigned long>(1000), ComputeElapsedTime(video)));
			application->RenderFrame();
		}
	}
	// NOTE(review): Destroy() also runs when CreateVideo failed and Start()
	// never executed -- assumed safe for BaseApplication; confirm.
	application->Destroy();	

	// Debug Windows builds: request a CRT leak report at process exit.
	#ifdef _DEBUG
	 #ifdef WIN32
	  _CrtSetDbgFlag ( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF );
	  #endif
	#endif
	return 0;
}
Пример #2
0
// (Re)starts the torch sample: keeps the engine handles, rebuilds the ETH
// resource provider, (re)creates the torch entity and allocates a
// quarter-resolution render target for it.
void MedievalTorch::Start(VideoPtr video, InputPtr input, AudioPtr audio)
{
	this->video = video;
	this->input = input;
	this->audio = audio;

	// full-white ambient light
	m_props.ambient = Vector3(1,1,1);

	Platform::FileManagerPtr fileManager = video->GetFileManager();
	m_provider = ETHResourceProviderPtr(new ETHResourceProvider(
		ETHGraphicResourceManagerPtr(new ETHGraphicResourceManager()),
		ETHAudioResourceManagerPtr(new ETHAudioResourceManager()),
		boost::shared_ptr<ETHShaderManager>(new ETHShaderManager(video, GS_L("assets/data/"))),
		GS_L("assets/"), video, audio, input));

	const Vector2 screenSize = video->GetScreenSizeF();

	// quarter of the 480x854 design resolution. NOTE(review): 854/4 is INTEGER
	// division (213, not 213.5) -- presumably intentional since the render
	// target needs integral dimensions, but confirm
	const float TARGET_WIDTH = 480/4;
	const float TARGET_HEIGHT = 854/4;

	// drop any previous entity before re-creating it (Start may run again,
	// e.g. on device reset)
	if (pTorch)
		pTorch->Release();
	pTorch = new ETHRenderEntity(GS_L("assets/entities/medieval_torch.ent"), m_provider);
	// anchor the torch near the bottom-center of the target
	pTorch->SetOrphanPositionXY(Vector2(TARGET_WIDTH / 2.0f, TARGET_HEIGHT - (TARGET_HEIGHT / 10.0f)));
	// two scale calls on particle system 0: a fixed 1.6x boost plus a
	// screen-width proportional factor -- assumed cumulative; confirm
	// ScaleParticleSystem semantics
	pTorch->ScaleParticleSystem(0, 1.6f);
	pTorch->ScaleParticleSystem(0, screenSize.x / 480.0f / 4);

	renderTarget = video->CreateRenderTarget(static_cast<unsigned int>(TARGET_WIDTH), static_cast<unsigned int>(TARGET_HEIGHT), GSTF_DEFAULT);
}
Пример #3
0
 // Windows wide-char entry point. The #endif below closes a platform-selection
 // #if that lives above this chunk (not visible here) -- do not remove it.
 int wmain(int argc, gs2d::str_type::char_t* argv[])
#endif
{
	// standard-filesystem file manager, resources rooted at "data/"
	Platform::FileManagerPtr fileManager(new Platform::StdFileManager());

	Platform::FileIOHubPtr fileIOHub = Platform::CreateFileIOHub(fileManager, GS_L("data/"));

	VideoPtr video;
	// 1024x768 window; on creation failure the loop is skipped and we exit
	if ((video = CreateVideo(1024, 768, GS_L("temp"), true, true, fileIOHub, Texture::PF_UNKNOWN, false)))
	{
		InputPtr input = CreateInput(0, false);
		AudioPtr audio = CreateAudio(0);

		Video::APP_STATUS status;
		while ((status = video->HandleEvents()) != Video::APP_QUIT)
		{
			// APP_SKIP: don't render this frame
			if (status == Video::APP_SKIP)
				continue;

			input->Update();
			// empty scene: begin/end only clears and presents the back buffer
			video->BeginSpriteScene();
			video->EndSpriteScene();
		}
	}
	return 0;
}
// Computes the depth-sort key for an entity. The base key is the entity depth
// scaled into screen-height units; vertical entities get an extra camera-
// relative Y shift plus a 0.1 bias, and decals get only the 0.1 bias, so they
// sort just above horizontal entities at the same depth.
float ETHEntityRenderingManager::ComputeDrawHash(VideoPtr video, const float entityDepth, const ETHSpriteEntity* entity) const
{
	static const float precisionScale = 100.0f;
	const float screenHeight = video->GetScreenSize().y * precisionScale;
	const float hashDepth = entityDepth * screenHeight;

	switch (entity->GetType())
	{
	case ETHEntityProperties::ET_VERTICAL:
	{
		// shift by the entity's on-screen Y so verticals stack correctly
		const float verticalHashShift =
			((entity->GetPositionY() - video->GetCameraPos().y) * precisionScale) / screenHeight;
		return hashDepth + verticalHashShift + 0.1f;
	}
	case ETHEntityProperties::ET_GROUND_DECAL:
	case ETHEntityProperties::ET_OPAQUE_DECAL:
		return hashDepth + 0.1f;
	case ETHEntityProperties::ET_HORIZONTAL:
	default:
		return hashDepth;
	}
}
Пример #5
0
bool ETHLineDrawer::Draw(const unsigned long lastFrameElapsedTimeMS)
{
	GS2D_UNUSED_ARGUMENT(lastFrameElapsedTimeMS);
	VideoPtr video = provider->GetVideo();
	video->SetLineWidth(width);
	return video->DrawLine(a, b, colorA, colorB);
}
Пример #6
0
// JNI bootstrap called by the Java activity: creates the engine singletons
// (zip-backed file IO, video, input, audio), loads the splash sprite and, if
// an application object already exists (activity restart), restarts it.
JNIEXPORT void JNICALL Java_net_asantee_gs2d_GS2DJNI_start(
	JNIEnv* env, jobject thiz, jstring apkPath, jstring externalPath, jstring globalPath, jint width, jint height)
{
	g_splashShown = false;

	jboolean isCopy;
	const char* strApk = env->GetStringUTFChars(apkPath, &isCopy);
	const char* strExt = env->GetStringUTFChars(externalPath, &isCopy);
	const char* strGlo = env->GetStringUTFChars(globalPath, &isCopy);
	zip = boost::shared_ptr<Platform::ZipFileManager>(new Platform::ZipFileManager(strApk));
	Platform::FileIOHubPtr fileIOHub(new Platform::AndroidFileIOHub(zip, strExt, strGlo, ETHDirectories::GetBitmapFontDirectory()));

	// fix: release the JNI UTF-8 buffers once the constructors above have
	// consumed them -- previously they leaked on every engine start.
	// (Assumes ZipFileManager/AndroidFileIOHub copy the strings -- confirm
	// they take them by value or std::string.)
	env->ReleaseStringUTFChars(apkPath, strApk);
	env->ReleaseStringUTFChars(externalPath, strExt);
	env->ReleaseStringUTFChars(globalPath, strGlo);

	video = CreateVideo(width, height, fileIOHub);
	input = CreateInput(&g_inputStr, true);
	audio = CreateAudio(0);

	video->ResetVideoMode(width, height, Texture::PF_DEFAULT, false);
	// restore the volume cached by GS2DJNI_destroy on a previous run
	audio->SetGlobalVolume(g_globalVolume);

	splashSprite = video->CreateSprite(GS_L("assets/data/splash.bmp"));

	// if the application is already initialized, let's reset the device
	if (application)
	{
		application->Start(video, input, audio);
	}
}
Пример #7
0
// Computes the parallax displacement for a point at screen position
// (pos.x, pos.y) and depth pos.z: proportional to its distance from the
// parallax origin, normalized by the screen width, then scaled by depth,
// the global intensity and the per-entity intensity.
gs2d::math::Vector2 ETHParallaxManager::ComputeOffset(
	const VideoPtr& video,
	const Vector3 &pos,
	const float& individualParallaxIntensity) const
{
	const Vector2 inScreenPos(Vector2(pos.x, pos.y) - video->GetCameraPos());
	const Vector2 fromOrigin(inScreenPos - GetInScreenOrigin(video));
	return (fromOrigin / video->GetScreenSizeF().x) * pos.z * m_intensity * individualParallaxIntensity;
}
Пример #8
0
void ETHLight::SetLightScissor(const VideoPtr& video, const Vector2& zAxisDir) const
{
	const float squareEdgeSize = range * 2.0f;
	Vector2 sum(zAxisDir * pos.z * 2.0f);
	sum.x = Abs(sum.x);
	sum.y = Abs(sum.y);
	const Vector2 squareSize(Vector2(squareEdgeSize, squareEdgeSize) + sum);
	const Vector2 absPos(ETHGlobal::ToScreenPos(pos, zAxisDir) - video->GetCameraPos() - (squareSize * 0.5f));
	video->SetScissor(Rect2D(absPos.ToVector2i(), squareSize.ToVector2i()));
}
Пример #9
0
// Draws a tab-shaped button (curved left edge, middle rectangle, mirrored
// curved right edge) with a text label, highlighting it while hovered.
// Returns true when the tab was clicked (hover + left-mouse hit) this frame.
bool EditorBase::DrawTab(VideoPtr video, InputPtr input, const Vector2 &v2Pos, const float width, const wstring &text, Color color)
{
	video->SetAlphaMode(Video::AM_PIXEL);
	video->SetVertexShader(ShaderPtr());
	video->SetPixelShader(ShaderPtr());
	video->SetZBuffer(false);
	video->SetZWrite(false);

	// draw in screen space: save the camera and reset it to the origin
	const Vector2 v2Cam = video->GetCameraPos();
	video->SetCameraPos(Vector2(0,0));

	// fix: plain assignment instead of assignment-inside-if -- removes the
	// "assignment in condition" warning and makes the intent explicit
	const bool mouseOver = ETHGlobal::PointInRect(input->GetCursorPositionF(video), v2Pos+Vector2(width/2, m_menuSize), Vector2(width, m_menuSize));
	if (mouseOver)
	{
		// hovered tabs are drawn at least mostly opaque
		if (color.a < 200)
			color.a = 200;
	}

	const Vector2 v2CurveSize = m_curve->GetBitmapSizeF();
	const float rectWidth = width-v2CurveSize.x*2;

	// left curve (flipped), middle rectangle, right curve (flipped back)
	m_curve->FlipX();
	m_curve->Draw(v2Pos, color);

	const Vector2 v2RectPos = v2Pos+Vector2(v2CurveSize.x, 0);
	video->DrawRectangle(v2RectPos, Vector2(rectWidth, m_menuSize*2), color);

	m_curve->FlipX();
	m_curve->Draw(v2Pos+Vector2(v2CurveSize.x+rectWidth, 0), color);

	ShadowPrint(v2RectPos+Vector2(v2CurveSize.x, m_menuSize/2), text.c_str(), L"Verdana14_shadow.fnt", Color(color.a,255,255,255));

	// restore the caller's camera
	video->SetCameraPos(v2Cam);
	return (mouseOver && input->GetKeyState(GSK_LMOUSE) == GSKS_HIT);
}
void ETHScriptWrapper::SetCameraPos(const Vector2 &v2Pos)
{
	if (WarnIfRunsInMainFunction(GS_L("SetCameraPos")))
		return;

	// rounds up camera final position
	VideoPtr video = m_provider->GetVideo();
	video->RoundUpPosition(m_roundUpPosition);
	video->SetCameraPos(v2Pos);
	video->RoundUpPosition(false);
}
void ETHScriptWrapper::AddToCameraPos(const Vector2 &v2Add)
{
	if (WarnIfRunsInMainFunction(GS_L("AddToCameraPos")))
		return;

	// rounds up camera final position
	VideoPtr video = m_provider->GetVideo();
	video->RoundUpPosition(m_roundUpPosition);
	video->MoveCamera(m_provider->GetGlobalScaleManager()->Scale(v2Add));
	video->RoundUpPosition(false);
}
Пример #12
0
static void DrawSplashScreen()
{
	video->BeginSpriteScene(gs2d::constant::BLACK);
	if (splashSprite)
	{
		splashSprite->SetOrigin(gs2d::Sprite::EO_CENTER);
		const Vector2 screenSize(video->GetScreenSizeF());
		const float scale = ComputeSplashScale(screenSize);
		splashSprite->Draw(screenSize * 0.5f, gs2d::constant::WHITE, 0.0f, Vector2(scale, scale));
	}
	video->EndSpriteScene();
}
Пример #13
0
// Draws the background rectangle and caption for a custom-data input field:
// the rect wraps the GUI control plus a 5px border and one extra menu row,
// with 'text' printed along its bottom edge.
void CustomDataEditor::DrawInputFieldRect(const Vector2 &v2Pos,
										   const GS_GUI *pGui,
										   EditorBase *pEditor,
										   const str_type::char_t *text) const
{
	VideoPtr videoDevice = pEditor->GetVideoHandler();

	const Vector2 border(5, 5);
	const Vector2 topLeft(v2Pos - border - Vector2(0, pGui->GetSize()));
	const Vector2 boxSize(Vector2(pGui->GetWidth(), pGui->GetSize() * 2 + pEditor->GetMenuSize()) + border * 2);

	// vertical gradient using the GUI style's active colors
	const GSGUI_STYLE* style = pGui->GetGUIStyle();
	videoDevice->DrawRectangle(topLeft, boxSize, style->active_top, style->active_top, style->active_bottom, style->active_bottom);

	pEditor->ShadowPrint(Vector2(topLeft.x, topLeft.y + boxSize.y - pEditor->GetMenuSize()), text, gs2d::constant::BLACK);
}
Пример #14
0
// Concatenates the audio and video backend command strings into one payload
// for the Java side. If either context is not a str_type::string inside its
// boost::any, logs an error and returns an empty string instead.
str_type::string AssembleCommands()
{
	try
	{
		str_type::stringstream ss;
		ss
		<< boost::any_cast<str_type::string>(audio->GetAudioContext())
		<< boost::any_cast<str_type::string>(video->GetGraphicContext());
		return ss.str();
	}
	catch (const boost::bad_any_cast&) // fix: unnamed -- the exception object was never used
	{
		video->Message(GS_L("Invalid type for command assembling"), GSMT_ERROR);
		return GS_L("");
	}
}
Пример #15
0
// Builds the "fake eye" position used by the specular lighting pass: a point
// 1.5 screen-heights along Y at GetFakeEyeHeight() on Z, rotated by the
// entity angle, then translated to follow the camera Y and the light X.
Vector3 ETHFakeEyePositionManager::ComputeFakeEyePosition(
	VideoPtr video, ShaderPtr pShader, const bool drawToTarget,
	const Vector3 &v3LightPos, const float entityAngle)
{
	// fix: pShader has been unused since the SetConstant call below was
	// commented out; cast to void to silence the unused-parameter warning
	// while keeping the signature callers depend on
	(void)pShader;

	const Vector2 &v2CamPos(video->GetCameraPos());
	const Vector2 &v2ScreenDim(video->GetScreenSizeF());
	Vector3 v3RelativeEyePos(0, v2ScreenDim.y*1.5f, GetFakeEyeHeight());
	// when drawing to the screen (not a render target), compensate for the
	// light's vertical position relative to the camera
	if (!drawToTarget)
		v3RelativeEyePos.y -= v3LightPos.y-v2CamPos.y;

	// rotate the eye around Z to follow the entity's orientation
	Matrix4x4 matRot = RotateZ(-DegreeToRadian(entityAngle));
	v3RelativeEyePos = Multiply(v3RelativeEyePos, matRot);

	//pShader->SetConstant(GS_L("fakeEyePos"), v3RelativeEyePos+Vector3(0, v2CamPos.y, 0)+Vector3(v3LightPos.x,0,0));
	return v3RelativeEyePos+Vector3(0, v2CamPos.y, 0)+Vector3(v3LightPos.x,0,0);
}
Пример #16
0
// JNI teardown hook called when the Android activity is destroyed: releases
// the application's resources and caches the global volume so a later
// GS2DJNI_start can restore it.
JNIEXPORT jstring JNICALL Java_net_asantee_gs2d_GS2DJNI_destroy(JNIEnv* env, jobject thiz)
{
	application->Destroy();
	video->Message(GS_L("Application resources destroyed"), GSMT_INFO);
	// remember the volume across engine restarts
	g_globalVolume = audio->GetGlobalVolume();
	return env->NewStringUTF(GS_L(""));
}
Пример #17
0
// Draws the current input buffer (ss) at 'pos' with the given font and color,
// then moves the caret to the end. 'size' is unused in this overload.
void GSGUI_STRING_INPUT::Place(const Vector2& pos, const str_type::string& font,
				const float size, const Color& dwColor, VideoPtr video)
{
	const str_type::string currentText = ss;
	video->DrawBitmapText(pos, currentText, font, dwColor);
	SendCursorToEnd();
}
Пример #18
0
void GSGUI_STRING_INPUT::Place(const Vector2 pos, const std::wstring font,
				const float size, const GS_COLOR dwColor, VideoPtr video)
{
	std::wstring outputString = ss;
	video->DrawBitmapText(pos, outputString, font, dwColor);
	SendCursorToEnd();
}
Пример #19
0
// Loads (or returns the cached) sprite for 'path'. On success the sprite is
// registered in m_resource under its bare file name; on failure an empty
// SpritePtr is returned and the miss is logged.
SpritePtr ETHGraphicResourceManager::AddFile(VideoPtr video, const str_type::string &path, const bool cutOutBlackPixels)
{
	str_type::string fileName = ETHGlobal::GetFileName(path);
	{
		// cache hit: reuse the previously loaded sprite
		SpritePtr sprite = FindSprite(path, fileName);
		if (sprite)
			return sprite;
	}

	SpritePtr pBitmap;
	str_type::string fixedName(path);
	Platform::FixSlashes(fixedName);

	// pick the sprite file variant matching the current screen density
	ETHSpriteDensityManager::DENSITY_LEVEL densityLevel;
	const str_type::string finalFileName(m_densityManager.ChooseSpriteVersion(fixedName, video, densityLevel));

	// second argument is presumably the transparency color key (black vs
	// magenta) -- confirm against CreateSprite's documentation
	if (!(pBitmap = video->CreateSprite(finalFileName, (cutOutBlackPixels)? 0xFF000000 : 0xFFFF00FF)))
	{
		pBitmap.reset();
		ETH_STREAM_DECL(ss) << GS_L("(Not loaded) ") << path;
		ETHResourceProvider::Log(ss.str(), Platform::Logger::ERROR);
		return SpritePtr();
	}

	m_densityManager.SetSpriteDensity(pBitmap, densityLevel);

	//#ifdef _DEBUG
	ETH_STREAM_DECL(ss) << GS_L("(Loaded) ") << fileName;
	ETHResourceProvider::Log(ss.str(), Platform::Logger::INFO);
	//#endif
	m_resource.insert(std::pair<str_type::string, SpriteResource>(fileName, SpriteResource(fixedName, pBitmap)));
	return pBitmap;
}
Пример #20
0
// Per-frame JNI entry point: pumps engine events, updates input, and either
// draws the splash screen (first frame) or runs the regular update/render
// loop. Returns the accumulated backend commands for the Java side.
JNIEXPORT jstring JNICALL Java_net_asantee_gs2d_GS2DJNI_mainLoop(JNIEnv* env, jobject thiz, jstring inputStr)
{
	jboolean isCopy;
	// NOTE(review): GetStringUTFChars has no matching ReleaseStringUTFChars in
	// this function -- unless g_inputStr's assignment copies the buffer and
	// something releases it, this leaks one UTF-8 copy per frame. Confirm
	// g_inputStr's type and pair with ReleaseStringUTFChars if it copies.
	g_inputStr = env->GetStringUTFChars(inputStr, &isCopy);

	video->HandleEvents();
	input->Update();

	// if the splash screen has already been shown, do the regular engine loop
	if (g_splashShown)
	{
		// if the engine hasn't been started yet (which means the previous frame was the splash screen frame),
		// start the engine machine before performing the regular loop
		if (!application)
		{
			StartApplication();
		}
		// clamp the frame delta to 1000 ms to avoid huge simulation steps
		application->Update(Min(static_cast<unsigned long>(1000), ComputeElapsedTime(video)));
		application->RenderFrame();
	}
	else
	{
		// draw the splash screen and prepare the engine start
		DrawSplashScreen();
		g_splashShown = true;
	}

	return env->NewStringUTF(AssembleCommands().c_str());
}
Пример #21
0
// Handles one frame of player touch input: a touch starting on the current
// player charges a shot (drag to aim), releasing fires it and passes the
// turn; touches that start elsewhere drag the camera instead.
void PlayerManager::Update(VideoPtr video, InputPtr input, ShootManagerPtr shootManager, SpritePtr playerSprite, SpritePtr shootSprite)
{
	// persists across frames: where the touch started and the current drag vector
	static Vector2 touchStart, dragVector;
	// index of the player whose turn it is (0 or 1)
	static int turn = 0;

	if (input->GetTouchState(0) == GSKS_HIT)
	{
		const Rect2Df playerRect(GetPlayer(turn)->GetPos(), playerSprite->GetRect().size);
		if (IsInArea(GetAbsolutePos(input, video), playerRect, Vector2(0.5f, 0.5f), video))
			touchStart = input->GetTouchPos(0, video);
		else
			touchStart = GS_NO_TOUCH; // began off-player: camera drag mode
	}
	if (input->GetTouchState(0) == GSKS_DOWN)
	{
		// aim vector points from the current finger position back to the start
		dragVector = touchStart - input->GetTouchPos(0, video);
	}
	if (input->GetTouchState(0) == GSKS_RELEASE && touchStart != GS_NO_TOUCH)
	{
		const Vector2 playerPos = m_simulator->GetPosition(GetPlayer(turn)->GetBodyId());
		shootManager->Shoot(playerPos, turn, dragVector);
		turn = !turn; // alternate between the two players
	}
	if (touchStart == GS_NO_TOUCH)
	{
		// drag the camera opposite to the finger movement
		video->MoveCamera(input->GetTouchMove(0) * -1);
	}
}
Пример #22
0
// Sets the window title to "<wszTitle> - <current file>", converting the
// current file name from UTF-8 to a wide string.
void EditorBase::SetFileNameToTitle(VideoPtr video, const wchar_t *wszTitle)
{
	wstring title(wszTitle);
	title += L" - ";
	title += utf8::c(GetCurrentFile(false)).wc_str();
	video->SetWindowTitle(title);
}
Пример #23
0
bool IsSphereInScreen(const Vector3& pos, const float radius, const Vector2& zAxisDir, const VideoPtr& video)
{
	const Vector2 v2Pos(ToScreenPos(pos, zAxisDir));
	const Vector2& v2Cam = video->GetCameraPos();
	const Vector2& v2Screen = video->GetScreenSizeF();
	const float minX = v2Cam.x - radius;
	const float maxX = v2Cam.x + v2Screen.x + radius;
	const float minY = v2Cam.y - radius;
	const float maxY = v2Cam.y + v2Screen.y + radius;
	if (v2Pos.x < minX || v2Pos.x > maxX || v2Pos.y < minY || v2Pos.y > maxY)
	{
		return false;
	}
	else
	{
		return true;
	}
}
Пример #24
0
// Renders the touch button's sprite frame at its position; a pressed button
// gets a second additive pass as a highlight. Does nothing (beyond restoring
// pixel alpha mode) when the sprite is unavailable.
void TouchButton::DrawButton(VideoPtr video, InputPtr input, SpriteResourceManager& spr)
{
	SpritePtr buttonSprite = spr.GetSprite(video, m_buttonSprite);
	video->SetAlphaMode(GSAM_PIXEL);
	if (!buttonSprite)
		return;

	buttonSprite->SetRect(m_buttonFrame);
	buttonSprite->SetOrigin(m_origin);
	buttonSprite->Draw(m_pos);

	if (m_status == PRESSED)
	{
		// additive overdraw brightens the pressed button
		video->SetAlphaMode(GSAM_ADD);
		buttonSprite->Draw(m_pos);
		video->SetAlphaMode(GSAM_PIXEL);
	}
}
Пример #25
0
// Engine (re)start hook. On first start it builds the resource provider,
// prepares the scripting engine and (optionally) runs the script's main
// function; on a subsequent start (device reset) it only restores the
// background color and recovers scene resources.
void ETHEngine::Start(VideoPtr video, InputPtr input, AudioPtr audio)
{
	Platform::FileIOHubPtr fileIOHub = video->GetFileIOHub();

	// read the app properties file (density manager, rich lighting, defined words)
	ETHAppEnmlFile file(fileIOHub->GetResourceDirectory() + ETH_APP_PROPERTIES_FILE, fileIOHub->GetFileManager(), video->GetPlatformName());
	m_richLighting = file.IsRichLightingEnabled();

	m_provider = ETHResourceProviderPtr(new ETHResourceProvider(
		ETHGraphicResourceManagerPtr(new ETHGraphicResourceManager(file.GetDensityManager())),
		ETHAudioResourceManagerPtr(new ETHAudioResourceManager()),
		ETHShaderManagerPtr(new ETHShaderManager(video, fileIOHub->GetStartResourceDirectory() + ETHDirectories::GetShaderDirectory(), m_richLighting)),
		video, audio, input, fileIOHub, false));

	m_ethInput.SetProvider(m_provider);

	CreateDynamicBackBuffer(file);

	// first-time start: the scripting engine (m_pASEngine) doesn't exist yet
	if (!m_pASEngine)
	{
		video->SetBGColor(gs2d::constant::BLACK);

		if (!PrepareScriptingEngine(file.GetDefinedWords()))
		{
			Abort();
			return;
		}

		if (m_compileAndRun)
		{
			if (!RunMainFunction(GetMainFunctionId()))
			{
				Abort();
				return;
			}
			video->EnableQuitShortcuts(true);
			m_v2LastCamPos = video->GetCameraPos();
		}
	}
	else
	{
		// restart path (e.g. device reset): restore state instead of rebuilding
		video->SetBGColor(m_lastBGColor);
		m_pScene->RecoverResources();
	}
}
Пример #26
0
// Shows the "enter variable name" input box while a custom-data add operation
// is pending (m_cdesState). When the input box deactivates with a non-empty
// value, creates a default-initialized variable of the pending type on the
// entity and rebuilds the editor's data list.
void CustomDataEditor::InputVariableName(ETHEntity* pEntity, EditorBase *pEditor)
{
	VideoPtr video = pEditor->GetVideoHandler();
	// center the input field horizontally on the screen
	const Vector2 v2Pos = video->GetScreenSizeF()/2.0f - Vector2(m_inVariableName.GetWidth()/2.0f, 0.0f);

	if (m_inVariableName.IsActive())
	{
		DrawInputFieldRect(v2Pos, &m_inVariableName, pEditor, GS_L("Enter variable name"));
		m_inVariableName.PlaceInput(v2Pos);

		// caption above the field: the name of the data type being added
		str_type::stringstream ss;
		ss << ETHCustomDataManager::GetDataName(m_cdesState);
		pEditor->ShadowPrint(v2Pos-Vector2(0.0f,m_inVariableName.GetSize()), ss.str().c_str(),
			GS_L("Verdana14_shadow.fnt"), gs2d::constant::BLACK);

		// if it has just been deactivated (PlaceInput above may have consumed
		// the confirming keystroke), commit the new variable
		if (!m_inVariableName.IsActive())
		{
			if (m_inVariableName.GetValue() != GS_L(""))
			{
				// default value per pending type: 0 / 0 / 0.0f / "none"
				switch (m_cdesState)
				{
				case CDES_ADDING_INT:
					pEntity->SetInt(m_inVariableName.GetValue(), 0);
					break;
				case CDES_ADDING_UINT:
					pEntity->SetUInt(m_inVariableName.GetValue(), 0);
					break;
				case CDES_ADDING_FLOAT:
					pEntity->SetFloat(m_inVariableName.GetValue(), 0.0f);
					break;
				case CDES_ADDING_STRING:
					pEntity->SetString(m_inVariableName.GetValue(), GS_L("none"));
					break;
				default:
					break;
				}
				Rebuild(pEntity, pEditor);
			}
			m_cdesState = CDES_IDLE;
		}
	}
}
Пример #27
0
// Loads all vertex shaders, the shadow/opaque helper sprites, and the lighting
// profiles the hardware supports. If no profile is supported a warning is
// emitted; otherwise the highest-quality profile becomes current.
ETHShaderManager::ETHShaderManager(VideoPtr video, const str_type::string& shaderPath, const bool richLighting) :
	m_lastAM(Video::AM_PIXEL),
	m_fakeEyeManager(new ETHFakeEyePositionManager),
	m_richLighting(richLighting)
{
	m_video = video;

	// shader model: OpenGL builds fall back to the older profile
	Shader::SHADER_PROFILE sp = Shader::SP_MODEL_2;
	#ifdef OPENGL
	 sp = Shader::SP_MODEL_1;
	#endif

	// vertex shaders: default, particle, horizontal/vertical ambient, shadow
	m_defaultVS = m_video->LoadShaderFromFile(ETHGlobal::GetDataResourceFullPath(shaderPath, ETHShaders::DefaultVS()).c_str(), Shader::SF_VERTEX, sp);
	m_particle  = m_video->LoadShaderFromFile(ETHGlobal::GetDataResourceFullPath(shaderPath, ETHShaders::Particle_VS()).c_str(), Shader::SF_VERTEX, sp);
	m_defaultStaticAmbientVS  = m_video->LoadShaderFromFile(ETHGlobal::GetDataResourceFullPath(shaderPath, ETHShaders::Ambient_VS_Hor()).c_str(), Shader::SF_VERTEX, sp);
	m_verticalStaticAmbientVS = m_video->LoadShaderFromFile(ETHGlobal::GetDataResourceFullPath(shaderPath, ETHShaders::Ambient_VS_Ver()).c_str(), Shader::SF_VERTEX, sp);
	m_shadowVS = m_video->LoadShaderFromFile(ETHGlobal::GetDataResourceFullPath(shaderPath, ETHShaders::Shadow_VS_Ver()).c_str(), Shader::SF_VERTEX, sp);

	// projected shadow texture: PNG on GL backends, DDS elsewhere (D3D)
	#if defined(GLES2) || defined(OPENGL)
		m_projShadow = m_video->CreateSprite(ETHGlobal::GetDataResourceFullPath(shaderPath, GS_L("shadow.png")));
	#else
		m_projShadow = m_video->CreateSprite(ETHGlobal::GetDataResourceFullPath(shaderPath, GS_L("shadow.dds")));
	#endif

	m_opaqueSprite = m_video->CreateSprite(ETHGlobal::GetDataResourceFullPath(shaderPath, GS_L("default_nm.png")));

	if (m_richLighting)
	{
		// Not yet implemented on GLES2
		#if !defined(GLES2)
		  {ETHLightingProfilePtr profile(new ETHVertexLightDiffuse(m_video, shaderPath));
		  if (profile->IsSupportedByHardware())
		  {
			 m_lightingProfiles[VERTEX_LIGHTING_DIFFUSE] = profile;
		  }}
		#endif

		// pixel lighting profile; registered only when the hardware supports it
		{ETHLightingProfilePtr profile(new ETHPixelLightDiffuseSpecular(m_video, shaderPath, m_fakeEyeManager));
		if (profile->IsSupportedByHardware())
		{
			m_lightingProfiles[PIXEL_LIGHTING_DIFFUSE_SPECULAR] = profile;
		}}
	}

	if (m_lightingProfiles.empty())
	{
		video->Message(GS_L("ETHShaderManager::ETHShaderManager: no lighting profile"), GSMT_WARNING);
	}
	else
	{
		m_currentProfile = FindHighestLightingProfile();
	}
}
Пример #28
0
// Appends a parsed video as a new row in the item view, using a ListElement
// widget to display its basic info.
void MainWindow::VideoParsed( VideoPtr Video )
{
    // the item is owned by the list widget it is constructed with, and
    // setItemWidget transfers ownership of the ListElement to the view
    QListWidgetItem* item = new QListWidgetItem(ui->ItemView);
    item->setSizeHint(QSize(300, 64));

    ListElement* element = new ListElement;
    element->setInfo(Video->cast_BaseInfo());
    element->setSelected(false);

    ui->ItemView->setItemWidget(item, element);
}
Пример #29
0
// Sets up the zombie spawner: records the creation time as the spawn-timing
// baseline, keeps the sprite pool, and fills the four normalized spawn spots
// (corners in 0..1 coordinates -- presumably scaled by the screen size when
// used; confirm at the spawn site). Spawning starts at spot 0.
ZombieManager::ZombieManager(VideoPtr video, const std::vector<gs2d::str_type::string>& spriteNames, BallPtr ball,
							 const std::vector<str_type::string>& bloodDecals) :
	m_ball(ball),
	m_normalizedSpawnSpots(4),
	m_currentSpawnSpot(0),
	m_bloodDecals(bloodDecals)
{
	// baseline for the spawn interval
	m_lastAddTime = video->GetElapsedTime();
	m_spriteNames = spriteNames;
	// corners: top-left, top-right, bottom-right, bottom-left
	m_normalizedSpawnSpots[0] = Vector2(0, 0);
	m_normalizedSpawnSpots[1] = Vector2(1, 0);
	m_normalizedSpawnSpots[2] = Vector2(1, 1);
	m_normalizedSpawnSpots[3] = Vector2(0, 1);
}
Пример #30
0
// Runs one morph step blending src1 toward src2 (libvisual morph plugin).
// Returns false only when either source video is missing; otherwise blends
// palettes, applies the plugin to the destination buffer, and advances the
// time-based progress in [0, 1].
bool Morph::run (Audio const& audio, VideoPtr const& src1, VideoPtr const& src2)
{
    visual_return_val_if_fail (src1, FALSE);
    visual_return_val_if_fail (src2, FALSE);

    auto morph_plugin = m_impl->get_morph_plugin ();

    // If we're morphing using the timer, start the timer
    if (!m_impl->timer.is_active ()) {
        m_impl->timer.start ();
    }

    // palette handling: let the plugin compute it if it provides a palette
    // callback, otherwise blend the two source palettes by current progress
    if (morph_plugin->palette) {
        morph_plugin->palette (m_impl->plugin, m_impl->progress, const_cast<Audio*> (&audio), m_impl->morphpal, src1.get (), src2.get ());
    }
    else {
        auto const& src1_pal = src1->get_palette ();
        auto const& src2_pal = src2->get_palette ();

        //if (src1_pal && src2_pal) {
        m_impl->morphpal->blend (src1_pal, src2_pal, m_impl->progress);
        //}
    }

    morph_plugin->apply (m_impl->plugin, m_impl->progress, const_cast<Audio*> (&audio), m_impl->dest.get (), src1.get (), src2.get ());

    m_impl->dest->set_palette (*get_palette ());

    // Update morph progression: elapsed time over the configured morph
    // duration, clamped to [0, 1]
    double usec_elapsed = m_impl->timer.elapsed ().to_usecs ();
    double usec_morph   = m_impl->morphtime.to_usecs ();

    m_impl->progress = std::min (std::max (usec_elapsed / usec_morph, 0.0), 1.0);

    return true;
}