bool RenderWindow::OnWindowCreated()
	{
		m_parameters.doubleBuffered = true;
		m_parameters.window = GetHandle();

		std::unique_ptr<Context> context(new Context);
		if (!context->Create(m_parameters))
		{
			NazaraError("Failed to create context");
			return false;
		}

		m_context = context.release();

		if (!SetActive(true)) // Windows are activated upon creation
			NazaraWarning("Failed to activate window");

		EnableVerticalSync(false);

		Vector2ui size = GetSize();

		// At creation, the scissor box/viewport matches the window size
		// https://www.opengl.org/sdk/docs/man/xhtml/glGet.xml
		OpenGL::SetScissorBox(Recti(0, 0, size.x, size.y));
		OpenGL::SetViewport(Recti(0, 0, size.x, size.y));

		OnRenderTargetParametersChange(this);
		OnRenderTargetSizeChange(this);

		m_clock.Restart();

		return true;
	}
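
For reference, the initial scissor box and viewport that the code above mirrors into the OpenGL state tracker can be read back directly from the driver. A minimal sketch, assuming a current context (not part of the original source):

#include <GL/gl.h>

void QueryInitialViewportState()
{
	GLint viewport[4];   // x, y, width, height
	GLint scissorBox[4]; // x, y, width, height

	// Per the glGet documentation linked above, both default to the
	// window's size when the context is first bound to the window.
	glGetIntegerv(GL_VIEWPORT, viewport);
	glGetIntegerv(GL_SCISSOR_BOX, scissorBox);
}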
void ConsoleInterface::initialize( long size )
 {
  assert( size > 0 );

  console_size = size;
  line_list.initialize( size );

  line_index = console_size - 1;

  surface_size = PointXYi( 640, 480 );
  bounds = Recti( 5, 5, 640 - 5, 480 - 5 );
 
  max_char_per_line = (bounds.max.x - bounds.min.x) / 8;
    
  vertical_spacing = 2; 

  line_offset.x = 0;
  line_offset.y = (8 + vertical_spacing); 

  input_string_active = false;

  long line_loop;

  for ( line_loop = 0; line_loop < console_size; line_loop++ )
   {
    line_list[ line_loop ].color = GameConfig::getConsoleTextColor();
    line_list[ line_loop ].string[0] = 0;
    line_list[ line_loop ].life_timer.changePeriod( 8 );
   }  

  log_file_active = false;
  stdout_pipe = false;
 }
Example #3
bool Fluid::Init(int width, int height) {
  _velocities = Image2f::Create(Recti(0, 0, width, height), width, 1, 1);
  if (_velocities->Data() == nullptr) {
    return false;
  }
  return true;
}
Example #4
    void Graphics::SetUpViewport()
    {
        if (currentFbo_)
        {
            auto width = currentFbo_->GetWidth();
            auto height = currentFbo_->GetHeight();
            SetViewport(Recti(0, 0, width, height), false);
        }
        else if (activeWindow_)
        {
            auto width = activeWindow_->GetWidth();
            auto height = activeWindow_->GetHeight();
            SetViewport(Recti(0, 0, width, height), false);
        }
        else
            SetViewport(Recti(0, 0, 0, 0), false);

    }
	bool DeferredDOFPass::Process(const SceneData& sceneData, unsigned int firstWorkTexture, unsigned int secondWorkTexture) const
	{
		NazaraUnused(sceneData);

		Renderer::SetTextureSampler(0, m_pointSampler);
		Renderer::SetTextureSampler(1, m_bilinearSampler);
		Renderer::SetTextureSampler(2, m_pointSampler);

		Renderer::SetTarget(&m_dofRTT);
		Renderer::SetViewport(Recti(0, 0, m_dimensions.x/4, m_dimensions.y/4));

		Renderer::SetShader(m_gaussianBlurShader);

		const unsigned int dofBlurPass = 2;
		for (unsigned int i = 0; i < dofBlurPass; ++i)
		{
			m_dofRTT.SetColorTarget(0); // dofTextureA

			m_gaussianBlurShader->SendVector(m_gaussianBlurShaderFilterLocation, Vector2f(1.f, 0.f));

			Renderer::SetTexture(0, (i == 0) ? m_workTextures[secondWorkTexture] : static_cast<const Texture*>(m_dofTextures[1]));
			Renderer::DrawFullscreenQuad();

			m_dofRTT.SetColorTarget(1); // dofTextureB

			m_gaussianBlurShader->SendVector(m_gaussianBlurShaderFilterLocation, Vector2f(0.f, 1.f));

			Renderer::SetTexture(0, m_dofTextures[0]);
			Renderer::DrawFullscreenQuad();
		}

		m_workRTT->SetColorTarget(firstWorkTexture);
		Renderer::SetTarget(m_workRTT);
		Renderer::SetViewport(Recti(0, 0, m_dimensions.x, m_dimensions.y));

		Renderer::SetShader(m_dofShader);
		Renderer::SetTexture(0, m_workTextures[secondWorkTexture]);
		Renderer::SetTexture(1, m_dofTextures[1]);
		Renderer::SetTexture(2, m_GBuffer[1]);
		Renderer::DrawFullscreenQuad();

		return true;
	}
Example #6
    void Graphics::ResetCachedState()
    {
        Program::Clear();

        viewport_ = Recti(0);

        glGetIntegerv(GL_FRAMEBUFFER_BINDING, &systemFbo_); // On iOS the default FBO is not zero

        // Set up texture data read/write alignment
        glPixelStorei(GL_PACK_ALIGNMENT, 1);
        glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

        VertexArrayObj::Clear();
        lastMesh_ = nullptr;
        lastProgram_ = nullptr;
        activeMesh_ = nullptr;
        activeWindow_ = nullptr;

        CHECK_GL_STATUS();

        SetClearColor(Color(0, 0, 0, 1));
        SetClearDepth(1);
        SetClearStencil(0);
        SetFrameBuffer(nullptr);
        SetStencilTest(DEFAULT_STENCIL_ENABLE, DEFAULT_STENCIL_WRITEMASK, DEFAULT_STENCIL_SFAIL,
                       DEFAULT_STENCIL_DPFAIL, DEFAULT_STENCIL_DPPASS, DEFAULT_STENCIL_FUNC, DEFAULT_STENCIL_REF, DEFAULT_STENCIL_COMPAREMASK);
        SetScissorTest();

        CHECK_GL_STATUS();

        SetColorMask(DEFAULT_COLOR_MASK);
        SetDepthMask(DEFAULT_DEPTH_MASK);
        SetDepthFunc(DepthFunc::LESS);
        SetStencilMask(DEFAULT_STENCIL_MASK);
        SetBlendModeTest(DEFAULT_BLEND_MODE);
        EnableDepthTest(DEFAULT_DEPTH_TEST_ENABLE);
        EnableCullFace(DEFAULT_CULL_FACE_ENABLE);
        SetCullFace(CullFaceMode::DEFAULT);
        SetFrontFace(FrontFaceMode::DEFAULT);
        CHECK_GL_STATUS();

        UnboundTextures();
        SetVertexArrayObj(nullptr);
        SetVertexBuffer(nullptr);
        SetIndexBuffer(nullptr);
        SetProgram(nullptr);
        SetSlopeScaledBias(0);

        CHECK_GL_STATUS();
    }
	bool DeferredForwardPass::Process(const SceneData& sceneData, unsigned int workTexture, unsigned sceneTexture) const
	{
		NazaraAssert(sceneData.viewer, "Invalid viewer");
		NazaraUnused(workTexture);

		m_workRTT->SetColorTarget(sceneTexture);
		Renderer::SetTarget(m_workRTT);
		Renderer::SetViewport(Recti(0, 0, m_dimensions.x, m_dimensions.y));

		if (sceneData.background)
			sceneData.background->Draw(sceneData.viewer);

		Renderer::SetMatrix(MatrixType_Projection, sceneData.viewer->GetProjectionMatrix());
		Renderer::SetMatrix(MatrixType_View, sceneData.viewer->GetViewMatrix());

		m_forwardTechnique->Draw(sceneData);

		return false;
	}
Recti UnitBucketArray::worldRectToBucketRectClip( Recti &world_rect )
 {
  long bucket_max_x;
  long bucket_max_y;

  bucket_max_x = world_rect.max.x / pixel_x_sample_factor;
  if ( bucket_max_x >= (long) column_size )   
   { bucket_max_x = column_size - 1; }

  bucket_max_y = world_rect.max.y / pixel_y_sample_factor;
  if ( bucket_max_y >= (long) row_size )   
   { bucket_max_y = row_size  - 1; }
 
  return( Recti( world_rect.min.x / pixel_x_sample_factor,
                 world_rect.min.y / pixel_y_sample_factor,
                 bucket_max_x,
                 bucket_max_y
               )
         );
 }
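
The mapping above divides world coordinates by the per-axis sample factors and clamps the max corner so an oversized rect cannot index past the bucket grid. A standalone sketch of the same clamped mapping, with hypothetical sample factors and grid dimensions (not from the original source):

#include <algorithm>
#include <cstdio>

int main()
{
    const long pixel_x_sample_factor = 32, pixel_y_sample_factor = 32;
    const long column_size = 16, row_size = 16; // hypothetical 16x16 bucket grid

    long world_max_x = 600, world_max_y = 100;

    long bucket_max_x = std::min(world_max_x / pixel_x_sample_factor, column_size - 1);
    long bucket_max_y = std::min(world_max_y / pixel_y_sample_factor, row_size - 1);

    std::printf("bucket max: (%ld, %ld)\n", bucket_max_x, bucket_max_y); // (15, 3): x was clamped
    return 0;
}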
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
bool OverlayScalarMapperLegend::pick(int oglXCoord, int oglYCoord, const Vec2i& position, const Vec2ui& size)
{
    Recti oglRect(position, size.x(), size.y());

    OverlayColorLegendLayoutInfo layoutInViewPortCoords(oglRect.min(), Vec2ui(oglRect.width(), oglRect.height()));
    layoutInfo(&layoutInViewPortCoords);

    Vec2i legendBarOrigin = oglRect.min();
    legendBarOrigin.x() += static_cast<uint>(layoutInViewPortCoords.legendRect.min().x());
    legendBarOrigin.y() += static_cast<uint>(layoutInViewPortCoords.legendRect.min().y());

    Recti legendBarRect = Recti(legendBarOrigin, static_cast<uint>(layoutInViewPortCoords.legendRect.width()), static_cast<uint>(layoutInViewPortCoords.legendRect.height()));

    if ((oglXCoord > legendBarRect.min().x()) && (oglXCoord < legendBarRect.max().x()) &&
        (oglYCoord > legendBarRect.min().y()) && (oglYCoord < legendBarRect.max().y()))
    {
        return true;
    }

    return false;
}
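
Note the strict comparisons: a pick landing exactly on the legend border is rejected. A minimal equivalent hit test (hypothetical helper, using plain ints instead of the cvf types):

// Hit test matching the logic above: strictly inside, border excluded.
static bool isStrictlyInside(int x, int y, int minX, int minY, int maxX, int maxY)
{
    return x > minX && x < maxX && y > minY && y < maxY;
}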
	bool DeferredPhongLightingPass::Process(const SceneData& sceneData, unsigned int firstWorkTexture, unsigned secondWorkTexture) const
	{
		NazaraAssert(sceneData.viewer, "Invalid viewer");
		NazaraUnused(secondWorkTexture);

		m_workRTT->SetColorTarget(firstWorkTexture);
		Renderer::SetTarget(m_workRTT);
		Renderer::SetViewport(Recti(0, 0, m_dimensions.x, m_dimensions.y));

		Renderer::SetTexture(0, m_GBuffer[0]);
		Renderer::SetTextureSampler(0, m_pointSampler);

		Renderer::SetTexture(1, m_GBuffer[1]);
		Renderer::SetTextureSampler(1, m_pointSampler);

		Renderer::SetTexture(2, m_GBuffer[2]);
		Renderer::SetTextureSampler(2, m_pointSampler);

		Renderer::SetClearColor(Color::Black);
		Renderer::Clear(RendererBuffer_Color);

		RenderStates lightStates;
		lightStates.dstBlend = BlendFunc_One;
		lightStates.srcBlend = BlendFunc_One;
		lightStates.parameters[RendererParameter_Blend] = true;
		lightStates.parameters[RendererParameter_DepthBuffer] = false;
		lightStates.parameters[RendererParameter_DepthWrite] = false;

		// Directional lights
		if (!m_renderQueue->directionalLights.empty())
		{
			Renderer::SetRenderStates(lightStates);
			Renderer::SetShader(m_directionalLightShader);
			m_directionalLightShader->SendColor(m_directionalLightShaderSceneAmbientLocation, sceneData.ambientColor);
			m_directionalLightShader->SendVector(m_directionalLightShaderEyePositionLocation, sceneData.viewer->GetEyePosition());

			for (auto& light : m_renderQueue->directionalLights)
			{
				m_directionalLightShader->SendColor(m_directionalLightUniforms.locations.color, light.color);
				m_directionalLightShader->SendVector(m_directionalLightUniforms.locations.factors, Vector2f(light.ambientFactor, light.diffuseFactor));
				m_directionalLightShader->SendVector(m_directionalLightUniforms.locations.parameters1, Vector4f(light.direction));

				Renderer::DrawFullscreenQuad();
			}
		}

		// Point lights/Spot lights
		if (!m_renderQueue->pointLights.empty() || !m_renderQueue->spotLights.empty())
		{
			// http://www.altdevblogaday.com/2011/08/08/stencil-buffer-optimisation-for-deferred-lights/
			lightStates.parameters[RendererParameter_StencilTest] = true;
			lightStates.faceCulling = FaceSide_Front;
			lightStates.backFace.stencilMask = 0xFF;
			lightStates.backFace.stencilReference = 0;
			lightStates.backFace.stencilFail = StencilOperation_Keep;
			lightStates.backFace.stencilPass = StencilOperation_Keep;
			lightStates.backFace.stencilZFail = StencilOperation_Invert;
			lightStates.frontFace.stencilMask = 0xFF;
			lightStates.frontFace.stencilReference = 0;
			lightStates.frontFace.stencilFail = StencilOperation_Keep;
			lightStates.frontFace.stencilPass = StencilOperation_Keep;
			lightStates.frontFace.stencilZFail = StencilOperation_Invert;

			Renderer::SetRenderStates(lightStates);

			Renderer::SetShader(m_pointSpotLightShader);
			m_pointSpotLightShader->SendColor(m_pointSpotLightShaderSceneAmbientLocation, sceneData.ambientColor);
			m_pointSpotLightShader->SendVector(m_pointSpotLightShaderEyePositionLocation, sceneData.viewer->GetEyePosition());

			Matrix4f lightMatrix;
			lightMatrix.MakeIdentity();
			if (!m_renderQueue->pointLights.empty())
			{
				const IndexBuffer* indexBuffer = m_sphereMesh->GetIndexBuffer();
				Renderer::SetIndexBuffer(indexBuffer);
				Renderer::SetVertexBuffer(m_sphereMesh->GetVertexBuffer());

				m_pointSpotLightShader->SendInteger(m_pointSpotLightUniforms.locations.type, LightType_Point);
				for (const auto& light : m_renderQueue->pointLights)
				{
					m_pointSpotLightShader->SendColor(m_pointSpotLightUniforms.locations.color, light.color);
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.factors, Vector2f(light.ambientFactor, light.diffuseFactor));
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.parameters1, Vector4f(light.position, light.attenuation));
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.parameters2, Vector4f(0.f, 0.f, 0.f, light.invRadius));

					lightMatrix.SetScale(Vector3f(light.radius * 1.1f)); // Scale up slightly to hide the sphere mesh's tessellation imperfections
					lightMatrix.SetTranslation(light.position);

					Renderer::SetMatrix(MatrixType_World, lightMatrix);

					// Render the sphere into the stencil buffer
					Renderer::Enable(RendererParameter_ColorWrite, false);
					Renderer::Enable(RendererParameter_DepthBuffer, true);
					Renderer::Enable(RendererParameter_FaceCulling, false);
					Renderer::SetStencilCompareFunction(RendererComparison_Always);

					m_pointSpotLightShader->SendBoolean(m_pointSpotLightShaderDiscardLocation, true);

					Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());

					// Render the sphere as the light's area of effect
					Renderer::Enable(RendererParameter_ColorWrite, true);
					Renderer::Enable(RendererParameter_DepthBuffer, false);
					Renderer::Enable(RendererParameter_FaceCulling, true);
					Renderer::SetStencilCompareFunction(RendererComparison_NotEqual, FaceSide_Back);
					Renderer::SetStencilPassOperation(StencilOperation_Zero, FaceSide_Back);

					m_pointSpotLightShader->SendBoolean(m_pointSpotLightShaderDiscardLocation, false);

					Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());
				}

				if (m_lightMeshesDrawing)
				{
					Renderer::Enable(RendererParameter_DepthBuffer, true);
					Renderer::Enable(RendererParameter_DepthWrite, true);
					Renderer::Enable(RendererParameter_FaceCulling, false);
					Renderer::Enable(RendererParameter_StencilTest, false);
					Renderer::SetFaceFilling(FaceFilling_Line);

					const Shader* shader = ShaderLibrary::Get("DebugSimple");
					static int colorLocation = shader->GetUniformLocation("Color");

					Renderer::SetShader(shader);
					for (const auto& light : m_renderQueue->pointLights)
					{
						lightMatrix.SetScale(Vector3f(light.radius * 1.1f)); // Scale up slightly to hide the sphere mesh's tessellation imperfections
						lightMatrix.SetTranslation(light.position);

						Renderer::SetMatrix(MatrixType_World, lightMatrix);

						shader->SendColor(colorLocation, light.color);

						Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());
					}

					Renderer::Enable(RendererParameter_DepthBuffer, false);
					Renderer::Enable(RendererParameter_DepthWrite, false);
					Renderer::Enable(RendererParameter_FaceCulling, true);
					Renderer::Enable(RendererParameter_StencilTest, true);
					Renderer::SetFaceFilling(FaceFilling_Fill);
				}
			}

			if (!m_renderQueue->spotLights.empty())
			{
				const IndexBuffer* indexBuffer = m_coneMesh->GetIndexBuffer();
				Renderer::SetIndexBuffer(indexBuffer);
				Renderer::SetVertexBuffer(m_coneMesh->GetVertexBuffer());

				m_pointSpotLightShader->SendInteger(m_pointSpotLightUniforms.locations.type, LightType_Spot);
				for (const auto& light : m_renderQueue->spotLights)
				{
					m_pointSpotLightShader->SendColor(m_pointSpotLightUniforms.locations.color, light.color);
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.factors, Vector2f(light.ambientFactor, light.diffuseFactor));
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.parameters1, Vector4f(light.position, light.attenuation));
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.parameters2, Vector4f(light.direction, light.invRadius));
					m_pointSpotLightShader->SendVector(m_pointSpotLightUniforms.locations.parameters3, Vector2f(light.innerAngleCosine, light.outerAngleCosine));

					float baseRadius = light.radius * light.outerAngleTangent * 1.1f;
					lightMatrix.MakeTransform(light.position, Quaternionf::RotationBetween(Vector3f::Forward(), light.direction), Vector3f(baseRadius, baseRadius, light.radius));

					Renderer::SetMatrix(MatrixType_World, lightMatrix);

					// Render the cone into the stencil buffer
					Renderer::Enable(RendererParameter_ColorWrite, false);
					Renderer::Enable(RendererParameter_DepthBuffer, true);
					Renderer::Enable(RendererParameter_FaceCulling, false);
					Renderer::SetStencilCompareFunction(RendererComparison_Always);

					m_pointSpotLightShader->SendBoolean(m_pointSpotLightShaderDiscardLocation, true);

					Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());

					// Render the cone as the light's area of effect
					Renderer::Enable(RendererParameter_ColorWrite, true);
					Renderer::Enable(RendererParameter_DepthBuffer, false);
					Renderer::Enable(RendererParameter_FaceCulling, true);
					Renderer::SetFaceCulling(FaceSide_Front);
					Renderer::SetStencilCompareFunction(RendererComparison_NotEqual, FaceSide_Back);
					Renderer::SetStencilPassOperation(StencilOperation_Zero, FaceSide_Back);

					m_pointSpotLightShader->SendBoolean(m_pointSpotLightShaderDiscardLocation, false);

					Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());
				}

				if (m_lightMeshesDrawing)
				{
					Renderer::Enable(RendererParameter_DepthBuffer, true);
					Renderer::Enable(RendererParameter_DepthWrite, true);
					Renderer::Enable(RendererParameter_FaceCulling, false);
					Renderer::Enable(RendererParameter_StencilTest, false);
					Renderer::SetFaceFilling(FaceFilling_Line);

					const Shader* shader = ShaderLibrary::Get("DebugSimple");
					static int colorLocation = shader->GetUniformLocation("Color");

					Renderer::SetShader(shader);
					for (const auto& light : m_renderQueue->spotLights)
					{
						float baseRadius = light.radius * light.outerAngleTangent * 1.1f;
						lightMatrix.MakeTransform(light.position, Quaternionf::RotationBetween(Vector3f::Forward(), light.direction), Vector3f(baseRadius, baseRadius, light.radius));

						Renderer::SetMatrix(MatrixType_World, lightMatrix);

						shader->SendColor(colorLocation, light.color);

						Renderer::DrawIndexedPrimitives(PrimitiveMode_TriangleList, 0, indexBuffer->GetIndexCount());
					}

					Renderer::Enable(RendererParameter_DepthBuffer, false);
					Renderer::Enable(RendererParameter_DepthWrite, false);
					Renderer::Enable(RendererParameter_FaceCulling, true);
					Renderer::Enable(RendererParameter_StencilTest, true);
					Renderer::SetFaceFilling(FaceFilling_Fill);
				}
			}

			Renderer::Enable(RendererParameter_StencilTest, false);
		}

		return true;
	}
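
The stencil pass above follows the light-volume trick from the linked article: a first draw with color writes disabled inverts the stencil wherever the volume fails the depth test, then a second draw shades only the marked pixels and clears the mark. A condensed raw-OpenGL sketch of the same two-pass scheme; drawVolume() is a hypothetical helper issuing the indexed draw for the bound sphere/cone mesh:

#include <GL/gl.h>

void drawVolume(); // hypothetical: issues the indexed draw for the bound light-volume mesh

void StencilLightVolumePass()
{
	glEnable(GL_STENCIL_TEST);
	glStencilMask(0xFF);
	glDepthMask(GL_FALSE);

	// Pass 1: mark pixels inside the volume. Color writes off, depth test on,
	// both faces rasterized; invert the stencil where the depth test fails.
	glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_FALSE);
	glEnable(GL_DEPTH_TEST);
	glDisable(GL_CULL_FACE);
	glStencilFunc(GL_ALWAYS, 0, 0xFF);
	glStencilOp(GL_KEEP, GL_INVERT, GL_KEEP); // sfail, zfail, zpass
	drawVolume();

	// Pass 2: shade only the marked pixels. Color writes on, depth test off,
	// front faces culled so the light still applies with the camera inside the volume.
	glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
	glDisable(GL_DEPTH_TEST);
	glEnable(GL_CULL_FACE);
	glCullFace(GL_FRONT);
	glStencilFunc(GL_NOTEQUAL, 0, 0xFF);
	glStencilOp(GL_KEEP, GL_KEEP, GL_ZERO); // clear the mark as pixels are shaded
	drawVolume();

	glDisable(GL_STENCIL_TEST);
}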
Example #11
Recti Recti::operator * (const ivec2& scale) const
{
  return Recti(position * scale, size * scale);
}
Example #12
Recti Recti::operator - (const ivec2& offset) const
{
  return Recti(position - offset, size);
}
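
Both operators above treat the rect as a position/size pair: scaling multiplies origin and extent, while an offset moves only the origin. A minimal self-contained reconstruction of that representation (hypothetical, not the original class):

struct ivec2
{
  int x, y;
};

static ivec2 operator * (const ivec2& a, const ivec2& b) { return { a.x * b.x, a.y * b.y }; }
static ivec2 operator - (const ivec2& a, const ivec2& b) { return { a.x - b.x, a.y - b.y }; }

struct RectiSketch
{
  ivec2 position; // top-left corner
  ivec2 size;     // width and height

  RectiSketch operator * (const ivec2& scale) const { return { position * scale, size * scale }; }
  RectiSketch operator - (const ivec2& offset) const { return { position - offset, size }; }
};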
Example #13
    Texture* Texture::ConvertCubeTo2D(Cube2DMode mode)
    {
        Managed<Bitmap> posX = Faces[0].MipLevels[0]->ToBitmap();
        Managed<Bitmap> negX = Faces[1].MipLevels[0]->ToBitmap();
        Managed<Bitmap> posY = Faces[2].MipLevels[0]->ToBitmap();
        Managed<Bitmap> negY = Faces[3].MipLevels[0]->ToBitmap();
        Managed<Bitmap> posZ = Faces[4].MipLevels[0]->ToBitmap();
        Managed<Bitmap> negZ = Faces[5].MipLevels[0]->ToBitmap();

        int size = posX->GetWidth();
        
        int width, height;
        Recti coordsPosX, coordsNegX, coordsPosY, coordsNegY, coordsPosZ, coordsNegZ;

        switch (mode)
        {
        case Cube2DMode1x6:
            width = 1*size;
            height = 6*size;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(0*size, 1*size, 1*size, 2*size);
            coordsPosY = Recti(0*size, 2*size, 1*size, 3*size);
            coordsNegY = Recti(0*size, 3*size, 1*size, 4*size);
            coordsPosZ = Recti(0*size, 4*size, 1*size, 5*size);
            coordsNegZ = Recti(0*size, 5*size, 1*size, 6*size);
            break;

        case Cube2DMode2x3:
            width = 2*size;
            height = 3*size;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(0*size, 1*size, 1*size, 2*size);
            coordsNegY = Recti(1*size, 1*size, 2*size, 2*size);
            coordsPosZ = Recti(0*size, 2*size, 1*size, 3*size);
            coordsNegZ = Recti(1*size, 2*size, 2*size, 3*size);
            break;

        case Cube2DMode3x2:
            width = 3*size;
            height = 2*size;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(2*size, 0*size, 3*size, 1*size);
            coordsNegY = Recti(0*size, 1*size, 1*size, 2*size);
            coordsPosZ = Recti(1*size, 1*size, 2*size, 2*size);
            coordsNegZ = Recti(2*size, 1*size, 3*size, 2*size);
            break;

        case Cube2DMode6x1:
            width = 6*size;
            height = 1*size;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(2*size, 0*size, 3*size, 1*size);
            coordsNegY = Recti(3*size, 0*size, 4*size, 1*size);
            coordsPosZ = Recti(4*size, 0*size, 5*size, 1*size);
            coordsNegZ = Recti(5*size, 0*size, 6*size, 1*size);
            break;
        }
        
        Managed<Bitmap> bmp = new Bitmap(width, height, posX->GetFormat());

        bmp->Blit(posX, posX->GetRect(), coordsPosX.TopLeft());
        bmp->Blit(negX, negX->GetRect(), coordsNegX.TopLeft());
        bmp->Blit(posY, posY->GetRect(), coordsPosY.TopLeft());
        bmp->Blit(negY, negY->GetRect(), coordsNegY.TopLeft());
        bmp->Blit(posZ, posZ->GetRect(), coordsPosZ.TopLeft());
        bmp->Blit(negZ, negZ->GetRect(), coordsNegZ.TopLeft());

        return Create(bmp);
    }
Example #14
    Texture* Texture::Convert2DToCube()
    {
        Managed<Bitmap> bmp = Faces[0].MipLevels[0]->ToBitmap();
        Cube2DMode mode = DetectCubeFrom2DMode(bmp->GetWidth(), bmp->GetHeight());

        int size;
        Recti coordsPosX, coordsNegX, coordsPosY, coordsNegY, coordsPosZ, coordsNegZ;

        switch (mode)
        {
        case Cube2DMode1x6:
            size = bmp->GetWidth();
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(0*size, 1*size, 1*size, 2*size);
            coordsPosY = Recti(0*size, 2*size, 1*size, 3*size);
            coordsNegY = Recti(0*size, 3*size, 1*size, 4*size);
            coordsPosZ = Recti(0*size, 4*size, 1*size, 5*size);
            coordsNegZ = Recti(0*size, 5*size, 1*size, 6*size);
            break;

        case Cube2DMode2x3:
            size = bmp->GetWidth() / 2;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(0*size, 1*size, 1*size, 2*size);
            coordsNegY = Recti(1*size, 1*size, 2*size, 2*size);
            coordsPosZ = Recti(0*size, 2*size, 1*size, 3*size);
            coordsNegZ = Recti(1*size, 2*size, 2*size, 3*size);
            break;

        case Cube2DMode3x2:
            size = bmp->GetHeight() / 2;
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(2*size, 0*size, 3*size, 1*size);
            coordsNegY = Recti(0*size, 1*size, 1*size, 2*size);
            coordsPosZ = Recti(1*size, 1*size, 2*size, 2*size);
            coordsNegZ = Recti(2*size, 1*size, 3*size, 2*size);
            break;

        case Cube2DMode6x1:
            size = bmp->GetHeight();
            coordsPosX = Recti(0*size, 0*size, 1*size, 1*size);
            coordsNegX = Recti(1*size, 0*size, 2*size, 1*size);
            coordsPosY = Recti(2*size, 0*size, 3*size, 1*size);
            coordsNegY = Recti(3*size, 0*size, 4*size, 1*size);
            coordsPosZ = Recti(4*size, 0*size, 5*size, 1*size);
            coordsNegZ = Recti(5*size, 0*size, 6*size, 1*size);
            break;
        }

        Managed<Bitmap> posX = new Bitmap(size, size, bmp->GetFormat());
        Managed<Bitmap> negX = new Bitmap(size, size, bmp->GetFormat());
        Managed<Bitmap> posY = new Bitmap(size, size, bmp->GetFormat());
        Managed<Bitmap> negY = new Bitmap(size, size, bmp->GetFormat());
        Managed<Bitmap> posZ = new Bitmap(size, size, bmp->GetFormat());
        Managed<Bitmap> negZ = new Bitmap(size, size, bmp->GetFormat());

        posX->Blit(bmp, coordsPosX, Vector2i(0, 0));
        negX->Blit(bmp, coordsNegX, Vector2i(0, 0));
        posY->Blit(bmp, coordsPosY, Vector2i(0, 0));
        negY->Blit(bmp, coordsNegY, Vector2i(0, 0));
        posZ->Blit(bmp, coordsPosZ, Vector2i(0, 0));
        negZ->Blit(bmp, coordsNegZ, Vector2i(0, 0));

        return CreateCube(posX, negX, posY, negY, posZ, negZ);
    }
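
Every case in the two conversions above derives a face rect from a (column, row) cell in a grid of size-by-size tiles. A hypothetical helper capturing that pattern:

    // Recti(left, top, right, bottom), matching the min/max form used above.
    static Recti FaceCell(int col, int row, int size)
    {
        return Recti(col * size, row * size, (col + 1) * size, (row + 1) * size);
    }
    // e.g. Cube2DMode3x2: coordsPosY = FaceCell(2, 0, size); coordsNegY = FaceCell(0, 1, size);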
Example #15
GMO double linkWindowHandle(void* windowHandle) {
	const int eyeRenderMultisample = 1;
	const int backBufferMultisample = 1;

	//HWND handle = GetWindow((HWND)(int)windowHandle, GW_OWNER);
	//HWND handle = (HWND) (int) windowHandle;
	HWND handle = (HWND) windowHandle;

	/*
	 * This block retrieves the passed window's title, for debugging/testing only
	LPWSTR title;
	GetWindowText(handle, title, GetWindowTextLength(handle) + 1);
	MessageBox(NULL, (LPCWSTR)title, (LPCWSTR)title, MB_ICONWARNING);
	MessageBoxA(NULL, (LPCSTR)title, (LPCSTR)title, MB_ICONWARNING);
	*/
	hWnd = handle;
	ovrHmd_AttachToWindow(HMD, handle, NULL, NULL);

	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

	bool UseAppWindowFrame = true;//(HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
	HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), FullScreen, backBufferMultisample, 1,&pRender, handle);
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;
	
    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc)) return -2;

	// Some settings might be changed here later on.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);// | ovrHmdCap_ExtendDesktop);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);
	return 1;
}
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));


    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommenedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,
                                                           eyeFov[0], DesiredPixelDensity);

        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommenedTexSize);

        EyeRenderSize[0] = Sizei::Min(textureSize, recommenedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }

    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  eyeFov[0], DesiredPixelDensity);
        Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            Sizei  rtSize(recommenedTex0Size.w + recommenedTex1Size.w,
                          Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more than the recommended size; this also ensures that the resolution
            // reported in the overlay HUD is updated correctly for FOV/pixel density changes.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommenedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }

        else
        {
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommenedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommenedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommenedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommenedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) |
                       ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);


	ovrRenderAPIConfig config         = pRender->Get_ovrRenderAPIConfig();
    unsigned           distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps,
                                    eyeFov, EyeRenderDesc ))
    {
        // Fail exit? TBD
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;
      
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }    

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov,  0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov,  0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // 2D layer is 0.8 meters from the camera
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    RenderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query OGL texture data.
	EyeTexture[0].OGL.Header.API			= ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize	= RenderTargetSize;
	EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
	EyeTexture[0].OGL.TexId					= pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1]							= EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport	= EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API					= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize				= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample			= backBufferMultisample;
	oglcfg.OGL.Window						= window;
	oglcfg.OGL.DC							= GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);	

    #else
	//Distortion vertex shader
	const char* vertexShader =
		"#version 110																			\n"
		"uniform vec2 EyeToSourceUVScale;														\n"
		"uniform vec2 EyeToSourceUVOffset;														\n"
		"uniform mat4 EyeRotationStart;															\n"
		"uniform mat4 EyeRotationEnd;															\n"
		"attribute vec2 Position;																\n"
		"attribute vec2 inTWLF_V;																\n"		
		"attribute vec2 inTexCoord0;															\n"
		"attribute vec2 inTexCoord1;															\n"
		"attribute vec2 inTexCoord2;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);									\n"
		"vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);									\n"
		"vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);									\n"
		"float timewarpLerpFactor = inTWLF_V.x;													\n"
		"float Vignette = inTWLF_V.y;															\n"
		"vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )								\n"
		"{																						\n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"   vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );	\n"
		// Project them back onto the Z=1 plane of the rendered images.
		"   vec2 flattened = (transformed.xy  / transformed.z );								\n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"   return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);					\n"
		"}																						\n"
		"mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )										\n"
		"{																						\n"
		"	return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
		"}																						\n"
		"void main()																			\n"
		"{																						\n"
		"   mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
		"   oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);							\n"
		"   oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);							\n"
		"   oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);							\n"
		"   oPosition = vec4( Position.xy , 0.500000, 1.00000);									\n"
		"   oVignette = Vignette;																\n"
		"   gl_Position = oPosition;															\n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"#version 110																			\n"
		"uniform sampler2D Texture0;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"void main()																			\n"
		"{																						\n"
		// 3 samples for fixing chromatic aberrations
		"   float R = texture2D(Texture0, oTexCoord0.xy).r;										\n"
		"   float G = texture2D(Texture0, oTexCoord1.xy).g;										\n"
		"   float B = texture2D(Texture0, oTexCoord2.xy).b;										\n"
		"   gl_FragColor = (oVignette*vec4(R,G,B,1));											\n"
		"}";

	pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
                                    ovrTrackingCap_MagYawCorrection |
                                    ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
Example #18
Recti GuiSprite::getBounds() const
{
	return Recti(position, position + textureBounds.max - textureBounds.min);
}
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
    static ovrPosef eyeRenderPose[2]; 

	// Start timing
    #if SDK_RENDER
	ovrHmd_BeginFrame(HMD, 0); 
    #else
	ovrHmd_BeginFrameTiming(HMD, 0); 
    // Retrieve data useful for handling the Health and Safety Warning - unused, but here for reference
    ovrHSWDisplayState hswDisplayState;
    ovrHmd_GetHSWDisplayState(HMD, &hswDisplayState);
    #endif

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float    BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
//	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = Util_RespondToControls(BodyYaw, HeadPos, eyeRenderPose[1].Orientation);

    pRender->BeginScene();
    
	// Render the two undistorted eye views into their render buffers.
    if (!freezeEyeRender) // freeze to debug for time warp
    {
        pRender->SetRenderTarget ( pRendertargetTexture );
        pRender->SetViewport (Recti(0,0, pRendertargetTexture->GetWidth(),
                                         pRendertargetTexture->GetHeight() ));  
        pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
		{
            ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
            eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

            // Get view and projection matrices
			Matrix4f rollPitchYaw       = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0,1,0));
			Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0,0,-1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
            Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
    }
    pRender->FinishScene();

    #if SDK_RENDER	// Let OVR do distortion rendering, Present and flush/sync
	ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    #else
	// Clear screen
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	// Setup shader
	ShaderFill distortionShaderFill(Shaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);
	distortionShaderFill.SetInputLayout(VertexIL);

	for(int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get and set shader constants
		Shaders->SetUniform2f("EyeToSourceUVScale",   UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		Shaders->SetUniform2f("EyeToSourceUVOffset",  UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
 		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		Shaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		Shaders->SetUniform4x4f("EyeRotationEnd",   timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	pRender->SetDefaultRenderTarget();

	pRender->Present( true ); // Vsync enabled

    // Only flush the GPU for ExtendDesktop; not needed in Direct App Rendering with the Oculus driver.
    if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
		pRender->WaitUntilGpuIdle();  
	ovrHmd_EndFrameTiming(HMD);
    #endif
}
Example #20
 virtual void SetUp() {
   fullRect = Recti(8, 6);
 }
int Init()
{
	ovr_Initialize();
	HMD = ovrHmd_Create(0);
	if (!HMD)
	{
		MessageBox(NULL, "Oculus Rift not detected.", "", MB_OK);
		return 1;
	}
	if (HMD->ProductName[0] == '\0')
	{
		MessageBox(NULL, "Rift detected, display not enabled.", "", MB_OK);
	}

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
	bool UseAppWindowFrame = true;
	HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution), 
		FullScreen, backBufferMultisample, UseAppWindowFrame, &pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMD->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
	RenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	RenderTargetSize.h = max(recommenedTex0Size.h, recommenedTex1Size.h);

	RenderTargetSize.w = HMD->Resolution.w;
	RenderTargetSize.h = HMD->Resolution.h;

	//const int eyeRenderMultisample = 1;
	pRendertargetTexture = pRender->CreateRenderTarget(RenderTargetSize.w/2, RenderTargetSize.h/2);
	//pRendertargetTexture = pRender->CreateRenderTarget(512, 512);
	RenderTargetSize.w = pRendertargetTexture->Width;
	RenderTargetSize.h = pRendertargetTexture->Height;

	IDirect3DSurface9 *zb = 0;
	pRender->Device->GetDepthStencilSurface(&zb);
	D3DSURFACE_DESC d;
	zb->GetDesc(&d);

	// Initialize eye rendering information.
	// The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
	ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] };

	EyeRenderViewport[0].Pos  = Vector2i(0, 0);
	EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
	EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	// ---------------------

	DistortionShaders = pRender->CreateShaderSet();
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_Distortion));
	DistortionShaders->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_Distortion));
	DistortionDecl = VertexDecl::GetDecl(VertexType_Distortion);

	for (int eyeNum = 0; eyeNum < 2; ++eyeNum)
	{
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum],
			ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		MeshVBs[eyeNum] = pRender->CreateVertexBuffer();
		MeshVBs[eyeNum]->Data(meshData.pVertexData, sizeof(ovrDistortionVertex)*meshData.VertexCount);
		MeshIBs[eyeNum] = pRender->CreateIndexBuffer();
		MeshIBs[eyeNum]->Data(meshData.pIndexData, sizeof(unsigned short)*meshData.IndexCount);

		MeshVBCnts[eyeNum] = meshData.VertexCount;
		MeshIBCnts[eyeNum] = meshData.IndexCount;
		ovrHmd_DestroyDistortionMesh(&meshData);

		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType)eyeNum, eyeFov[eyeNum]);

		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum], RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	ovrHmd_ConfigureTracking(HMD,
		ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);

	// ---------------------

	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

	// texture model
	ShaderSet* ss = pRender->CreateShaderSet();
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Vertex, VShader_MVP_UV));
	ss->SetShader(pRender->LoadBuiltinShader(Shader_Pixel, PShader_UV));

	Model<VertexXYZUV> *pModel2 = new Model<VertexXYZUV>();
	pModel2->Decl = VertexDecl::GetDecl(VertexType_XYZUV);
	pModel2->Fill = new ShaderFill(ss);

	//Texture* ttt = new Texture(pRender);
	//ttt->LoadFromFile("face.tga");
	pModel2->Fill->SetTexture(0, pRendertargetTexture);

	pModel2->AddVertex(VertexXYZUV(0.5f, -1.0f, 0.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, -1.0f, 0.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(0.5f, 1.0f, 0.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(2.5f, 1.0f, 0.0f, 1.0f, 1.0f));

	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, -1.0f, 0.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, -1.0f, 1.0f, 0.0f));
	pModel2->AddVertex(VertexXYZUV(-1.0f, -1.5f, 1.0f, 0.0f, 1.0f));
	pModel2->AddVertex(VertexXYZUV(1.0f, -1.5f, 1.0f, 1.0f, 1.0f));

	pModel2->AddTriangle(0, 1, 2);
	pModel2->AddTriangle(2, 1, 3);
	pModel2->AddTriangle(4, 5, 6);
	pModel2->AddTriangle(6, 5, 7);

	pScene = new Scene;
	pScene->World.Add(pModel2);

    return (0);
}
//-------------------------------------------------------------------------------------
void ProcessAndRender()
{
#if 0
	//HRESULT hr = pRender->Device->SetRenderState(D3DRS_ZENABLE, D3DZB_TRUE);
	//OVR_ASSERT(SUCCEEDED(hr));

	pRender->Clear();
	pRender->BeginScene();

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	//Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 2, 800.0f / 600.0f, 1.0f, 10000.0f);

	ovrFovPort fov = { 1, 1, 1, 1 };
	Matrix4f proj = ovrMatrix4f_Projection(fov, 1.0f, 10000.0f, false);

	pRender->SetProjection(proj);
	pRoomScene->Render(pRender, view);

	pRender->EndScene();
	pRender->Present();
#endif

	static ovrPosef eyeRenderPose[2];
	ovrHmd_BeginFrameTiming(HMD, 0);

	// Adjust eye position and rotation from controls, maintaining y position from HMD.
	static float BodyYaw(3.141592f);
	static Vector3f HeadPos(0.0f, 1.6f, -5.0f);
	HeadPos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, HeadPos.y);
	bool freezeEyeRender = false;

	pRender->BeginScene();

	if (!freezeEyeRender)
	{
		pRender->SetRenderTarget(pRendertargetTexture);
		pRender->SetViewport(Recti(0, 0, pRendertargetTexture->Width, pRendertargetTexture->Height));
		pRender->Clear();
		for (int eyeIndex = 0; eyeIndex < ovrEye_Count; ++eyeIndex)
		{
			ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
			eyeRenderPose[eye] = ovrHmd_GetEyePose(HMD, eye);

			// Get view and projection matrices
			Matrix4f rollPitchYaw = Matrix4f::RotationY(BodyYaw);
			Matrix4f finalRollPitchYaw = rollPitchYaw * Matrix4f(eyeRenderPose[eye].Orientation);
			Vector3f finalUp = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
			Vector3f finalForward = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
			Vector3f shiftedEyePos      = HeadPos + rollPitchYaw.Transform(eyeRenderPose[eye].Position);
			//Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp); 
			//Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, true);
			Matrix4f view = Matrix4f::LookAtLH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
			Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.01f, 10000.0f, false);

			pRender->SetViewport(Recti(EyeRenderViewport[eye]));
			pRender->SetProjection(proj);
			pRender->SetDepthMode(true, true);
			pRoomScene->Render(pRender, Matrix4f::Translation(EyeRenderDesc[eye].ViewAdjust) * view);
		}
	}

	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	ShaderFill distortionShaderFill(DistortionShaders);
	distortionShaderFill.SetTexture(0, pRendertargetTexture);

	for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++)
	{
		// Get and set shader constants
		DistortionShaders->SetUniform2f("EyeToSourceUVScale", UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
		DistortionShaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
		DistortionShaders->SetUniform4x4f("EyeRotationStart", timeWarpMatrices[0]);  //Nb transposed when set
		DistortionShaders->SetUniform4x4f("EyeRotationEnd", timeWarpMatrices[1]);  //Nb transposed when set
		// Perform distortion
		pRender->Render(&distortionShaderFill, DistortionDecl, MeshVBs[eyeNum], MeshIBs[eyeNum],
			sizeof(ovrDistortionVertex), Matrix4f(), MeshVBCnts[eyeNum], MeshIBCnts[eyeNum], Prim_Triangles);
		//Render(fill, vertices, indices, stride, Matrix4f(), 0,(int)vertices->GetSize(), Prim_Triangles, false);
		//(&distortionShaderFill, MeshVBs[eyeNum], MeshIBs[eyeNum],sizeof(ovrDistortionVertex));
	}

	/*
	pRender->SetDefaultRenderTarget();
	pRender->SetFullViewport();
	pRender->Clear(0.0f, 0.0f, 0.0f, 0.0f);

	Vector3f eye(0.0f, 0.0f, -5.0f);
	Vector3f lookat(0.0f, 0.0f, 0.0f);
	Vector3f up(0.0f, 1.0f, 0.0f);
	Matrix4f view = Matrix4f::LookAtLH(eye, lookat, up);
	Matrix4f proj = Matrix4f::PerspectiveLH(3.14145f / 4, 800.0f / 600.0f, 1.0f, 10000.0f);

	pRender->Proj = proj;
	pScene->Render(pRender, view);
	*/
	//pRender->SetDefaultRenderTarget();

	pRender->EndScene();
	pRender->Present();

	//if (HMD->HmdCaps & ovrHmdCap_ExtendDesktop)
	//	pRender->WaitUntilG
	ovrHmd_EndFrameTiming(HMD);
}
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
	HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
	if (HMD->ProductName[0] == '\0') 
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = !(HMD->HmdCaps & ovrHmdCap_ExtendDesktop);
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
    Sizei RenderTargetSize;
    RenderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
    RenderTargetSize.h = max(recommendedTex0Size.h, recommendedTex1Size.h);

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;
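    // Both eyes share one render target, laid out side by side; the (w + 1) / 2
    // rounding keeps the right eye's viewport correct when the width is odd.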

    #if SDK_RENDER
	// Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);
    #else
	//Shader vertex format
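	// The layout mirrors ovrDistortionVertex: ScreenPosNDC (float2), TimeWarpFactor (float),
	// VignetteFactor (float), then one tan-eye-angle UV pair per color channel (R, G, B).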
	D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
		{"Position", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 1, DXGI_FORMAT_R32_FLOAT,      0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 2, DXGI_FORMAT_R32_FLOAT,      0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT,   0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT,   0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};
	
	//Distortion vertex shader
	const char* vertexShader = 
		"float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
		"float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
		"float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
		"{                                                                                      \n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
		// Project them back onto the Z=1 plane of the rendered images.
		"    float2 flattened = (transformed.xy / transformed.z);                               \n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
		"}                                                                                      \n"
		"void main(in float2  Position   : POSITION,  in float timewarpLerpFactor : POSITION1,  \n"
		"          in float   Vignette   : POSITION2, in float2 TexCoord0         : TEXCOORD0,  \n"
		"          in float2  TexCoord1  : TEXCOORD1, in float2 TexCoord2         : TEXCOORD2,  \n"
		"          out float4 oPosition  : SV_Position,                                         \n"
		"          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
		"          out float2 oTexCoord2 : TEXCOORD2, out float  oVignette  : TEXCOORD3)        \n"
		"{                                                                                      \n"
		"    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
		"    oTexCoord0  = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                            \n"
		"    oTexCoord1  = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                            \n"
		"    oTexCoord2  = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                            \n"
		"    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"Texture2D Texture   : register(t0);                                                    \n"
		"SamplerState Linear : register(s0);                                                    \n"
		"float4 main(in float4 oPosition  : SV_Position,  in float2 oTexCoord0 : TEXCOORD0,     \n"
		"            in float2 oTexCoord1 : TEXCOORD1,    in float2 oTexCoord2 : TEXCOORD2,     \n"
		"            in float  oVignette  : TEXCOORD3)    : SV_Target                           \n"
		"{                                                                                      \n"
		// 3 samples for fixing chromatic aberrations
		"    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
		"    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
		"    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
		"    return (oVignette*float4(R,G,B,1));                                                \n"
		"}";
	pRender->InitShaders(vertexShader, pixelShader, &DistortionShaders, &DistortionDecl, DistortionMeshVertexDesc, 6);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

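    // Low persistence and dynamic prediction reduce perceived motion blur and tracking latency.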
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
//-------------------------------------------------------------------------------------
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR and creates the Rift handle
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = !(HMD->HmdCaps & ovrHmdCap_ExtendDesktop);
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
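        // HmdToEyeViewOffset is the per-eye (half-IPD) translation that
        // ovrHmd_GetEyePoses folds into the eye poses fetched below.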
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
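		// Hand both eye textures to the SDK, which distorts, timewarps, and presents them.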
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
Example #25
void VR::initD3D()
{
#if defined(_OVR_)
	Sizei bufferSize;
	bufferSize.w = buffersize_width;
	bufferSize.h = buffersize_height;


	// xapp->d3d11Device.Get() will not work, we need a real D3D11 device

	//for (int i = 0; i < xapp->FrameCount; i++) {
	//	ID3D12Resource *resource = xapp->renderTargets[i].Get();
	//	D3D12_RESOURCE_DESC rDesc = resource->GetDesc();
	//	D3D11_RESOURCE_FLAGS d3d11Flags = { D3D11_BIND_RENDER_TARGET };
	//	ThrowIfFailed(xapp->d3d11On12Device->CreateWrappedResource(
	//		resource,
	//		&d3d11Flags,
	//		D3D12_RESOURCE_STATE_RENDER_TARGET,
	//		D3D12_RESOURCE_STATE_PRESENT,
	//		IID_PPV_ARGS(&xapp->wrappedBackBuffers[i])
	//		));
	//	//xapp->d3d11On12Device->AcquireWrappedResources(xapp->wrappedBackBuffers[i].GetAddressOf(), 1);
	//}

	ovrTextureSwapChainDesc dsDesc = {};
	dsDesc.Type = ovrTexture_2D;
	dsDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
	dsDesc.ArraySize = 1;
	dsDesc.Width = bufferSize.w;
	dsDesc.Height = bufferSize.h;
	dsDesc.MipLevels = 1;
	dsDesc.SampleCount = 1;
	dsDesc.StaticImage = ovrFalse;
	dsDesc.MiscFlags = ovrTextureMisc_DX_Typeless; // (was ovrTextureMisc_None)
	dsDesc.BindFlags = ovrTextureBind_DX_RenderTarget;
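	// ovrTextureMisc_DX_Typeless allows the UNORM render target views created below,
	// even though the swap chain itself is typed as R8G8B8A8_UNORM_SRGB.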

/*	D3D11_TEXTURE2D_DESC dsDesc;
	dsDesc.Width = bufferSize.w;
	dsDesc.Height = bufferSize.h;
	dsDesc.MipLevels = 1;
	dsDesc.ArraySize = 1;
	dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;//DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;
	dsDesc.SampleDesc.Count = 1;
	dsDesc.SampleDesc.Quality = 0;
	dsDesc.Usage = D3D11_USAGE_DEFAULT;
	dsDesc.CPUAccessFlags = 0;
	dsDesc.MiscFlags = D3D11_RESOURCE_MISC_SHARED;
	dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
	*/
	if (OVR_SUCCESS(ovr_CreateTextureSwapChainDX(session, xapp->commandQueue.Get() /*xapp->reald3d11Device.Get()*/, &dsDesc, &textureSwapChain)))
	{
		int count = 0;
		ovr_GetTextureSwapChainLength(session, textureSwapChain, &count);
		texRtv.resize(count);
		texResource.resize(count);
		// Create descriptor heaps.
		UINT rtvDescriptorSize;
		{
			// Describe and create a render target view (RTV) descriptor heap.

			D3D12_DESCRIPTOR_HEAP_DESC rtvHeapDesc = {};
			rtvHeapDesc.NumDescriptors = count;
			rtvHeapDesc.Type = D3D12_DESCRIPTOR_HEAP_TYPE_RTV;
			rtvHeapDesc.Flags = D3D12_DESCRIPTOR_HEAP_FLAG_NONE;
			ThrowIfFailed(xapp->device->CreateDescriptorHeap(&rtvHeapDesc, IID_PPV_ARGS(&rtvVRHeap)));
			rtvVRHeap->SetName(L"rtVRHeap_xapp");

			rtvDescriptorSize = xapp->device->GetDescriptorHandleIncrementSize(D3D12_DESCRIPTOR_HEAP_TYPE_RTV);
		}
		for (int i = 0; i < count; ++i)
		{
			ovr_GetTextureSwapChainBufferDX(session, textureSwapChain, i, IID_PPV_ARGS(&texResource[i]));
			// Old D3D11 render-target-view path, kept for reference:
			//ID3D11Texture2D* tex = nullptr;
			//xapp->reald3d11Device.Get()->CreateRenderTargetView(tex, nullptr, &texRtv[i]);

			D3D12_RENDER_TARGET_VIEW_DESC rtvd = {};
			rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
			rtvd.ViewDimension = D3D12_RTV_DIMENSION_TEXTURE2D;
			CD3DX12_CPU_DESCRIPTOR_HANDLE rtvHandle(rtvVRHeap->GetCPUDescriptorHandleForHeapStart(), i, rtvDescriptorSize);
			texRtv[i] = rtvHandle;
			xapp->device->CreateRenderTargetView(texResource[i], /*nullptr*/&rtvd, texRtv[i]);

			//ComPtr<IDXGIResource> dxgires;
			//tex->QueryInterface<IDXGIResource>(&dxgires);
			////Log("dxgires = " << dxgires.GetAddressOf() << endl);
			//HANDLE shHandle;
			//dxgires->GetSharedHandle(&shHandle);
			////Log("shared handle = " << shHandle << endl);
			//xapp->d3d11Device->OpenSharedResource(shHandle, IID_PPV_ARGS(&xapp->wrappedTextures[i]));
			//tex->Release();
		}
	}
	// Initialize our single full screen Fov layer.
	layer.Header.Type = ovrLayerType_EyeFov;
	layer.Header.Flags = 0;
	layer.ColorTexture[0] = textureSwapChain;
	layer.ColorTexture[1] = nullptr; // both eyes sample ColorTexture[0] (was: textureSwapChain)
	layer.Fov[0] = eyeRenderDesc[0].Fov;
	layer.Fov[1] = eyeRenderDesc[1].Fov;
	layer.Viewport[0] = Recti(0, 0, bufferSize.w / 2, bufferSize.h);
	layer.Viewport[1] = Recti(bufferSize.w / 2, 0, bufferSize.w / 2, bufferSize.h);
	// ld.RenderPose and ld.SensorSampleTime are updated later per frame.
#endif
#if defined(_DEBUG)
	// SetStablePowerState requires Windows 10 to be in developer mode:
	// Settings -> Update & Security -> For developers -> enable "Developer mode".
	//xapp->device->SetStablePowerState(true);
#endif
}
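
// Not part of the original example: a minimal per-frame sketch of how the swap chain
// and layer initialized above are typically driven with the OVR 1.x API. The drawEyes()
// helper, frameIndex counter, and eyePoses array are hypothetical placeholders for
// state this class would own.
#if defined(_OVR_)
void VR::submitFrameSketch(long long frameIndex, const ovrPosef eyePoses[2])
{
	// Acquire the swap chain buffer the compositor expects us to render into.
	int index = 0;
	ovr_GetTextureSwapChainCurrentIndex(session, textureSwapChain, &index);

	drawEyes(texRtv[index]); // hypothetical: draw both eye viewports into this RTV

	// Release the buffer back to the compositor, then submit the layer;
	// the compositor applies distortion and presents to the HMD.
	ovr_CommitTextureSwapChain(session, textureSwapChain);
	layer.RenderPose[0] = eyePoses[0];
	layer.RenderPose[1] = eyePoses[1];
	ovrLayerHeader* layerHeader = &layer.Header;
	ovr_SubmitFrame(session, frameIndex, nullptr, &layerHeader, 1);
}
#endif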
Example #26
	Recti TextButton::getBounds() const
	{
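		// Stub: returns a default-constructed (empty) rectangle.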
		return Recti();
	}