Example No. 1
/**
 * ERRORCODE 0 => OK
 * ERRORCODE 1 => No HMD found
 * ERRORCODE 2 => Sensor Start failed
 */
int init_ovr(){

	// Init the OVR library
	ovr_Initialize();

	// Create the software device and connect the physical device
	hmd = ovrHmd_Create(0);
	//if (!hmd) hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
	if (!hmd)
		return 1;

	// Start the sensor input, requiring the capabilities the HMD reports
	if ( !ovrHmd_ConfigureTracking(hmd, hmd->TrackingCaps, hmd->TrackingCaps) )
		return 2;

	 if ( mode == MODE_DEBUG ){
		 recommendedTex0Size = OVR::Sizei ( 1280, 720 );
		 recommendedTex1Size = OVR::Sizei ( 1280, 720 );
		 renderTargetSize.w = 1280;
		 renderTargetSize.h = 720;
	 }else{
		// Configure the texture size (bigger than the screen, for barrel distortion)
		recommendedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.0f);
		recommendedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1.0f);

		renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
		renderTargetSize.h = std::max( recommendedTex0Size.h, recommendedTex1Size.h );
	 }
	
	 return 0;
}
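A minimal caller sketch for the error contract documented above (init_ovr and its codes come from this example; ovr_Shutdown is the standard LibOVR teardown call, and the startup path itself is a hypothetical illustration):

#include <cstdio>
#include <cstdlib>

int main()
{
	switch (init_ovr())
	{
	case 0: break;                                          // OK
	case 1: std::fprintf(stderr, "No HMD found\n");         return EXIT_FAILURE;
	case 2: std::fprintf(stderr, "Sensor start failed\n");  return EXIT_FAILURE;
	}

	// ... run the render loop ...

	ovr_Shutdown(); // release LibOVR once rendering is done
	return EXIT_SUCCESS;
}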
Example No. 2
void OVR_SDL2_app::conf_OVR()
{
    // Configure the renderer. Zeroing the configuration structure causes all
    // display, window, and device specifications to take on the current values
    // put in place by SDL. This should work cross-platform, but doesn't;
    // a workaround is currently (0.4.3b) required under Linux.

    SDL_SysWMinfo info;
    ovrGLConfig   cfg;

    memset(&info, 0, sizeof (SDL_SysWMinfo));
    memset(&cfg,  0, sizeof (ovrGLConfig));

    SDL_VERSION(&info.version);
    SDL_GetWindowWMInfo(window, &info);

    cfg.OGL.Header.API      = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize.w = hmd->Resolution.w;
    cfg.OGL.Header.RTSize.h = hmd->Resolution.h;

#ifdef __linux__
    cfg.OGL.Disp = info.info.x11.display;
    cfg.OGL.Win  = info.info.x11.window;
#endif

    // Set the configuration and receive eye render descriptors in return.

    ovrHmd_ConfigureRendering(hmd, &cfg.Config, ovrDistortionCap_Chromatic
                                              | ovrDistortionCap_TimeWarp
                                              | ovrDistortionCap_Overdrive,
                                                hmd->DefaultEyeFov, erd);

    offset[0] = erd[0].HmdToEyeViewOffset;
    offset[1] = erd[1].HmdToEyeViewOffset;

    // Determine the buffer size required by each eye of the current HMD.

    ovrSizei size[2];

    size[0] = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,  hmd->DefaultEyeFov[0], 1);
    size[1] = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1);

    // Initialize the off-screen render buffers. We're using one buffer per eye
    // instead of concatenating both eyes into a single buffer.

    for (int i = 0; i < 2; i++)
    {
        if ((buffer[i] = new framebuffer(size[i].w, size[i].h)))
        {
            ovrGLTexture *p = reinterpret_cast<ovrGLTexture*>(tex + i);

            memset(p, 0, sizeof (ovrGLTexture));

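            // The memset above zeroes RenderViewport.Pos, so only the sizes need setting here.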
            p->OGL.Header.API                 = ovrRenderAPI_OpenGL;
            p->OGL.Header.TextureSize         = size[i];
            p->OGL.Header.RenderViewport.Size = size[i];
            p->OGL.TexId                      = buffer[i]->color;
        }
    }
}
Example No. 3
void OculusInterface::initOculus(float _devicePixelAspect)
{

  m_devicePixelAspect=_devicePixelAspect;
  std::cout<<"setting device aspect "<<m_devicePixelAspect<<"\n";
  m_hmd = ovrHmd_Create(0);
  if (!m_hmd)
  {
    std::cerr<<"Unable to create HMD: "<< ovrHmd_GetLastError(NULL)<<std::endl;
    std::cerr<<"Attempting to run without HMD\n";
    // If we didn't detect an HMD, create a simulated one for debugging.
    m_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
    if (!m_hmd)
    {   // Failed Hmd creation.
      exit(EXIT_FAILURE);
    }
  }
  m_windowWidth=m_hmd->Resolution.w;
  m_windowHeight=m_hmd->Resolution.h;

  oculusDebug();
  // Start the sensor which provides the Rift’s pose and motion.
  ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
  // let's fill in some info about oculus
  m_eyeres[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left, m_hmd->DefaultEyeFov[0], 1.0);
  m_eyeres[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1],1.0);

	/* and create a single render target texture to encompass both eyes */
	//m_fbWidth = m_eyeres[0].w + m_eyeres[1].w;
	//m_fbHeight = m_eyeres[0].h > m_eyeres[1].h ? m_eyeres[0].h : m_eyeres[1].h;

	ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,m_hmd->DefaultEyeFov[0], m_devicePixelAspect);
	ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right,m_hmd->DefaultEyeFov[1], m_devicePixelAspect);

	// Determine dimensions to fit into a single render target.
	m_fbWidth = recommenedTex0Size.w + recommenedTex1Size.w;
	m_fbHeight = std::max ( recommenedTex0Size.h, recommenedTex1Size.h );


	createRenderTarget();
	createOVRGLConfig();
	createOVRTextureBuffers();
	/* enable low-persistence display and dynamic prediction for latency compensation */
	ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	/* configure SDK rendering and enable chromatic aberration correction, vignetting, and
	 * timewarp, which shifts the image before drawing to counter any latency between the call
	 * to ovrHmd_GetEyePose and ovrHmd_EndFrame.
	 */
	unsigned int dcaps = ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette | ovrDistortionCap_TimeWarp |
		ovrDistortionCap_Overdrive;
	if(!ovrHmd_ConfigureRendering(m_hmd, &m_glcfg.Config, dcaps, m_hmd->DefaultEyeFov, m_eyeRdesc))
	{
		fprintf(stderr, "failed to configure distortion renderer\n");
	}


}
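Once configured, each frame follows the standard SDK-distortion loop (ovrHmd_BeginFrame / ovrHmd_GetEyePoses / ovrHmd_EndFrame, as seen in the later examples). A minimal sketch, assuming the textures created in createOVRTextureBuffers() are exposed as an ovrGLTexture pair; m_eyeTextures is a hypothetical member name:

void OculusInterface::drawFrame()
{
	ovrHmd_BeginFrame(m_hmd, 0);

	ovrVector3f offsets[2] = { m_eyeRdesc[0].HmdToEyeViewOffset,
	                           m_eyeRdesc[1].HmdToEyeViewOffset };
	ovrPosef poses[2];
	ovrHmd_GetEyePoses(m_hmd, 0, offsets, poses, NULL);

	for (int eye = 0; eye < 2; eye++)
	{
		// bind the shared render target, set this eye's half as the viewport,
		// and render the scene from poses[eye] here
	}

	// the SDK applies distortion and timewarp, then presents both eyes
	ovrHmd_EndFrame(m_hmd, poses, &m_eyeTextures[0].Texture);
}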
Example No. 4
int RiftAppSkeleton::ConfigureClientRendering()
{
    if (m_Hmd == NULL)
        return 1;

    ovrSizei l_TextureSizeLeft = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Left, m_Hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei l_TextureSizeRight = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Right, m_Hmd->DefaultEyeFov[1], 1.0f);
    ovrSizei l_TextureSize;
    l_TextureSize.w = l_TextureSizeLeft.w + l_TextureSizeRight.w;
    l_TextureSize.h = std::max(l_TextureSizeLeft.h, l_TextureSizeRight.h);

    // Renderbuffer init - we can use smaller subsets of it easily
    deallocateFBO(m_renderBuffer);
    allocateFBO(m_renderBuffer, l_TextureSize.w, l_TextureSize.h);


    l_EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
    l_EyeTexture[0].OGL.Header.TextureSize.w = l_TextureSize.w;
    l_EyeTexture[0].OGL.Header.TextureSize.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.x = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.y = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.w = l_TextureSize.w/2;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.TexId = m_renderBuffer.tex;

    // Right eye the same, except for the x-position in the texture...
    l_EyeTexture[1] = l_EyeTexture[0];
    l_EyeTexture[1].OGL.Header.RenderViewport.Pos.x = (l_TextureSize.w+1) / 2;

    m_RenderViewports[0] = l_EyeTexture[0].OGL.Header.RenderViewport;
    m_RenderViewports[1] = l_EyeTexture[1].OGL.Header.RenderViewport;

    const int distortionCaps =
        ovrDistortionCap_Chromatic |
        ovrDistortionCap_TimeWarp |
        ovrDistortionCap_Vignette;

    // Generate distortion mesh for each eye
    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        // Allocate & generate distortion mesh vertices.
        ovrHmd_CreateDistortionMesh(
            m_Hmd,
            m_EyeRenderDesc[eyeNum].Eye,
            m_EyeRenderDesc[eyeNum].Fov,
            distortionCaps,
            &m_DistMeshes[eyeNum]);

        ovrHmd_GetRenderScaleAndOffset(
            m_EyeRenderDesc[eyeNum].Fov,
            l_TextureSize,
            m_RenderViewports[eyeNum],
            &m_uvScaleOffsetOut[2*eyeNum]);
    }
    return 0;
}
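Each ovrVector2f pair written by ovrHmd_GetRenderScaleAndOffset above is the EyeToSourceUVScale/EyeToSourceUVOffset pair a client distortion shader consumes (compare the uniforms in Example No. 22). A hedged sketch of the mapping they encode:

// Hypothetical helper: map a distortion-mesh tan-eye-angle to a render-target UV
// using the scale/offset pair filled in by ovrHmd_GetRenderScaleAndOffset().
static ovrVector2f TanEyeAngleToUV(ovrVector2f tanEyeAngle,
                                   ovrVector2f uvScale, ovrVector2f uvOffset)
{
    ovrVector2f uv;
    uv.x = tanEyeAngle.x * uvScale.x + uvOffset.x;
    uv.y = tanEyeAngle.y * uvScale.y + uvOffset.y;
    return uv;
}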
Example No. 5
void RiftSetup()
{
    ovr_Initialize();

    s_hmd = ovrHmd_Create(0);
    if (!s_hmd)
    {
        s_hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
    }

    ovrHmd_GetDesc(s_hmd, &s_hmdDesc);
    DumpHMDInfo(s_hmdDesc);

    uint32_t supportedSensorCaps = ovrSensorCap_Orientation;
    uint32_t requiredSensorCaps = ovrSensorCap_Orientation;
    ovrBool success = ovrHmd_StartSensor(s_hmd, supportedSensorCaps, requiredSensorCaps);
    if (!success) {
        fprintf(stderr, "ERROR: HMD does not have required capabilities!\n");
        exit(2);
    }

    // Figure out dimensions of render target
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Left, s_hmdDesc.DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(s_hmd, ovrEye_Right, s_hmdDesc.DefaultEyeFov[1], 1.0f);
    s_renderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    s_renderTargetSize.h = std::max(recommenedTex0Size.h, recommenedTex1Size.h);

    CreateRenderTarget(s_renderTargetSize.w, s_renderTargetSize.h);

    s_eyeTexture[0].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[0].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[0].Header.RenderViewport.Pos = {0, 0};
    s_eyeTexture[0].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[0]))->OGL.TexId = s_fboTex;

    s_eyeTexture[1].Header.API = ovrRenderAPI_OpenGL;
    s_eyeTexture[1].Header.TextureSize = s_renderTargetSize;
    s_eyeTexture[1].Header.RenderViewport.Pos = {s_renderTargetSize.w / 2, 0};
    s_eyeTexture[1].Header.RenderViewport.Size = {s_renderTargetSize.w / 2, s_renderTargetSize.h};
    ((ovrGLTexture*)(&s_eyeTexture[1]))->OGL.TexId = s_fboTex;

    // Configure ovr SDK Rendering
    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = {s_config->width, s_config->height};
    cfg.OGL.Header.Multisample = 0;
    // TODO: on windows need to set HWND, on Linux need to set other parameters
    if (!ovrHmd_ConfigureRendering(s_hmd, &cfg.Config, s_hmdDesc.DistortionCaps, s_hmdDesc.DefaultEyeFov, s_eyeRenderDesc))
    {
        fprintf(stderr, "ERROR: HMD configure rendering failed!\n");
        exit(3);
    }
}
Example No. 6
VR::VR(Game &game)
{
    // create HMD
    if (!(m_hmd = ovrHmd_Create(0)))
    {
        std::cerr << "couldn't create Oculus HMD, falling back to debug HMD"
            << std::endl;
        if (!(m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2)))
            throw Error("couldn't create debug HMD");
    }
    orient_window(game);

    // enable position and rotation tracking
    ovrHmd_ConfigureTracking(m_hmd, ovrTrackingCap_Orientation
        | ovrTrackingCap_MagYawCorrection
        | ovrTrackingCap_Position, 0);

    // calculate framebuffer resolution and create framebuffer
    ovrSizei eye_res[2];
    eye_res[0] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,
        m_hmd->DefaultEyeFov[0], 1.0);
    eye_res[1] = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right,
        m_hmd->DefaultEyeFov[1], 1.0);
    m_fb_width = eye_res[0].w + eye_res[1].w;
    m_fb_height = eye_res[0].h > eye_res[1].h ? eye_res[0].h : eye_res[1].h;
    update_fb();

    // fill in ovrGLConfig
    ovrGLConfig glcfg;
    memset(&glcfg, 0, sizeof glcfg);
    glcfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    glcfg.OGL.Header.RTSize = m_hmd->Resolution;
    glcfg.OGL.Header.Multisample = 1;
    glcfg.OGL.Window = GetActiveWindow();
    glcfg.OGL.DC = wglGetCurrentDC();
    if (!(m_hmd->HmdCaps & ovrHmdCap_ExtendDesktop))
        ovrHmd_AttachToWindow(m_hmd, glcfg.OGL.Window, 0, 0);

    // enable HMD capabilities and configure SDK distortion rendering
    ovrHmd_SetEnabledCaps(m_hmd, ovrHmdCap_LowPersistence
        | ovrHmdCap_DynamicPrediction);
    if (!ovrHmd_ConfigureRendering(m_hmd, &glcfg.Config,
        ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette
        | ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
        m_hmd->DefaultEyeFov, m_eye_rdesc))
        throw Error("failed to configure distortion rendering");

    // disable health/safety warning
    ovrhmd_EnableHSWDisplaySDKRender(m_hmd, 0);
}
Example No. 7
  virtual void initGl() {
    CubeScene::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.RTSize = hmd->Resolution;
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Chromatic;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
      distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArg = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrTextureHeader & textureHeader = textures[eye].Header;
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      eyeArg.frameBuffer.init(Rift::fromOvr(texSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = eyeArg.frameBuffer.color->texture;

      ovrVector3f offset = eyeRenderDescs[eye].ViewAdjust;
      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);

      eyeArg.projection = Rift::fromOvr(projection);
      eyeArg.modelviewOffset = glm::translate(glm::mat4(), Rift::fromOvr(offset));
    });
  }
Example No. 8
void Oculus::ConfigureEyeViewportInformation() {
	std::array<int, 2> render_target_size;
	eye_fovs[0] = head_mounted_display->DefaultEyeFov[0];
	eye_fovs[1] = head_mounted_display->DefaultEyeFov[1];
	ovrSizei left_eye_texture_size = ovrHmd_GetFovTextureSize(
		head_mounted_display, ovrEye_Left,
		eye_fovs[0], 1.0f);
	ovrSizei right_eye_texture_size = ovrHmd_GetFovTextureSize(
		head_mounted_display, ovrEye_Right,
		eye_fovs[1], 1.0f);
	render_target_size[0] = left_eye_texture_size.w + right_eye_texture_size.w;
	render_target_size[1] = max(left_eye_texture_size.h, right_eye_texture_size.h);
	render_target_viewport = { { 0, 0 }, { render_target_size[0], render_target_size[1] } };
	eye_viewports[0] = { { 0, 0 }, { render_target_size[0] / 2, render_target_size[1] } };
	eye_viewports[1] = { { (render_target_size[0] + 1) / 2, 0 }, { render_target_size[0] / 2, render_target_size[1] } };
}
Example No. 9
  SimpleScene() {
    ipd = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
    eyeHeight = ovrHmd_GetFloat(hmd, OVR_KEY_PLAYER_HEIGHT, OVR_DEFAULT_PLAYER_HEIGHT);
    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
      SAY_ERR("Could not attach to sensor device");
    }
    for_each_eye([&](ovrEyeType eye){
      textureIds[eye] = 0;
    });

    memset(eyeTextures, 0, 2 * sizeof(ovrGLTexture));
    float eyeHeight = 1.5f;
    player = glm::inverse(glm::lookAt(
      glm::vec3(0, eyeHeight, 4),
      glm::vec3(0, eyeHeight, 0),
      glm::vec3(0, 1, 0)));

    for_each_eye([&](ovrEyeType eye){
      ovrSizei eyeTextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->MaxEyeFov[eye], 1.0f);

      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = eyeTextureSize;
      eyeTextureHeader.RenderViewport.Size = eyeTextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;
    });

    resetCamera();
  }
Example No. 10
  void initGl() {
    RiftGlfwApp::initGl();
    for_each_eye([&](ovrEyeType eye){
      EyeArg & eyeArg = eyeArgs[eye];
      ovrFovPort fov = hmdDesc.DefaultEyeFov[eye];
      ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(hmd, eye, fov);

      // Set up the per-eye projection matrix
      eyeArg.projection = Rift::fromOvr(
        ovrMatrix4f_Projection(fov, 0.01, 100000, true));
      eyeArg.viewOffset = glm::translate(glm::mat4(), Rift::fromOvr(renderDesc.ViewAdjust));
      ovrRecti texRect;
      texRect.Size = ovrHmd_GetFovTextureSize(hmd, eye,
        hmdDesc.DefaultEyeFov[eye], 1.0f);
      texRect.Pos.x = texRect.Pos.y = 0;

      eyeArg.frameBuffer.init(Rift::fromOvr(texRect.Size));

      ovrVector2f scaleAndOffset[2];
      ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size,
        texRect, scaleAndOffset);
      eyeArg.scale = Rift::fromOvr(scaleAndOffset[0]);
      eyeArg.offset = Rift::fromOvr(scaleAndOffset[1]);

      ovrHmd_CreateDistortionMesh(hmd, eye, fov, 0, &eyeArg.mesh);

      eyeArg.meshVao = gl::VertexArrayPtr(new gl::VertexArray());
      eyeArg.meshVao->bind();

      eyeArg.meshIbo = gl::IndexBufferPtr(new gl::IndexBuffer());
      eyeArg.meshIbo->bind();
      size_t bufferSize = eyeArg.mesh.IndexCount * sizeof(unsigned short);
      eyeArg.meshIbo->load(bufferSize, eyeArg.mesh.pIndexData);

      eyeArg.meshVbo = gl::VertexBufferPtr(new gl::VertexBuffer());
      eyeArg.meshVbo->bind();
      bufferSize = eyeArg.mesh.VertexCount * sizeof(ovrDistortionVertex);
      eyeArg.meshVbo->load(bufferSize, eyeArg.mesh.pVertexData);

      size_t stride = sizeof(ovrDistortionVertex);
      size_t offset = offsetof(ovrDistortionVertex, Pos);
      glEnableVertexAttribArray(gl::Attribute::Position);
      glVertexAttribPointer(gl::Attribute::Position, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      offset = offsetof(ovrDistortionVertex, TexG);
      glEnableVertexAttribArray(gl::Attribute::TexCoord0);
      glVertexAttribPointer(gl::Attribute::TexCoord0, 2, GL_FLOAT, GL_FALSE,
        stride, (void*)offset);

      gl::VertexArray::unbind();
      gl::Program::clear();
    });

    distortionProgram = GlUtils::getProgram(
      Resource::SHADERS_DISTORTION_VS,
      Resource::SHADERS_DISTORTION_FS
    );
  }
Example No. 11
  void initGl() {
    GlfwApp::initGl();

    ovrFovPort eyeFovPorts[2];
    for_each_eye([&](ovrEyeType eye){
      ovrTextureHeader & eyeTextureHeader = textures[eye].Header;
      eyeFovPorts[eye] = hmd->DefaultEyeFov[eye];
      eyeTextureHeader.TextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->DefaultEyeFov[eye], 1.0f);
      eyeTextureHeader.RenderViewport.Size = eyeTextureHeader.TextureSize;
      eyeTextureHeader.RenderViewport.Pos.x = 0;
      eyeTextureHeader.RenderViewport.Pos.y = 0;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;

      eyeFramebuffers[eye] = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeFramebuffers[eye]->init(ovr::toGlm(eyeTextureHeader.TextureSize));
      ((ovrGLTexture&)textures[eye]).OGL.TexId = oglplus::GetName(eyeFramebuffers[eye]->color);
    });

    ovrGLConfig cfg;
    memset(&cfg, 0, sizeof(ovrGLConfig));
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.Multisample = 1;

    /**
     * In the Direct3D examples in the Oculus SDK, they make the point that the
     * onscreen window size does not need to match the Rift resolution. However,
     * this doesn't currently work in OpenGL, so we have to create the window at
     * the full resolution of the Rift and ensure that we use the same size here
     * when setting BackBufferSize.
     */
    cfg.OGL.Header.BackBufferSize = ovr::fromGlm(getSize());

    ON_LINUX([&]{
      cfg.OGL.Disp = (Display*)glfw::getNativeDisplay(getWindow());
    });

    int distortionCaps = 0
        | ovrDistortionCap_TimeWarp
        | ovrDistortionCap_Vignette;

    ON_LINUX([&]{
      distortionCaps |= ovrDistortionCap_LinuxDevFullscreen;
    });

    ovrEyeRenderDesc              eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg.Config,
        distortionCaps, eyeFovPorts, eyeRenderDescs);
    if (!configResult) {
      FAIL("Unable to configure SDK based distortion rendering");
    }

    for_each_eye([&](ovrEyeType eye){
      eyeOffsets[eye] = eyeRenderDescs[eye].HmdToEyeViewOffset;
      eyeProjections[eye] = ovr::toGlm(
          ovrMatrix4f_Projection(eyeFovPorts[eye], 0.01f, 1000.0f, true));
    });
  }
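The offsets and projections cached here are consumed once per eye every frame; a hedged sketch of that use, following the modelview-offset pattern in Examples No. 7 and No. 15 (headView and renderScene are hypothetical stand-ins for the app's camera and draw code):

    // Per-eye camera: translate by the HMD-to-eye offset, then draw with the cached projection.
    for_each_eye([&](ovrEyeType eye){
      glm::mat4 eyeView =
          glm::translate(glm::mat4(), ovr::toGlm(eyeOffsets[eye])) * headView;
      renderScene(eyeProjections[eye], eyeView); // app-specific draw call
    });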
Example No. 12
// Active GL context is required for the following
int RiftAppSkeleton::ConfigureSDKRendering()
{
    if (m_Hmd == NULL)
        return 1;
    ovrSizei l_TextureSizeLeft = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Left, m_Hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei l_TextureSizeRight = ovrHmd_GetFovTextureSize(m_Hmd, ovrEye_Right, m_Hmd->DefaultEyeFov[1], 1.0f);
    ovrSizei l_TextureSize;
    l_TextureSize.w = l_TextureSizeLeft.w + l_TextureSizeRight.w;
    l_TextureSize.h = (l_TextureSizeLeft.h>l_TextureSizeRight.h ? l_TextureSizeLeft.h : l_TextureSizeRight.h);

    // Oculus Rift eye configurations...
    m_EyeFov[0] = m_Hmd->DefaultEyeFov[0];
    m_EyeFov[1] = m_Hmd->DefaultEyeFov[1];

    m_Cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    m_Cfg.OGL.Header.Multisample = 0;

    const int l_DistortionCaps = ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp;
    ovrHmd_ConfigureRendering(m_Hmd, &m_Cfg.Config, l_DistortionCaps, m_EyeFov, m_EyeRenderDesc);

    // Reset this state before rendering anything else or we get a black screen.
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glUseProgram(0);

    l_EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
    l_EyeTexture[0].OGL.Header.TextureSize.w = l_TextureSize.w;
    l_EyeTexture[0].OGL.Header.TextureSize.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.x = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Pos.y = 0;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.w = l_TextureSize.w/2;
    l_EyeTexture[0].OGL.Header.RenderViewport.Size.h = l_TextureSize.h;
    l_EyeTexture[0].OGL.TexId = m_renderBuffer.tex;

    // Right eye the same, except for the x-position in the texture...
    l_EyeTexture[1] = l_EyeTexture[0];
    l_EyeTexture[1].OGL.Header.RenderViewport.Pos.x = (l_TextureSize.w+1) / 2;

    return 0;
}
Example No. 13
RiftRenderingApp::RiftRenderingApp() {
    Platform::sleepMillis(200);
    if (!ovrHmd_ConfigureTracking(hmd,
      ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
      SAY_ERR("Could not attach to sensor device");
    }

    memset(eyeTextures, 0, 2 * sizeof(ovrGLTexture));

    for_each_eye([&](ovrEyeType eye){
      ovrSizei eyeTextureSize = ovrHmd_GetFovTextureSize(hmd, eye, hmd->MaxEyeFov[eye], 1.0f);
      ovrTextureHeader & eyeTextureHeader = eyeTextures[eye].Header;
      eyeTextureHeader.TextureSize = eyeTextureSize;
      eyeTextureHeader.RenderViewport.Size = eyeTextureSize;
      eyeTextureHeader.API = ovrRenderAPI_OpenGL;
    });
  }
Example No. 14
qboolean VR_Enable()
{
	int i;
	if( ovr_Initialize(NULL) != ovrSuccess ) {
		Con_Printf("Failed to Initialize Oculus SDK");
		return false;
	}

	if( ovrHmd_Create(0, &hmd) != ovrSuccess ) {
		Con_Printf("Failed to get HMD");
		return false;
	}

	if( !InitOpenGLExtensions() ) {
		Con_Printf("Failed to initialize OpenGL extensions");
		return false;
	}

	ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, glwidth, glheight, (ovrTexture**)&mirror_texture);
	glGenFramebuffersEXT(1, &mirror_fbo);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, mirror_fbo);
	glFramebufferTexture2DEXT(GL_READ_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, mirror_texture->OGL.TexId, 0);
	glFramebufferRenderbufferEXT(GL_READ_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, 0);
	glBindFramebufferEXT(GL_READ_FRAMEBUFFER_EXT, 0);

	for( i = 0; i < 2; i++ ) {
		ovrSizei size = ovrHmd_GetFovTextureSize(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i], 1);

		eyes[i].index = i;
		eyes[i].fbo = CreateFBO(size.w, size.h);
		eyes[i].render_desc = ovrHmd_GetRenderDesc(hmd, (ovrEyeType)i, hmd->DefaultEyeFov[i]);
		eyes[i].fov_x = (atan(hmd->DefaultEyeFov[i].LeftTan) + atan(hmd->DefaultEyeFov[i].RightTan)) / M_PI_DIV_180;
		eyes[i].fov_y = (atan(hmd->DefaultEyeFov[i].UpTan) + atan(hmd->DefaultEyeFov[i].DownTan)) / M_PI_DIV_180;
	}

	ovrHmd_SetEnabledCaps(hmd, ovrHmdCap_LowPersistence|ovrHmdCap_DynamicPrediction);
	ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation|ovrTrackingCap_MagYawCorrection|ovrTrackingCap_Position, 0);
	
	wglSwapIntervalEXT(0); // Disable V-Sync

	vr_initialized = true;
	return true;
}
Example No. 15
  virtual void initGl() {
    RiftGlfwApp::initGl();

    ovrRenderAPIConfig cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.Header.API = ovrRenderAPI_OpenGL;
    cfg.Header.BackBufferSize = ovr::fromGlm(getSize());
    cfg.Header.Multisample = 1;

    int distortionCaps = ovrDistortionCap_Vignette;
    ovrEyeRenderDesc eyeRenderDescs[2];
    int configResult = ovrHmd_ConfigureRendering(hmd, &cfg,
        distortionCaps, hmd->DefaultEyeFov, eyeRenderDescs);

    for_each_eye([&](ovrEyeType eye){
      PerEyeArg & eyeArgs = eyes[eye];
      ovrFovPort fov = hmd->DefaultEyeFov[eye];
      ovrSizei texSize = ovrHmd_GetFovTextureSize(hmd, eye, fov, 1.0f);
      eyeArgs.framebuffer = FramebufferWrapperPtr(new FramebufferWrapper());
      eyeArgs.framebuffer->init(ovr::toGlm(texSize));

      ovrTextureHeader & textureHeader = eyeTextures[eye].Header;
      textureHeader.API = ovrRenderAPI_OpenGL;
      textureHeader.TextureSize = texSize;
      textureHeader.RenderViewport.Size = texSize;
      textureHeader.RenderViewport.Pos.x = 0;
      textureHeader.RenderViewport.Pos.y = 0;
      ((ovrGLTextureData&)eyeTextures[eye]).TexId =
        oglplus::GetName(eyeArgs.framebuffer->color);

      eyeArgs.modelviewOffset = glm::translate(glm::mat4(), 
        ovr::toGlm(eyeRenderDescs[eye].HmdToEyeViewOffset));

      ovrMatrix4f projection = ovrMatrix4f_Projection(fov, 0.01f, 100, true);
      eyeArgs.projection = ovr::toGlm(projection);
    });
  }
Example No. 16
//-------------------------------------------------------------------------------------
int WINAPI WinMain(HINSTANCE hinst, HINSTANCE, LPSTR, int)
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    HMD = ovrHmd_Create(0);

    if (!HMD)                       { MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK); return(0); }
    if (HMD->ProductName[0] == '\0')  MessageBoxA(NULL,"Rift detected, display not enabled.", "", MB_OK);

    // Setup Window and Graphics - use window frame if relying on Oculus driver
    bool windowed = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;    
    if (!WND.InitWindowAndDevice(hinst, Recti(HMD->WindowsPos, HMD->Resolution), windowed))
        return(0);

    WND.SetMaxFrameLatency(1);
    ovrHmd_AttachToWindow(HMD, WND.Window, NULL, NULL);
    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

    // Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection |
                                  ovrTrackingCap_Position, 0);

    // Make the eye render buffers (caution if actual size < requested due to HW limits). 
    for (int eye=0; eye<2; eye++)
    {
        Sizei idealSize             = ovrHmd_GetFovTextureSize(HMD, (ovrEyeType)eye,
                                                               HMD->DefaultEyeFov[eye], 1.0f);
        pEyeRenderTexture[eye]      = new ImageBuffer(true, false, idealSize);
        pEyeDepthBuffer[eye]        = new ImageBuffer(true, true, pEyeRenderTexture[eye]->Size);
        EyeRenderViewport[eye].Pos  = Vector2i(0, 0);
        EyeRenderViewport[eye].Size = pEyeRenderTexture[eye]->Size;
    }

    // Setup VR components
#if SDK_RENDER
	#if RENDER_OPENGL
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API				= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.BackBufferSize	= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample		= 1;
	oglcfg.OGL.Window					= OGL.Window;
	oglcfg.OGL.DC						= GetDC(OGL.Window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   HMD->DefaultEyeFov, EyeRenderDesc))	
		return(1);
	#else
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API            = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.BackBufferSize = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample    = 1;
    d3d11cfg.D3D11.pDevice               = WND.Device;
    d3d11cfg.D3D11.pDeviceContext        = WND.Context;
    d3d11cfg.D3D11.pBackBufferRT         = WND.BackBufferRT;
    d3d11cfg.D3D11.pSwapChain            = WND.SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
                                   ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
                                   HMD->DefaultEyeFov, EyeRenderDesc))
        return(1);
	#endif
#else
    APP_RENDER_SetupGeometryAndShaders();
#endif

    // Create the room model
    Scene roomScene(false); // Can simplify scene further with parameter if required.

    // Initialize Webcams and threads
	WebCamManager WebCamMngr(HMD);

    // MAIN LOOP
    // =========
    while (!(WND.Key['Q'] && WND.Key[VK_CONTROL]) && !WND.Key[VK_ESCAPE])
    {
        WND.HandleMessages();
        
        float       speed                    = 1.0f; // Can adjust the movement speed. 
        int         timesToRenderScene       = 1;    // Can adjust the render burden on the app.
		ovrVector3f useHmdToEyeViewOffset[2] = {EyeRenderDesc[0].HmdToEyeViewOffset,
			                                    EyeRenderDesc[1].HmdToEyeViewOffset};
        // Start timing
    #if SDK_RENDER
        ovrHmd_BeginFrame(HMD, 0);
    #else
        ovrHmd_BeginFrameTiming(HMD, 0);
    #endif

        // Handle key toggles for re-centering, meshes, FOV, etc.
        ExampleFeatures1(&speed, &timesToRenderScene, useHmdToEyeViewOffset);

        // Keyboard inputs to adjust player orientation
        if (WND.Key[VK_LEFT])  Yaw += 0.02f;
        if (WND.Key[VK_RIGHT]) Yaw -= 0.02f;

        // Keyboard inputs to adjust player position
        if (WND.Key['W']||WND.Key[VK_UP])   Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,-speed*0.05f));
        if (WND.Key['S']||WND.Key[VK_DOWN]) Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(0,0,+speed*0.05f));
        if (WND.Key['D'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(+speed*0.05f,0,0));
        if (WND.Key['A'])                    Pos+=Matrix4f::RotationY(Yaw).Transform(Vector3f(-speed*0.05f,0,0));
        Pos.y = ovrHmd_GetFloat(HMD, OVR_KEY_EYE_HEIGHT, Pos.y);
  
        // Animate the cube
        if (speed)
            roomScene.Models[0]->Pos = Vector3f(9*sin(0.01f*clock),3,9*cos(0.01f*clock));

		// Get both eye poses simultaneously, with IPD offset already included. 
		ovrPosef temp_EyeRenderPose[2];
		ovrHmd_GetEyePoses(HMD, 0, useHmdToEyeViewOffset, temp_EyeRenderPose, NULL);

		// Update textures with WebCams' frames
		WebCamMngr.Update();	

        // Render the two undistorted eye views into their render buffers.  
        for (int eye = 0; eye < 2; eye++)
        {
            ImageBuffer * useBuffer      = pEyeRenderTexture[eye];  
            ovrPosef    * useEyePose     = &EyeRenderPose[eye];
            float       * useYaw         = &YawAtRender[eye];
            bool          clearEyeImage  = true;
            bool          updateEyeImage = true;

            // Handle key toggles for half-frame rendering, buffer resolution, etc.
            ExampleFeatures2(eye, &useBuffer, &useEyePose, &useYaw, &clearEyeImage, &updateEyeImage);

            if (clearEyeImage)
			#if RENDER_OPENGL
				WND.ClearAndSetRenderTarget(useBuffer, Recti(EyeRenderViewport[eye]));
			#else
                WND.ClearAndSetRenderTarget(useBuffer->TexRtv,
                                             pEyeDepthBuffer[eye], Recti(EyeRenderViewport[eye]));	
			#endif

            if (updateEyeImage)
            {
                // Write in values actually used (becomes significant in Example features)
                *useEyePose = temp_EyeRenderPose[eye];
                *useYaw     = Yaw;

                // Get view and projection matrices (note near Z to reduce eye strain)
                Matrix4f rollPitchYaw       = Matrix4f::RotationY(Yaw);
                Matrix4f finalRollPitchYaw  = rollPitchYaw * Matrix4f(useEyePose->Orientation);
                Vector3f finalUp            = finalRollPitchYaw.Transform(Vector3f(0, 1, 0));
                Vector3f finalForward       = finalRollPitchYaw.Transform(Vector3f(0, 0, -1));
                Vector3f shiftedEyePos      = Pos + rollPitchYaw.Transform(useEyePose->Position);

                Matrix4f view = Matrix4f::LookAtRH(shiftedEyePos, shiftedEyePos + finalForward, finalUp);
                Matrix4f proj = ovrMatrix4f_Projection(EyeRenderDesc[eye].Fov, 0.2f, 1000.0f, true); 

				// Keyboard input to switch from "look through" to scene mode
				static bool bOldLookThrough	= false;
				static bool bLookThrough	= true;
				if (WND.Key['X'] && bOldLookThrough != WND.Key['X']) { bLookThrough = !bLookThrough; }
				bOldLookThrough = WND.Key['X'];

				if(!bLookThrough)
				{
					// Render the scene
					for (int t=0; t<timesToRenderScene; t++)
						roomScene.Render(view, proj.Transposed());

					WebCamMngr.DrawBoard(view, proj.Transposed());
				}
				else { WebCamMngr.DrawLookThrough(eye); }
            }
        }

        // Do distortion rendering, Present and flush/sync
    #if SDK_RENDER
		#if RENDER_OPENGL
		ovrGLTexture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].OGL.Header.API				= ovrRenderAPI_OpenGL;
            eyeTexture[eye].OGL.Header.TextureSize		= pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].OGL.Header.RenderViewport	= EyeRenderViewport[eye];
            eyeTexture[eye].OGL.TexId					= pEyeRenderTexture[eye]->TexId;
        }
		#else
        ovrD3D11Texture eyeTexture[2]; // Gather data for eye textures 
        for (int eye = 0; eye<2; eye++)
        {
            eyeTexture[eye].D3D11.Header.API            = ovrRenderAPI_D3D11;
            eyeTexture[eye].D3D11.Header.TextureSize    = pEyeRenderTexture[eye]->Size;
            eyeTexture[eye].D3D11.Header.RenderViewport = EyeRenderViewport[eye];
            eyeTexture[eye].D3D11.pTexture              = pEyeRenderTexture[eye]->Tex;
            eyeTexture[eye].D3D11.pSRView               = pEyeRenderTexture[eye]->TexSv;
        }
		#endif
		ovrHmd_EndFrame(HMD, EyeRenderPose, &eyeTexture[0].Texture);
    #else
        APP_RENDER_DistortAndPresent();
    #endif
    }

	WebCamMngr.StopCapture();

    // Release and close down
    ovrHmd_Destroy(HMD);
    ovr_Shutdown();
	WND.ReleaseWindow(hinst);

    return(0);
}
Example No. 17
	bool OVR::postReset(void* _nwh, ovrRenderAPIConfig* _config, bool _debug)
	{
		if (_debug)
		{
			switch (_config->Header.API)
			{
#if BGFX_CONFIG_RENDERER_DIRECT3D11
			case ovrRenderAPI_D3D11:
				{
					ovrD3D11ConfigData* data = (ovrD3D11ConfigData*)_config;
#	if OVR_VERSION > OVR_VERSION_043
					m_rtSize = data->Header.BackBufferSize;
#	else
					m_rtSize = data->Header.RTSize;
#	endif // OVR_VERSION > OVR_VERSION_043
				}
				break;
#endif // BGFX_CONFIG_RENDERER_DIRECT3D11

#if BGFX_CONFIG_RENDERER_OPENGL
			case ovrRenderAPI_OpenGL:
				{
					ovrGLConfigData* data = (ovrGLConfigData*)_config;
#	if OVR_VERSION > OVR_VERSION_043
					m_rtSize = data->Header.BackBufferSize;
#	else
					m_rtSize = data->Header.RTSize;
#	endif // OVR_VERSION > OVR_VERSION_043
				}
				break;
#endif // BGFX_CONFIG_RENDERER_OPENGL

			case ovrRenderAPI_None:
			default:
				BX_CHECK(false, "You should not be here!");
				break;
			}

			m_debug = true;
			return false;
		}

		if (!m_initialized)
		{
			return false;
		}

		if (!_debug)
		{
			m_hmd = ovrHmd_Create(0);
		}

		if (NULL == m_hmd)
		{
			m_hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
			BX_WARN(NULL != m_hmd, "Unable to initialize OVR.");

			if (NULL == m_hmd)
			{
				return false;
			}
		}

		BX_TRACE("HMD: %s, %s, firmware: %d.%d"
			, m_hmd->ProductName
			, m_hmd->Manufacturer
			, m_hmd->FirmwareMajor
			, m_hmd->FirmwareMinor
			);

		ovrBool result;
		result = ovrHmd_AttachToWindow(m_hmd, _nwh, NULL, NULL);
		if (!result) { goto ovrError; }

		ovrFovPort eyeFov[2] = { m_hmd->DefaultEyeFov[0], m_hmd->DefaultEyeFov[1] };
		result = ovrHmd_ConfigureRendering(m_hmd
			, _config
			, 0
#if OVR_VERSION < OVR_VERSION_050
			| ovrDistortionCap_Chromatic // permanently enabled >= v5.0
#endif
			| ovrDistortionCap_Vignette
			| ovrDistortionCap_TimeWarp
			| ovrDistortionCap_Overdrive
			| ovrDistortionCap_NoRestore
			| ovrDistortionCap_HqDistortion
			, eyeFov
			, m_erd
			);
		if (!result) { goto ovrError; }

		ovrHmd_SetEnabledCaps(m_hmd
			, 0
			| ovrHmdCap_LowPersistence
			| ovrHmdCap_DynamicPrediction
			);

		result = ovrHmd_ConfigureTracking(m_hmd
			, 0
			| ovrTrackingCap_Orientation
			| ovrTrackingCap_MagYawCorrection
			| ovrTrackingCap_Position
			, 0
			);

		if (!result)
		{
ovrError:
			BX_TRACE("Failed to initialize OVR.");
			ovrHmd_Destroy(m_hmd);
			m_hmd = NULL;
			return false;
		}

		ovrSizei sizeL = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Left,  m_hmd->DefaultEyeFov[0], 1.0f);
		ovrSizei sizeR = ovrHmd_GetFovTextureSize(m_hmd, ovrEye_Right, m_hmd->DefaultEyeFov[1], 1.0f);
		m_rtSize.w = sizeL.w + sizeR.w;
		m_rtSize.h = bx::uint32_max(sizeL.h, sizeR.h);

		m_warning = true;

		return true;
	}
Example No. 18
COculusVR::COculusVR(bool latency)
{
	m_isReady = true;

	// Initializes LibOVR, and the Rift
    ovr_Initialize();

    Hmd = ovrHmd_Create(0);
    if (!Hmd)
    {
        MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
        return;
    }
    if (Hmd->ProductName[0] == '\0')
        MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);

    if (Hmd->HmdCaps & ovrHmdCap_ExtendDesktop)
    {
        WindowSize = Hmd->Resolution;
    }
    else
    {
        // In Direct App-rendered mode we can use a smaller window size,
        // as the window has its own contents and isn't tied to the buffer.
        WindowSize = Sizei(1100, 618); // not Sizei(960, 540): avoids a rotated-output bug.
    }

	ovrHmd_AttachToWindow(Hmd, wzGetWindowHandle(), NULL, NULL);

	// Configure Stereo settings.
	Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left, Hmd->DefaultEyeFov[0], 1.0f);
	Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, Hmd->DefaultEyeFov[1], 1.0f);

    EyeRenderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
    EyeRenderTargetSize.h = Alg::Max( recommenedTex0Size.h, recommenedTex1Size.h );

	//Create Framebuffer
	wzCreateRenderTarget(&m_screenRender);
	wzCreateRenderBufferDepth(&m_screenBuffer,EyeRenderTargetSize.w,EyeRenderTargetSize.h);
	wzCreateTexture(&m_screenTex,EyeRenderTargetSize.w,EyeRenderTargetSize.h,WZ_FORMATTYPE_RGB,NULL);
	//attach
	wzSetRenderBuffer(&m_screenRender,&m_screenBuffer);
	wzSetRenderTexture(&m_screenRender,&m_screenTex);

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { Hmd->DefaultEyeFov[0], Hmd->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(EyeRenderTargetSize.w / 2, EyeRenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((EyeRenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	//Shader vertex format
	wzVertexElements ve_var[] = {
		{WZVETYPE_FLOAT2,"position"},
		{WZVETYPE_FLOAT1,"timewarpLerpFactor"},
		{WZVETYPE_FLOAT1,"vignette"},
		{WZVETYPE_FLOAT2,"texCoord0"},
		{WZVETYPE_FLOAT2,"texCoord1"},
		{WZVETYPE_FLOAT2,"texCoord2"},
		WZVE_TMT()
	};

	// create a distortion mesh for each eye
	for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
	{
		// Allocate mesh vertices, registering with renderer using the OVR vertex format.
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(Hmd, (ovrEyeType) eyeNum, eyeFov[eyeNum],
									ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
		// Create data arrays
		wzVector2* vertex_pos = new wzVector2[meshData.VertexCount];
		float* vertex_posTimewarp = new float[meshData.VertexCount];
		float* vertex_posVignette = new float[meshData.VertexCount];
		wzVector2* vertex_textanR = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanG = new wzVector2[meshData.VertexCount];
		wzVector2* vertex_textanB = new wzVector2[meshData.VertexCount];

		// copy the distortion mesh vertex data
		for(unsigned int i = 0; i < meshData.VertexCount; i++) {
			vertex_pos[i].x = meshData.pVertexData[i].ScreenPosNDC.x;
			vertex_pos[i].y = meshData.pVertexData[i].ScreenPosNDC.y;
			vertex_posTimewarp[i] = meshData.pVertexData[i].TimeWarpFactor;
			vertex_posVignette[i] = meshData.pVertexData[i].VignetteFactor;
			vertex_textanR[i].x = meshData.pVertexData[i].TanEyeAnglesR.x;
			vertex_textanR[i].y = meshData.pVertexData[i].TanEyeAnglesR.y;
			vertex_textanG[i].x = meshData.pVertexData[i].TanEyeAnglesG.x;
			vertex_textanG[i].y = meshData.pVertexData[i].TanEyeAnglesG.y;
			vertex_textanB[i].x = meshData.pVertexData[i].TanEyeAnglesB.x;
			vertex_textanB[i].y = meshData.pVertexData[i].TanEyeAnglesB.y;
		}

		void* vertex_pointer[] = {vertex_pos,vertex_posTimewarp,vertex_posVignette,vertex_textanR,vertex_textanG,vertex_textanB};

		if(wzCreateMesh(&MeshBuffer[eyeNum], vertex_pointer, ve_var,
			meshData.pIndexData, meshData.VertexCount, meshData.IndexCount)) {
				MessageBoxA(NULL, "Lens Distort Mesh Error.", "", MB_OK);
				
			delete[] vertex_pos;
			delete[] vertex_posTimewarp;
			delete[] vertex_posVignette;
			delete[] vertex_textanR;
			delete[] vertex_textanG;
			delete[] vertex_textanB;

			return;	//error
		}
		wzChangeDrawMode(&MeshBuffer[eyeNum],WZ_MESH_DF_TRIANGLELIST);

		delete[] vertex_pos;
		delete[] vertex_posTimewarp;
		delete[] vertex_posVignette;
		delete[] vertex_textanR;
		delete[] vertex_textanG;
		delete[] vertex_textanB;

		ovrHmd_DestroyDistortionMesh(&meshData);

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(Hmd, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],EyeRenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

	//Create shader
	if(wzCreateShader(&LensShader, ols_vertexshader,ols_flagshader, ve_var)) {
		MessageBoxA(NULL, "Lens Shader Compile Error.", "", MB_OK);
		return;
	}

    if(latency) ovrHmd_SetEnabledCaps(Hmd, ovrHmdCap_DynamicPrediction);	// ovrHmdCap_LowPersistence
	// Start the sensor which reports the Rift's pose and motion
	ovrHmd_ConfigureTracking(Hmd, ovrTrackingCap_Orientation |
								ovrTrackingCap_MagYawCorrection, 0);		// not used: ovrTrackingCap_Position

	m_isReady = false;
}
Example No. 19
		void UpdateOVRParams()
		{


			m_data.eyeFov[0] = m_device->DefaultEyeFov[0];
			m_data.eyeFov[1] = m_device->DefaultEyeFov[1];

			float DesiredPixelDensity = 1;
			// Configure Stereo settings. Default pixel density is 1.0f.
			Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(m_device, ovrEye_Left, m_data.eyeFov[0], DesiredPixelDensity);
			Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(m_device, ovrEye_Right, m_data.eyeFov[1], DesiredPixelDensity);

			Sizei  rtSize(recommenedTex0Size.w + recommenedTex1Size.w,
				Alg::Max(recommenedTex0Size.h, recommenedTex1Size.h));

			m_data.texSize.set(rtSize.w, rtSize.h);
			Sizei eyeRenderSize[2];

			// Don't draw more than the recommended size; this also ensures that the resolution
			// reported in the overlay HUD is updated correctly for FOV/pixel-density changes.
			eyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w / 2, rtSize.h), recommenedTex0Size);
			eyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w / 2, rtSize.h), recommenedTex1Size);
			m_data.eyeRenderSize[0].set(eyeRenderSize[0].w, eyeRenderSize[0].h);
			m_data.eyeRenderSize[1].set(eyeRenderSize[1].w, eyeRenderSize[1].h);

			m_data.hmdResolution.x = m_device->Resolution.w;
			m_data.hmdResolution.y = m_device->Resolution.h;


			bool IsLowPersistence = true;
			bool DynamicPrediction = false;
			bool VsyncEnabled = false;

			// Hmd caps.
			unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync);
			if (IsLowPersistence)
				hmdCaps |= ovrHmdCap_LowPersistence;

			// ovrHmdCap_DynamicPrediction - enables internal latency feedback
			if (DynamicPrediction)
				hmdCaps |= ovrHmdCap_DynamicPrediction;

			// ovrHmdCap_DisplayOff - turns off the display
			//if (DisplaySleep)
			hmdCaps |= ovrHmdCap_DisplayOff;

			//if (!MirrorToWindow)
			hmdCaps |= ovrHmdCap_NoMirrorToWindow;

			ovrHmd_SetEnabledCaps(m_device, hmdCaps);
			

			unsigned sensorCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection;
			if (!IsTrackingConnected())
				printf("Oculus tracking device not detected!\n");
			sensorCaps |= ovrTrackingCap_Position;

			if (StartTrackingCaps != sensorCaps)
			{
				ovrHmd_ConfigureTracking(m_device, sensorCaps, 0);
				StartTrackingCaps = sensorCaps;
			}
		}
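ovrHmd_GetEnabledCaps is the read-back counterpart of ovrHmd_SetEnabledCaps; a small follow-up sketch (an assumption, not part of this codebase) to verify which of the requested caps the runtime actually accepted:

// Hypothetical check: warn when a requested cap (e.g. low persistence) was refused.
void VerifyOVRCaps(ovrHmd device, unsigned requestedCaps)
{
	unsigned enabled = ovrHmd_GetEnabledCaps(device);
	if ((requestedCaps & ovrHmdCap_LowPersistence) && !(enabled & ovrHmdCap_LowPersistence))
		printf("Low-persistence mode was requested but not enabled.\n");
}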
Example No. 20
ovrSizei HeadMountedDisplay::fovTextureSize( ovrEyeType eye, ovrFovPort fov, float pixels_per_display_pixel )
{
    ovrSizei ret;
    KVS_OVR_CALL( ret = ovrHmd_GetFovTextureSize( m_handler, eye, fov, pixels_per_display_pixel ) );
    return ret;
}
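KVS_OVR_CALL presumably wraps the raw SDK call with error bookkeeping; a minimal sketch of such a macro (an assumption, not the actual KVS definition), using ovrHmd_GetLastError as seen in Example No. 3:

// Hypothetical stand-in for KVS_OVR_CALL: run the call, then log the SDK's last error, if any.
// (needs <cstdio>)
#define KVS_OVR_CALL( expr )                                        \
    do {                                                            \
        expr;                                                       \
        const char* e = ovrHmd_GetLastError( NULL );                \
        if ( e && *e ) { std::fprintf( stderr, "OVR: %s\n", e ); }  \
    } while ( 0 )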
Example No. 21
CoinRiftWidget::CoinRiftWidget() : QGLWidget()
{
    for (int eye = 0; eye < 2; eye++) {
        reinterpret_cast<ovrGLTextureData*>(&eyeTexture[eye])->TexId = 0;
#ifdef USE_FRAMEBUFFER
        frameBufferID[eye] = 0;
        depthBufferID[eye] = 0;
#endif
    }

    // OVR will do the swapping.
    setAutoBufferSwap(false);

    hmd = ovrHmd_Create(0);
    if (!hmd) {
        qDebug() << "Could not find Rift device.";
        throw;
    }

    if (!ovrHmd_ConfigureTracking (hmd, ovrTrackingCap_Orientation |
                                        ovrTrackingCap_MagYawCorrection |
                                        ovrTrackingCap_Position, 
                                        ovrTrackingCap_Orientation |
                                        ovrTrackingCap_MagYawCorrection |
                                        ovrTrackingCap_Position
                                        )) { // Capabilities we require.
        qDebug() << "Could not start Rift motion sensor.";
        throw;
    }

    resize(hmd->Resolution.w, hmd->Resolution.h);

    // Configure stereo settings.
    ovrSizei recommenedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,
                                                           hmd->DefaultEyeFov[0], 1.0f);
    ovrSizei recommenedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right,
                                                           hmd->DefaultEyeFov[1], 1.0f);

#ifdef USE_SO_OFFSCREEN_RENDERER
    renderer = new SoOffscreenRenderer(SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                                                        std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    renderer->setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY);
    BackgroundColor = SbColor(.0f, .0f, .8f);
    renderer->setBackgroundColor(BackgroundColor);
#endif
#ifdef USE_FRAMEBUFFER
    m_sceneManager = new SoSceneManager();
    m_sceneManager->setViewportRegion(SbViewportRegion(std::max(recommenedTex0Size.w, recommenedTex1Size.w),
                                                       std::max(recommenedTex0Size.h, recommenedTex1Size.h)));
    m_sceneManager->setBackgroundColor(SbColor(.0f, .0f, .8f));
#endif
    basePosition = SbVec3f(0.0f, 0.0f, -2.0f);
    
    // light handling 
     SoDirectionalLight *light = new SoDirectionalLight();
    light->direction.setValue(1,-1,-1);

    SoDirectionalLight *light2 = new SoDirectionalLight();
    light2->direction.setValue(-1,-1,-1);
    light2->intensity.setValue(0.6);
    light2->color.setValue(0.8,0.8,1);


    scene = new SoSeparator(0); // Placeholder.
    for (int eye = 0; eye < 2; eye++) {
        rootScene[eye] = new SoSeparator();
        rootScene[eye]->ref();
        camera[eye] = new SoFrustumCamera();
        camera[eye]->position.setValue(basePosition);
        camera[eye]->focalDistance.setValue(5.0f);
        camera[eye]->viewportMapping.setValue(SoCamera::LEAVE_ALONE);
        rootScene[eye]->addChild(camera[eye]);
        rootScene[eye]->addChild(light); 
        rootScene[eye]->addChild(light2);
        rootScene[eye]->addChild(scene);
    }

    // Populate ovrEyeDesc[2].
    eyeRenderDesc[0].Eye = ovrEye_Left;
    eyeRenderDesc[1].Eye = ovrEye_Right;
    eyeRenderDesc[0].Fov = hmd->DefaultEyeFov[0];
    eyeRenderDesc[1].Fov = hmd->DefaultEyeFov[1];
#ifdef USE_SO_OFFSCREEN_RENDERER
    eyeTexture[0].Header.TextureSize.w = renderer->getViewportRegion().getViewportSizePixels().getValue()[0];
    eyeTexture[0].Header.TextureSize.h = renderer->getViewportRegion().getViewportSizePixels().getValue()[1];
    eyeTexture[1].Header.TextureSize = eyeTexture[0].Header.TextureSize;
#endif
#ifdef USE_FRAMEBUFFER
    eyeTexture[0].Header.TextureSize = recommenedTex0Size;
    eyeTexture[1].Header.TextureSize = recommenedTex1Size;
#endif
    eyeTexture[0].Header.RenderViewport.Pos.x = 0;
    eyeTexture[0].Header.RenderViewport.Pos.y = 0;
    eyeTexture[0].Header.RenderViewport.Size = eyeTexture[0].Header.TextureSize;
    eyeTexture[1].Header.RenderViewport.Pos = eyeTexture[0].Header.RenderViewport.Pos;
    eyeTexture[1].Header.RenderViewport.Size = eyeTexture[1].Header.TextureSize;

    const int backBufferMultisample = 0; // TODO This is a guess?
    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = hmd->Resolution;
    cfg.OGL.Header.Multisample = backBufferMultisample;
    cfg.OGL.Window = reinterpret_cast<HWND>(winId());
    makeCurrent();
    //cfg.OGL.WglContext = wglGetCurrentContext(); // http://stackoverflow.com/questions/17532033/qglwidget-get-gl-contextes-for-windows
    cfg.OGL.DC = wglGetCurrentDC();
    qDebug() << "Window:" << cfg.OGL.Window;
    //qDebug() << "Context:" << cfg.OGL.WglContext;
    qDebug() << "DC:" << cfg.OGL.DC;

    int DistortionCaps = 0;
    DistortionCaps |= ovrDistortionCap_Chromatic;
// DistortionCaps |= ovrDistortionCap_TimeWarp; // Produces black screen...
    DistortionCaps |= ovrDistortionCap_Vignette;
    DistortionCaps |= ovrDistortionCap_HqDistortion;

    bool VSyncEnabled(false); // TODO This is a guess.
    if (!ovrHmd_ConfigureRendering( hmd, 
                                    &cfg.Config, 
                                    /*(VSyncEnabled ? 0 : ovrHmdCap_NoVSync),*/
                                    DistortionCaps, 
                                    hmd->DefaultEyeFov,//eyes, 
                                    eyeRenderDesc)) {
        qDebug() << "Could not configure OVR rendering.";
        throw;
    }
    static const float nearPlane = 0.01;

    for (int eye = 0; eye < 2; eye++) {
        camera[eye]->aspectRatio.setValue((eyeRenderDesc[eye].Fov.LeftTan + eyeRenderDesc[eye].Fov.RightTan) /
                (eyeRenderDesc[eye].Fov.UpTan + eyeRenderDesc[eye].Fov.DownTan));
        camera[eye]->nearDistance.setValue(nearPlane);
        camera[eye]->farDistance.setValue(10000.0f);
        camera[eye]->left.setValue(-eyeRenderDesc[eye].Fov.LeftTan * nearPlane);
        camera[eye]->right.setValue(eyeRenderDesc[eye].Fov.RightTan * nearPlane);
        camera[eye]->top.setValue(eyeRenderDesc[eye].Fov.UpTan * nearPlane);
        camera[eye]->bottom.setValue(-eyeRenderDesc[eye].Fov.DownTan * nearPlane);
    }
}
Example No. 22
int
setup_rift(struct weston_compositor *compositor)
{
  struct oculus_rift *rift = compositor->rift;

  rift->enabled = 1;

  rift->screen_z = -5.0;
  rift->screen_scale = 1.0;

  weston_compositor_add_key_binding(compositor, KEY_5, MODIFIER_SUPER, 
      toggle_sbs, compositor);
  weston_compositor_add_key_binding(compositor, KEY_6, MODIFIER_SUPER, 
      toggle_rotate, compositor);
  weston_compositor_add_key_binding(compositor, KEY_7, MODIFIER_SUPER, 
      move_in, compositor);
  weston_compositor_add_key_binding(compositor, KEY_8, MODIFIER_SUPER, 
      move_out, compositor);
  weston_compositor_add_key_binding(compositor, KEY_9, MODIFIER_SUPER, 
      scale_up, compositor);
  weston_compositor_add_key_binding(compositor, KEY_0, MODIFIER_SUPER, 
      scale_down, compositor);

  /*// use this at some point in the future to detect and grab the rift display
  struct weston_output *output;
  wl_list_for_each(output, &compositor->output_list, link)
  {
    weston_log("Output (%i): %s\n\t%ix%i\n", output->id, output->name,
        output->width, output->height);
  }*/

  rift->distortion_shader = calloc(1, sizeof *(rift->distortion_shader));
  struct distortion_shader_ *d = rift->distortion_shader;
  d->program = CreateProgram(distortion_vertex_shader, distortion_fragment_shader);
  d->EyeToSourceUVScale = glGetUniformLocation(d->program, "EyeToSourceUVScale");
  d->EyeToSourceUVOffset = glGetUniformLocation(d->program, "EyeToSourceUVOffset");
  d->RightEye = glGetUniformLocation(d->program, "RightEye");
  d->angle = glGetUniformLocation(d->program, "angle");
  d->Position = glGetAttribLocation(d->program, "Position");
  d->TexCoord0 = glGetAttribLocation(d->program, "TexCoord0");
  d->TexCoordR = glGetAttribLocation(d->program, "TexCoordR");
  d->TexCoordG = glGetAttribLocation(d->program, "TexCoordG");
  d->TexCoordB = glGetAttribLocation(d->program, "TexCoordB");
  d->eyeTexture = glGetUniformLocation(d->program, "Texture0"); /* sampler: uniform, not attribute */

  rift->eye_shader = calloc(1, sizeof *(rift->eye_shader));
  struct eye_shader_ *e = rift->eye_shader;
  e->program = CreateProgram(eye_vertex_shader, eye_fragment_shader);
  e->Position = glGetAttribLocation(e->program, "Position"); /* was d->program: copy-paste bug */
  e->TexCoord0 = glGetAttribLocation(e->program, "TexCoord0");
  e->Projection = glGetUniformLocation(e->program, "Projection");
  e->ModelView = glGetUniformLocation(e->program, "ModelView");
  e->virtualScreenTexture = glGetUniformLocation(e->program, "Texture0"); /* sampler: uniform */

  rift->scene = calloc(1, sizeof *(rift->scene));
  glGenBuffers(1, &rift->scene->vertexBuffer);
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->vertexBuffer);
  static const GLfloat rectangle[] = 
    {-1.0f, -1.0f, -0.5f, 
      1.0f, -1.0f, -0.5f, 
     -1.0f, 1.0f, -0.5f,
      1.0f, -1.0f, -0.5f, 
      1.0f, 1.0f, -0.5f,
     -1.0f, 1.0f, -0.5f};
  glBufferData(GL_ARRAY_BUFFER, sizeof(rectangle), rectangle, GL_STATIC_DRAW);

  glGenBuffers(2, &rift->scene->SBSuvsBuffer[0]);
  glGenBuffers(1, &rift->scene->uvsBuffer);
  static const GLfloat uvs[3][12] = 
   {{ 0.0, 0.0,
      0.5, 0.0,
      0.0, 1.0,
      0.5, 0.0,
      0.5, 1.0,
      0.0, 1.0},
   {  0.5, 0.0,
      1.0, 0.0,
      0.5, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.5, 1.0},
   {  0.0, 0.0,
      1.0, 0.0,
      0.0, 1.0,
      1.0, 0.0,
      1.0, 1.0,
      0.0, 1.0}};
  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[0]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[0]), uvs[0], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->SBSuvsBuffer[1]);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[1]), uvs[1], GL_STATIC_DRAW);

  glBindBuffer(GL_ARRAY_BUFFER, rift->scene->uvsBuffer);
  glBufferData(GL_ARRAY_BUFFER, sizeof(uvs[2]), uvs[2], GL_STATIC_DRAW);

  rift->width = 1920;
  rift->height = 1080;

  glGenTextures(1, &rift->fbTexture);
  glBindTexture(GL_TEXTURE_2D, rift->fbTexture);
  glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  glGenFramebuffers(1, &rift->redirectedFramebuffer);
  glBindFramebuffer(GL_FRAMEBUFFER, rift->redirectedFramebuffer);
  glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, rift->fbTexture, 0); show_error();
  if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
  {
    switch(glCheckFramebufferStatus(GL_FRAMEBUFFER))
    {
      case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
      case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
      case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
    }

    weston_log("framebuffer not working\n");
    show_error();
    exit(1);
  }
  glClear(GL_COLOR_BUFFER_BIT);

  /*EGLint pbufferAttributes[] = {
     EGL_WIDTH,           rift->width,
     EGL_HEIGHT,          rift->height,
     EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
     EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
     EGL_NONE
  };

  rift->pbuffer = eglCreatePbufferSurface(
      rift->egl_display, rift->egl_config, 
      pbufferAttributes);

  glGenTextures(1, &(rift->texture));
  glBindTexture(GL_TEXTURE_2D, rift->texture);
  //glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rift->width, rift->height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  eglMakeCurrent(rift->egl_display, rift->pbuffer, rift->pbuffer, rift->egl_context);
  eglBindTexImage(rift->egl_display, rift->pbuffer, EGL_BACK_BUFFER);
  eglMakeCurrent(rift->egl_display, rift->orig_surface, rift->orig_surface, rift->egl_context);*/

  ovr_Initialize(0);
  rift->hmd = ovrHmd_Create(0);
  if(rift->hmd == NULL)
  {
    rift->hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
  }
  ovrHmd_ConfigureTracking(rift->hmd, ovrTrackingCap_Orientation | 
      ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0);
  ovrHmd_ResetFrameTiming(rift->hmd, 0);

  int eye;
  for(eye = 0; eye < 2; eye++)
  {
    ovrFovPort fov = rift->hmd->DefaultEyeFov[eye];
    ovrEyeRenderDesc renderDesc = ovrHmd_GetRenderDesc(rift->hmd, eye, fov);
    struct EyeArg *eyeArg = &rift->eyeArgs[eye];

    eyeArg->projection = ovrMatrix4f_Projection(fov, 0.1, 100000, true);
    /*int j, k;
    for(k=0; k<4; k++)
    {
      for(j=0; j<4; j++)
      {
        printf("%f\t", eyeArg->projection.M[k][j]);
      }
      printf("\n");
    }*/
    rift->hmdToEyeOffsets[eye] = renderDesc.HmdToEyeViewOffset;
    ovrRecti texRect;
    texRect.Size = ovrHmd_GetFovTextureSize(rift->hmd, eye, rift->hmd->DefaultEyeFov[eye],
        1.0f);
    texRect.Pos.x = texRect.Pos.y = 0;
    eyeArg->textureWidth = texRect.Size.w;
    eyeArg->textureHeight = texRect.Size.h;

    glGenTextures(1, &eyeArg->texture);
    glBindTexture(GL_TEXTURE_2D, eyeArg->texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, eyeArg->textureWidth, eyeArg->textureHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glGenFramebuffers(1, &eyeArg->framebuffer); show_error();
    glBindFramebuffer(GL_FRAMEBUFFER, eyeArg->framebuffer); show_error();
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, eyeArg->texture, 0); show_error();
    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
      switch(glCheckFramebufferStatus(GL_FRAMEBUFFER))
      {
        case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: weston_log("incomplete attachment\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS: weston_log("incomplete dimensions\n"); break;
        case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: weston_log("incomplete missing attachment\n"); break;
        case GL_FRAMEBUFFER_UNSUPPORTED: weston_log("unsupported\n"); break;
      }

      weston_log("framebuffer not working\n");
      show_error();
      exit(1);
    }
    if(eye)
    {
      glClearColor(1.0, 0.0, 0.0, 1.0); show_error();
    }
    else
    {
      glClearColor(0.0, 1.0, 0.0, 1.0); show_error();
    }
    glClear(GL_COLOR_BUFFER_BIT); show_error();

    /*EGLint eyePbufferAttributes[] = {
       EGL_WIDTH,           texRect.Size.w,
       EGL_HEIGHT,          texRect.Size.h,
       EGL_TEXTURE_FORMAT,  EGL_TEXTURE_RGB,
       EGL_TEXTURE_TARGET,  EGL_TEXTURE_2D,
       EGL_NONE
    };

    eyeArg.surface = eglCreatePbufferSurface(
        rift->egl_display, rift->egl_config, 
        eyePbufferAttributes);*/

    ovrVector2f scaleAndOffset[2];
    ovrHmd_GetRenderScaleAndOffset(fov, texRect.Size, texRect, scaleAndOffset);
    eyeArg->scale = scaleAndOffset[0];
    eyeArg->offset = scaleAndOffset[1];

    ovrHmd_CreateDistortionMesh(rift->hmd, eye, fov, 0, &eyeArg->mesh);

    glGenBuffers(1, &eyeArg->indexBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, eyeArg->mesh.IndexCount * sizeof(unsigned short), eyeArg->mesh.pIndexData, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

    float vertices_buffer[eyeArg->mesh.VertexCount*2];
    float uvs_buffer[3][eyeArg->mesh.VertexCount*2];
    unsigned int i;
    for(i=0; i<eyeArg->mesh.VertexCount; i++)
    {
      ovrDistortionVertex vertex = eyeArg->mesh.pVertexData[i];
      vertices_buffer[i*2] = vertex.ScreenPosNDC.x;
      vertices_buffer[(i*2)+1] = vertex.ScreenPosNDC.y;
      uvs_buffer[0][i*2] = vertex.TanEyeAnglesR.x;
      uvs_buffer[0][(i*2)+1] = vertex.TanEyeAnglesR.y;
      uvs_buffer[1][i*2] = vertex.TanEyeAnglesG.x;
      uvs_buffer[1][(i*2)+1] = vertex.TanEyeAnglesG.y;
      uvs_buffer[2][i*2] = vertex.TanEyeAnglesB.x;
      uvs_buffer[2][(i*2)+1] = vertex.TanEyeAnglesB.y;
    }

    glGenBuffers(1, &eyeArg->vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, eyeArg->vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, vertices_buffer, GL_STATIC_DRAW);
    glGenBuffers(3, &eyeArg->uvsBuffer[0]);
    for(i=0; i<3; i++)
    {
      glBindBuffer(GL_ARRAY_BUFFER, eyeArg->uvsBuffer[i]);
      glBufferData(GL_ARRAY_BUFFER, eyeArg->mesh.VertexCount * sizeof(GLfloat) * 2, uvs_buffer[i], GL_STATIC_DRAW);
      glBindBuffer(GL_ARRAY_BUFFER, 0);
    }
  }

  return 0;
}
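The buffers and distortion meshes built above are consumed once per repaint. A minimal sketch of that client-distortion pass, assuming the same rift state and SDK 0.4.2-era timing calls; `struct rift` and the function name are hypothetical, and binding the distortion program and its uniforms is abbreviated to comments:

static void rift_draw_frame(struct rift *rift) /* hypothetical names */
{
	ovrPosef eyePoses[2];
	ovrTrackingState ts;
	int eye;

	ovrHmd_BeginFrameTiming(rift->hmd, 0);
	ovrHmd_GetEyePoses(rift->hmd, 0, rift->hmdToEyeOffsets, eyePoses, &ts);

	glBindFramebuffer(GL_FRAMEBUFFER, 0);
	glClear(GL_COLOR_BUFFER_BIT);

	for (eye = 0; eye < 2; eye++) {
		struct EyeArg *eyeArg = &rift->eyeArgs[eye];
		ovrMatrix4f timeWarp[2];

		ovrHmd_GetEyeTimewarpMatrices(rift->hmd, (ovrEyeType) eye,
					      eyePoses[eye], timeWarp);
		/* bind the distortion program, eyeArg->vertexBuffer and the
		   three eyeArg->uvsBuffer attributes, then upload
		   eyeArg->scale, eyeArg->offset and the two timewarp
		   matrices as uniforms */
		glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eyeArg->indexBuffer);
		glDrawElements(GL_TRIANGLES, eyeArg->mesh.IndexCount,
			       GL_UNSIGNED_SHORT, 0);
	}

	ovrHmd_EndFrameTiming(rift->hmd);
}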
Exemplo n.º 23
0
void Renderer::initOVR()
{
    ovr_Initialize();

    Config& config = Core::get().config();

    if(!config.getBool("Renderer.OVR", false))
    {
        return;
    }

    hmd_ = ovrHmd_Create(0);

    if(!hmd_)
    {
        fprintf(stderr, "Failed to create OVR HMD, falling back to fake one\n");
        hmd_ = ovrHmd_CreateDebug(ovrHmd_DK2);
    }

    ovrSizei leftEyeTexSize = ovrHmd_GetFovTextureSize(hmd_, ovrEye_Left, hmd_->DefaultEyeFov[ovrEye_Left], 1.0f);
    ovrSizei rightEyeTexSize = ovrHmd_GetFovTextureSize(hmd_, ovrEye_Right, hmd_->DefaultEyeFov[ovrEye_Right], 1.0f);

    renderTexSize_.w = leftEyeTexSize.w + rightEyeTexSize.w;
    renderTexSize_.h = max(leftEyeTexSize.h, rightEyeTexSize.h);

    glGenTextures(1, &renderTex_);
    glBindTexture(GL_TEXTURE_2D, renderTex_);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, renderTexSize_.w, renderTexSize_.h, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);

    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &renderTexSize_.w);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &renderTexSize_.h);

    glGenTextures(1, &depthTex_);
    glBindTexture(GL_TEXTURE_2D, depthTex_);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT24, renderTexSize_.w, renderTexSize_.h, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, nullptr);

    eyeViewport_[ovrEye_Left].Pos.x = 0;
    eyeViewport_[ovrEye_Left].Pos.y = 0;
    eyeViewport_[ovrEye_Left].Size.w = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Left].Size.h = renderTexSize_.h;

    eyeViewport_[ovrEye_Right].Pos.x = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Right].Pos.y = 0;
    eyeViewport_[ovrEye_Right].Size.w = renderTexSize_.w / 2;
    eyeViewport_[ovrEye_Right].Size.h = renderTexSize_.h;

    eyeTexture_[ovrEye_Left].OGL.Header.API = ovrRenderAPI_OpenGL;
    eyeTexture_[ovrEye_Left].OGL.Header.TextureSize = renderTexSize_;
    eyeTexture_[ovrEye_Left].OGL.Header.RenderViewport = eyeViewport_[ovrEye_Left];
    eyeTexture_[ovrEye_Left].OGL.TexId = renderTex_;

    eyeTexture_[ovrEye_Right].OGL.Header.API = ovrRenderAPI_OpenGL;
    eyeTexture_[ovrEye_Right].OGL.Header.TextureSize = renderTexSize_;
    eyeTexture_[ovrEye_Right].OGL.Header.RenderViewport = eyeViewport_[ovrEye_Right];
    eyeTexture_[ovrEye_Right].OGL.TexId = renderTex_;

    ovrSizei targetSize;
    SDL_GetWindowSize(window_, &targetSize.w, &targetSize.h);

    SDL_SysWMinfo wmInfo;
    SDL_VERSION(&wmInfo.version);

    if(!SDL_GetWindowWMInfo(window_, &wmInfo))
    {
        throw runtime_error("Failed to get window info");
    }

    ovrGLConfig cfg;
    cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
    cfg.OGL.Header.RTSize = targetSize;
    cfg.OGL.Header.Multisample = 1; // 1 sample = no multisampling on the back buffer
#if defined(OVR_OS_WIN32)
    cfg.OGL.Window = wmInfo.info.win.window;
    cfg.OGL.DC = GetDC(wmInfo.info.win.window);
#elif defined(OVR_OS_MAC)
    // Mac does not have any fields
#else
    #error Implement for this OS.
#endif

    unsigned int distortionCaps = ovrDistortionCap_Chromatic|ovrDistortionCap_TimeWarp|ovrDistortionCap_Overdrive;

    if(!ovrHmd_ConfigureRendering(hmd_, &cfg.Config, distortionCaps, hmd_->DefaultEyeFov, eyeRenderDesc_))
    {
        throw runtime_error("Failed to configure HMD rendering");
    }

#ifdef OVR_OS_WIN32
    if(!ovrHmd_AttachToWindow(hmd_, wmInfo.info.win.window, nullptr, nullptr))
    {
        throw runtime_error("Failed to attach HMD to window");
    }
#endif

    glGenFramebuffers(1, &framebuffer_);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTex_, 0);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, depthTex_, 0);

    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        throw runtime_error("Framebuffer not complete");
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    unsigned int trackingCaps = ovrTrackingCap_Orientation|ovrTrackingCap_Position;

    if(!ovrHmd_ConfigureTracking(hmd_, trackingCaps, 0))
    {
        throw runtime_error("Failed to configure HMD tracking");
    }

    // dismiss the Health and Safety Warning now instead of waiting for its timeout
    ovrHmd_DismissHSWDisplay(hmd_);
}
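Once initOVR() succeeds, each frame follows the SDK-rendered pattern: begin the frame, fetch eye poses, render each eye into its viewport of the shared texture, then hand the texture back so the SDK can distort and swap. A hedged sketch reusing the members set up above (renderOvrFrame() and drawScene() are hypothetical names):

void Renderer::renderOvrFrame() // hypothetical per-frame counterpart to initOVR()
{
    static unsigned int frameIndex = 0;

    ovrHmd_BeginFrame(hmd_, frameIndex);

    ovrVector3f offsets[2] = { eyeRenderDesc_[0].HmdToEyeViewOffset,
                               eyeRenderDesc_[1].HmdToEyeViewOffset };
    ovrPosef eyePoses[2];
    ovrHmd_GetEyePoses(hmd_, frameIndex, offsets, eyePoses, nullptr);

    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer_);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    for(int i = 0; i < ovrEye_Count; i++)
    {
        ovrEyeType eye = hmd_->EyeRenderOrder[i];
        ovrRecti& vp = eyeViewport_[eye];

        glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
        // drawScene(eye, eyePoses[eye]); // app-specific scene rendering
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    // The SDK performs distortion, timewarp and the buffer swap here.
    ovrHmd_EndFrame(hmd_, eyePoses, &eyeTexture_[0].Texture);
    frameIndex++;
}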
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
    if (!HMD)
    {
        HMD = ovrHmd_Create(0);
        if (!HMD)
        {
            MessageBoxA(NULL, "Oculus Rift not detected.", "", MB_OK);
            return(1);
        }
        if (HMD->ProductName[0] == '\0')
            MessageBoxA(NULL, "Rift detected, display not enabled.", "", MB_OK);
    }

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
    RenderTargetSize.h = max ( recommendedTex0Size.h, recommendedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query OGL texture data.
	EyeTexture[0].OGL.Header.API			= ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize	= RenderTargetSize;
	EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
	EyeTexture[0].OGL.TexId					= pRendertargetTexture->TexId;

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1]							= EyeTexture[0];
    EyeTexture[1].OGL.Header.RenderViewport	= EyeRenderViewport[1];

    // Configure OpenGL.
    ovrGLConfig oglcfg;
    oglcfg.OGL.Header.API					= ovrRenderAPI_OpenGL;
    oglcfg.OGL.Header.RTSize				= Sizei(HMD->Resolution.w, HMD->Resolution.h);
    oglcfg.OGL.Header.Multisample			= backBufferMultisample;
	oglcfg.OGL.Window						= window;
	oglcfg.OGL.DC							= GetDC(window);

    if (!ovrHmd_ConfigureRendering(HMD, &oglcfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);	

    #else
	//Distortion vertex shader
	const char* vertexShader =
		"#version 110																			\n"
		"uniform vec2 EyeToSourceUVScale;														\n"
		"uniform vec2 EyeToSourceUVOffset;														\n"
		"uniform mat4 EyeRotationStart;															\n"
		"uniform mat4 EyeRotationEnd;															\n"
		"attribute vec2 Position;																\n"
		"attribute vec2 inTWLF_V;																\n"		
		"attribute vec2 inTexCoord0;															\n"
		"attribute vec2 inTexCoord1;															\n"
		"attribute vec2 inTexCoord2;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"vec2 TexCoord0 = vec2(inTexCoord0.x, -inTexCoord0.y);									\n"
		"vec2 TexCoord1 = vec2(inTexCoord1.x, -inTexCoord1.y);									\n"
		"vec2 TexCoord2 = vec2(inTexCoord2.x, -inTexCoord2.y);									\n"
		"float timewarpLerpFactor = inTWLF_V.x;													\n"
		"float Vignette = inTWLF_V.y;															\n"
		"vec2 TimewarpTexCoord( in vec2 TexCoord, in mat4 rotMat )								\n"
		"{																						\n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"   vec3 transformed = vec3( ( rotMat * vec4( TexCoord.xy , 1.00000, 1.00000) ).xyz );	\n"
		// Project them back onto the Z=1 plane of the rendered images.
		"   vec2 flattened = (transformed.xy  / transformed.z );								\n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"   return ((EyeToSourceUVScale * flattened) + EyeToSourceUVOffset);					\n"
		"}																						\n"
		"mat4 mat4_lerp( in mat4 x, in mat4 y, in mat4 s )										\n"
		"{																						\n"
		"	return mat4(mix(x[0],y[0],s[0]), mix(x[1],y[1],s[1]), mix(x[2],y[2],s[2]), mix(x[3],y[3],s[3]));\n"
		"}																						\n"
		"void main()																			\n"
		"{																						\n"
		"   mat4 lerpedEyeRot = mat4_lerp( EyeRotationStart, EyeRotationEnd, mat4( timewarpLerpFactor));\n"
		"   oTexCoord0 = TimewarpTexCoord( TexCoord0, lerpedEyeRot);							\n"
		"   oTexCoord1 = TimewarpTexCoord( TexCoord1, lerpedEyeRot);							\n"
		"   oTexCoord2 = TimewarpTexCoord( TexCoord2, lerpedEyeRot);							\n"
		"   oPosition = vec4( Position.xy , 0.500000, 1.00000);									\n"
		"   oVignette = Vignette;																\n"
		"   gl_Position = oPosition;															\n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"#version 110																			\n"
		"uniform sampler2D Texture0;															\n"
		"varying vec4 oPosition;																\n"
		"varying vec2 oTexCoord0;																\n"
		"varying vec2 oTexCoord1;																\n"
		"varying vec2 oTexCoord2;																\n"
		"varying float oVignette;																\n"
		"void main()																			\n"
		"{																						\n"
		// 3 samples for fixing chromatic aberrations
		"   float R = texture2D(Texture0, oTexCoord0.xy).r;										\n"
		"   float G = texture2D(Texture0, oTexCoord1.xy).g;										\n"
		"   float B = texture2D(Texture0, oTexCoord2.xy).b;										\n"
		"   gl_FragColor = (oVignette*vec4(R,G,B,1));											\n"
		"}";

	pRender->InitShaders(vertexShader, pixelShader, &Shaders);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD,   ovrTrackingCap_Orientation |
                                    ovrTrackingCap_MagYawCorrection |
                                    ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
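In the non-SDK_RENDER branch, the distortion shaders above still need EyeToSourceUVScale/Offset and the timewarp matrices fed every frame. A hedged sketch of that per-eye pass using SDK 0.4.3-era calls; SetUniform2f/SetUniform4x4f stand in for whatever the sample renderer's shader interface actually provides, and the Transposed() calls account for ovrMatrix4f being row-major while GLSL expects column-major:

    // After rendering both eyes into pRendertargetTexture:
    ovrHmd_BeginFrameTiming(HMD, 0);

    for (int eyeNum = 0; eyeNum < 2; eyeNum++)
    {
        ovrPosef pose = ovrHmd_GetHmdPosePerEye(HMD, (ovrEyeType) eyeNum);
        ovrMatrix4f timeWarpMatrices[2];
        ovrHmd_GetEyeTimewarpMatrices(HMD, (ovrEyeType) eyeNum, pose, timeWarpMatrices);

        Shaders->SetUniform2f("EyeToSourceUVScale",  UVScaleOffset[eyeNum][0].x, UVScaleOffset[eyeNum][0].y);
        Shaders->SetUniform2f("EyeToSourceUVOffset", UVScaleOffset[eyeNum][1].x, UVScaleOffset[eyeNum][1].y);
        Shaders->SetUniform4x4f("EyeRotationStart",  Matrix4f(timeWarpMatrices[0]).Transposed());
        Shaders->SetUniform4x4f("EyeRotationEnd",    Matrix4f(timeWarpMatrices[1]).Transposed());

        // Draw MeshVBs[eyeNum] / MeshIBs[eyeNum] with the bound shaders here.
    }

    ovrHmd_EndFrameTiming(HMD);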
Exemplo n.º 25
0
void OVR_FrameStart(int32_t changeBackBuffers)
{
	if (vr_ovr_maxfov->modified)
	{
		int newValue =  vr_ovr_maxfov->value ? 1 : 0;
		if (newValue != (int)vr_ovr_maxfov->value)
			Cvar_SetInteger("vr_ovr_maxfov",newValue);
		changeBackBuffers = 1;
		vr_ovr_maxfov->modified = (qboolean) false;
	}

	if (vr_ovr_supersample->modified)
	{
		if (vr_ovr_supersample->value < 1.0)
			Cvar_Set("vr_ovr_supersample", "1.0");
		else if (vr_ovr_supersample->value > 2.0)
			Cvar_Set("vr_ovr_supersample", "2.0");
		changeBackBuffers = 1;
		vr_ovr_supersample->modified = false;
	}
	if (useChroma != (qboolean) !!vr_chromatic->value)
	{
		useChroma = (qboolean) !!vr_chromatic->value;
	}

	if (vr_ovr_lumoverdrive->modified)
	{
		changeBackBuffers = 1;
		currentFrame = 0;
		vr_ovr_lumoverdrive->modified = false;
	}

	if (changeBackBuffers)
	{
		int i;
		float width, height;
		float ovrScale;

		OVR_CalculateState(&currentState);


		width = glConfig.render_width / (float) hmd->Resolution.w;
		height = glConfig.render_height / (float) hmd->Resolution.h;
		ovrScale = (width + height) / 2.0;
		ovrScale *= R_AntialiasGetScale() * vr_ovr_supersample->value;
		if (vr_ovr_debug->value)
			Com_Printf("VR_OVR: Set render target scale to %.2f\n",ovrScale);
		for (i = 0; i < 2; i++)
		{
			ovrRecti viewport = {{0,0}, {0,0}};
			renderInfo[i].renderTarget = ovrHmd_GetFovTextureSize(hmd, (ovrEyeType) i, renderInfo[i].eyeFov, ovrScale);
			viewport.Size.w = renderInfo[i].renderTarget.w;
			viewport.Size.h = renderInfo[i].renderTarget.h;
			ovrHmd_GetRenderScaleAndOffset(renderInfo[i].eyeFov, renderInfo[i].renderTarget, viewport, (ovrVector2f*) renderInfo[i].UVScaleOffset);

			if (renderInfo[i].renderTarget.w != renderInfo[i].eyeFBO.width || renderInfo[i].renderTarget.h != renderInfo[i].eyeFBO.height)
			{
				if (vr_ovr_debug->value)
					Com_Printf("VR_OVR: Set buffer %i to size %i x %i\n",i,renderInfo[i].renderTarget.w, renderInfo[i].renderTarget.h);
				R_ResizeFBO(renderInfo[i].renderTarget.w, renderInfo[i].renderTarget.h, 1, GL_RGBA8, &renderInfo[i].eyeFBO);
				R_ClearFBO(&renderInfo[i].eyeFBO);
			}

		}
	}
}
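For reference, the ovrScale computed above ends up as the pixelsPerDisplayPixel argument of ovrHmd_GetFovTextureSize, which scales the recommended texture roughly linearly per axis. A small illustrative sketch (values approximate, same hmd handle as above):

	{
		ovrSizei base   = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.0f);
		ovrSizei scaled = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.5f);
		/* scaled.w ~ 1.5 * base.w and scaled.h ~ 1.5 * base.h, so a
		   vr_ovr_supersample of 1.5 costs roughly 2.25x the fill rate per eye. */
	}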
Exemplo n.º 26
0
void OculusWorldDemoApp::CalculateHmdValues()
{
    // Initialize eye rendering information for ovrHmd_Configure.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2];
    eyeFov[0] = HmdDesc.DefaultEyeFov[0];
    eyeFov[1] = HmdDesc.DefaultEyeFov[1];

    // Clamp Fov based on our dynamically adjustable FovSideTanMax.
    // Most apps should use the default, but reducing Fov does reduce rendering cost.
    eyeFov[0] = FovPort::Min(eyeFov[0], FovPort(FovSideTanMax));
    eyeFov[1] = FovPort::Min(eyeFov[1], FovPort(FovSideTanMax));


    if (ForceZeroIpd)
    {
        // ForceZeroIpd does three things:
        //  1) Sets FOV to maximum symmetrical FOV based on both eyes
        //  2) Sets eye ViewAdjust values to 0.0 (effective IPD == 0)
        //  3) Uses only the Left texture for rendering.
        
        eyeFov[0] = FovPort::Max(eyeFov[0], eyeFov[1]);
        eyeFov[1] = eyeFov[0];

        Sizei recommendedTexSize = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,
                                                            eyeFov[0], DesiredPixelDensity);

        Sizei textureSize = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommendedTexSize);

        EyeRenderSize[0] = Sizei::Min(textureSize, recommendedTexSize);
        EyeRenderSize[1] = EyeRenderSize[0];

        // Store texture pointers that will be passed for rendering.
        EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
        EyeTexture[0].Header.TextureSize    = textureSize;
        EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
        // Right eye is the same.
        EyeTexture[1] = EyeTexture[0];
    }

    else
    {
        // Configure Stereo settings. Default pixel density is 1.0f.
        Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Left,  eyeFov[0], DesiredPixelDensity);
        Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(Hmd, ovrEye_Right, eyeFov[1], DesiredPixelDensity);

        if (RendertargetIsSharedByBothEyes)
        {
            Sizei  rtSize(recommendedTex0Size.w + recommendedTex1Size.w,
                          Alg::Max(recommendedTex0Size.h, recommendedTex1Size.h));

            // Use returned size as the actual RT size may be different due to HW limits.
            rtSize = EnsureRendertargetAtLeastThisBig(Rendertarget_BothEyes, rtSize);

            // Don't draw more than the recommended size; this also ensures that the resolution
            // reported in the overlay HUD is updated correctly for FOV/pixel density changes.
            EyeRenderSize[0] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(Sizei(rtSize.w/2, rtSize.h), recommendedTex1Size);

            // Store texture pointers that will be passed for rendering.
            // Same texture is used, but with different viewports.
            EyeTexture[0]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[0].Header.TextureSize    = rtSize;
            EyeTexture[0].Header.RenderViewport = Recti(Vector2i(0), EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_BothEyes].Tex;
            EyeTexture[1].Header.TextureSize    = rtSize;
            EyeTexture[1].Header.RenderViewport = Recti(Vector2i((rtSize.w+1)/2, 0), EyeRenderSize[1]);
        }

        else
        {
            Sizei tex0Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Left,  recommendedTex0Size);
            Sizei tex1Size = EnsureRendertargetAtLeastThisBig(Rendertarget_Right, recommendedTex1Size);

            EyeRenderSize[0] = Sizei::Min(tex0Size, recommendedTex0Size);
            EyeRenderSize[1] = Sizei::Min(tex1Size, recommendedTex1Size);

            // Store texture pointers and viewports that will be passed for rendering.
            EyeTexture[0]                       = RenderTargets[Rendertarget_Left].Tex;
            EyeTexture[0].Header.TextureSize    = tex0Size;
            EyeTexture[0].Header.RenderViewport = Recti(EyeRenderSize[0]);
            EyeTexture[1]                       = RenderTargets[Rendertarget_Right].Tex;
            EyeTexture[1].Header.TextureSize    = tex1Size;
            EyeTexture[1].Header.RenderViewport = Recti(EyeRenderSize[1]);
        }
    }

    // Hmd caps.
    unsigned hmdCaps = (VsyncEnabled ? 0 : ovrHmdCap_NoVSync) |
                       ovrHmdCap_LatencyTest;
    if (IsLowPersistence)
        hmdCaps |= ovrHmdCap_LowPersistence;
    if (DynamicPrediction)
        hmdCaps |= ovrHmdCap_DynamicPrediction;

    ovrHmd_SetEnabledCaps(Hmd, hmdCaps);


	ovrRenderAPIConfig config         = pRender->Get_ovrRenderAPIConfig();
    unsigned           distortionCaps = ovrDistortionCap_Chromatic;
    if (TimewarpEnabled)
        distortionCaps |= ovrDistortionCap_TimeWarp;

    if (!ovrHmd_ConfigureRendering( Hmd, &config, distortionCaps,
                                    eyeFov, EyeRenderDesc ))
    {
        // Rendering configuration failed; bail out rather than continue unconfigured.
        return;
    }

    if (ForceZeroIpd)
    {
        // Remove IPD adjust
        EyeRenderDesc[0].ViewAdjust = Vector3f(0);
        EyeRenderDesc[1].ViewAdjust = Vector3f(0);
    }

    // ovrHmdCap_LatencyTest - enables internal latency feedback
    unsigned sensorCaps = ovrSensorCap_Orientation|ovrSensorCap_YawCorrection;
    if (PositionTrackingEnabled)
        sensorCaps |= ovrSensorCap_Position;
      
    if (StartSensorCaps != sensorCaps)
    {
        ovrHmd_StartSensor(Hmd, sensorCaps, 0);
        StartSensorCaps = sensorCaps;
    }    

    // Calculate projections
    Projection[0] = ovrMatrix4f_Projection(EyeRenderDesc[0].Fov,  0.01f, 10000.0f, true);
    Projection[1] = ovrMatrix4f_Projection(EyeRenderDesc[1].Fov,  0.01f, 10000.0f, true);

    float    orthoDistance = 0.8f; // the 2D overlay plane sits 0.8 meters from the camera
    Vector2f orthoScale0   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[0].PixelsPerTanAngleAtCenter);
    Vector2f orthoScale1   = Vector2f(1.0f) / Vector2f(EyeRenderDesc[1].PixelsPerTanAngleAtCenter);
    
    OrthoProjection[0] = ovrMatrix4f_OrthoSubProjection(Projection[0], orthoScale0, orthoDistance,
                                                        EyeRenderDesc[0].ViewAdjust.x);
    OrthoProjection[1] = ovrMatrix4f_OrthoSubProjection(Projection[1], orthoScale1, orthoDistance,
                                                        EyeRenderDesc[1].ViewAdjust.x);
}
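This demo still uses the pre-0.4 sensor API (ovrHmd_StartSensor / ViewAdjust). A hedged sketch of how the predicted head pose read each frame combines with the per-eye ViewAdjust zeroed above under ForceZeroIpd; the view-matrix construction shown is one common convention, not the demo's verbatim code:

    ovrSensorState ss = ovrHmd_GetSensorState(Hmd, ovr_GetTimeInSeconds());

    if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
    {
        Posef pose = ss.Predicted.Pose;

        for (int eye = 0; eye < 2; eye++)
        {
            // Undo the head pose, then shift by the per-eye ViewAdjust
            // (half-IPD; zero when ForceZeroIpd removed it above).
            Matrix4f view = Matrix4f::Translation(Vector3f(EyeRenderDesc[eye].ViewAdjust))
                          * Matrix4f(pose.Rotation.Inverted())
                          * Matrix4f::Translation(-pose.Translation);
            // view then feeds the renderer together with Projection[eye].
        }
    }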
Exemplo n.º 27
0
//-------------------------------------------------------------------------------------
int Init()
{
    // Initializes LibOVR, and the Rift
    ovr_Initialize();
	HMD = ovrHmd_Create(0);
    if (!HMD)
    {
        MessageBoxA(NULL,"Oculus Rift not detected.","", MB_OK);
        return(1);
    }
	if (HMD->ProductName[0] == '\0') 
        MessageBoxA(NULL,"Rift detected, display not enabled.","", MB_OK);

	//Setup Window and Graphics - use window frame if relying on Oculus driver
	const int backBufferMultisample = 1;
    bool UseAppWindowFrame = (HMD->HmdCaps & ovrHmdCap_ExtendDesktop) ? false : true;
    HWND window = Util_InitWindowAndGraphics(Recti(HMD->WindowsPos, HMD->Resolution),
                                         FullScreen, backBufferMultisample, UseAppWindowFrame,&pRender);
	if (!window) return 1;
	ovrHmd_AttachToWindow(HMD, window, NULL, NULL);

    //Configure Stereo settings.
    Sizei recommendedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left,  HMD->DefaultEyeFov[0], 1.0f);
    Sizei recommendedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMD->DefaultEyeFov[1], 1.0f);
	Sizei RenderTargetSize;
    RenderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
    RenderTargetSize.h = max ( recommendedTex0Size.h, recommendedTex1Size.h );

    const int eyeRenderMultisample = 1;
    pRendertargetTexture = pRender->CreateTexture(Texture_RGBA | Texture_RenderTarget |
                                                  eyeRenderMultisample,
                                                  RenderTargetSize.w, RenderTargetSize.h, NULL);
    // The actual RT size may be different due to HW limits.
    RenderTargetSize.w = pRendertargetTexture->GetWidth();
    RenderTargetSize.h = pRendertargetTexture->GetHeight();

    // Initialize eye rendering information.
    // The viewport sizes are re-computed in case RenderTargetSize changed due to HW limitations.
    ovrFovPort eyeFov[2] = { HMD->DefaultEyeFov[0], HMD->DefaultEyeFov[1] } ;

    EyeRenderViewport[0].Pos  = Vector2i(0,0);
    EyeRenderViewport[0].Size = Sizei(RenderTargetSize.w / 2, RenderTargetSize.h);
    EyeRenderViewport[1].Pos  = Vector2i((RenderTargetSize.w + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

    #if SDK_RENDER
	// Query D3D texture data.
    EyeTexture[0].D3D11.Header.API            = ovrRenderAPI_D3D11;
    EyeTexture[0].D3D11.Header.TextureSize    = RenderTargetSize;
    EyeTexture[0].D3D11.Header.RenderViewport = EyeRenderViewport[0];
    EyeTexture[0].D3D11.pTexture              = pRendertargetTexture->Tex.GetPtr();
    EyeTexture[0].D3D11.pSRView               = pRendertargetTexture->TexSv.GetPtr();

    // Right eye uses the same texture, but different rendering viewport.
    EyeTexture[1] = EyeTexture[0];
    EyeTexture[1].D3D11.Header.RenderViewport = EyeRenderViewport[1];

    // Configure d3d11.
    ovrD3D11Config d3d11cfg;
    d3d11cfg.D3D11.Header.API         = ovrRenderAPI_D3D11;
    d3d11cfg.D3D11.Header.RTSize      = Sizei(HMD->Resolution.w, HMD->Resolution.h);
    d3d11cfg.D3D11.Header.Multisample = backBufferMultisample;
    d3d11cfg.D3D11.pDevice            = pRender->Device;
    d3d11cfg.D3D11.pDeviceContext     = pRender->Context;
    d3d11cfg.D3D11.pBackBufferRT      = pRender->BackBufferRT;
    d3d11cfg.D3D11.pSwapChain         = pRender->SwapChain;

    if (!ovrHmd_ConfigureRendering(HMD, &d3d11cfg.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc))	return(1);
    #else
	//Shader vertex format
	D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] = {
		{"Position", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 0,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 1, DXGI_FORMAT_R32_FLOAT,      0, 8,  D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"Position", 2, DXGI_FORMAT_R32_FLOAT,      0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT,   0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT,   0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0},
		{"TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT,   0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0}};
	
	//Distortion vertex shader
	const char* vertexShader = 
		"float2 EyeToSourceUVScale, EyeToSourceUVOffset;                                        \n"
		"float4x4 EyeRotationStart, EyeRotationEnd;                                             \n"
		"float2 TimewarpTexCoord(float2 TexCoord, float4x4 rotMat)                              \n"
		"{                                                                                      \n"
		// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic 
		// aberration and distortion). These are now "real world" vectors in direction (x,y,1) 
		// relative to the eye of the HMD.	Apply the 3x3 timewarp rotation to these vectors.
		"    float3 transformed = float3( mul ( rotMat, float4(TexCoord.xy, 1, 1) ).xyz);       \n"
		// Project them back onto the Z=1 plane of the rendered images.
		"    float2 flattened = (transformed.xy / transformed.z);                               \n"
		// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
		"    return(EyeToSourceUVScale * flattened + EyeToSourceUVOffset);                      \n"
		"}                                                                                      \n"
		"void main(in float2  Position   : POSITION,  in float timewarpLerpFactor : POSITION1,  \n"
		"          in float   Vignette   : POSITION2, in float2 TexCoord0         : TEXCOORD0,  \n"
		"          in float2  TexCoord1  : TEXCOORD1, in float2 TexCoord2         : TEXCOORD2,  \n"
		"          out float4 oPosition  : SV_Position,                                         \n"
		"          out float2 oTexCoord0 : TEXCOORD0, out float2 oTexCoord1 : TEXCOORD1,        \n"
		"          out float2 oTexCoord2 : TEXCOORD2, out float  oVignette  : TEXCOORD3)        \n"
		"{                                                                                      \n"
		"    float4x4 lerpedEyeRot = lerp(EyeRotationStart, EyeRotationEnd, timewarpLerpFactor);\n"
		"    oTexCoord0  = TimewarpTexCoord(TexCoord0,lerpedEyeRot);                            \n"
		"    oTexCoord1  = TimewarpTexCoord(TexCoord1,lerpedEyeRot);                            \n"
		"    oTexCoord2  = TimewarpTexCoord(TexCoord2,lerpedEyeRot);                            \n"
		"    oPosition = float4(Position.xy, 0.5, 1.0);    oVignette = Vignette;                \n"
		"}";

	//Distortion pixel shader
	const char* pixelShader = 
		"Texture2D Texture   : register(t0);                                                    \n"
		"SamplerState Linear : register(s0);                                                    \n"
		"float4 main(in float4 oPosition  : SV_Position,  in float2 oTexCoord0 : TEXCOORD0,     \n"
		"            in float2 oTexCoord1 : TEXCOORD1,    in float2 oTexCoord2 : TEXCOORD2,     \n"
		"            in float  oVignette  : TEXCOORD3)    : SV_Target                           \n"
		"{                                                                                      \n"
		// 3 samples for fixing chromatic aberrations
		"    float R = Texture.Sample(Linear, oTexCoord0.xy).r;                                 \n"
		"    float G = Texture.Sample(Linear, oTexCoord1.xy).g;                                 \n"
		"    float B = Texture.Sample(Linear, oTexCoord2.xy).b;                                 \n"
		"    return (oVignette*float4(R,G,B,1));                                                \n"
		"}";
	pRender->InitShaders(vertexShader, pixelShader, &Shaders, &VertexIL,DistortionMeshVertexDesc,6);

    for ( int eyeNum = 0; eyeNum < 2; eyeNum++ )
    {
        // Allocate mesh vertices, registering with renderer using the OVR vertex format.
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(HMD, (ovrEyeType) eyeNum, eyeFov[eyeNum],
			                        ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp, &meshData);
        MeshVBs[eyeNum] = *pRender->CreateBuffer();
        MeshVBs[eyeNum]->Data(Buffer_Vertex,meshData.pVertexData,sizeof(ovrDistortionVertex)*meshData.VertexCount);
        MeshIBs[eyeNum] = *pRender->CreateBuffer();
        MeshIBs[eyeNum]->Data(Buffer_Index,meshData.pIndexData,sizeof(unsigned short) * meshData.IndexCount);
        ovrHmd_DestroyDistortionMesh( &meshData );

		//Create eye render description for use later
		EyeRenderDesc[eyeNum] = ovrHmd_GetRenderDesc(HMD, (ovrEyeType) eyeNum,  eyeFov[eyeNum]);

		//Do scale and offset
		ovrHmd_GetRenderScaleAndOffset(eyeFov[eyeNum],RenderTargetSize, EyeRenderViewport[eyeNum], UVScaleOffset[eyeNum]);
	}

    #endif

    ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);

	// Start the sensor which informs of the Rift's pose and motion
    ovrHmd_ConfigureTracking(HMD, ovrTrackingCap_Orientation |
                            ovrTrackingCap_MagYawCorrection |
                            ovrTrackingCap_Position, 0);

    // This creates lights and models.
  	pRoomScene = new Scene;
	sbuilder.PopulateRoomScene(pRoomScene, pRender);

    return (0);
}
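The SDK_RENDER path above is completed by a per-frame loop in which the SDK performs distortion, timewarp and Present(). A hedged sketch reusing the names from the listing; the per-eye camera math and scene draw are abbreviated to comments:

    static unsigned int frameIndex = 0;
    ovrHmd_BeginFrame(HMD, frameIndex);

    ovrVector3f hmdToEyeViewOffset[2] = { EyeRenderDesc[0].HmdToEyeViewOffset,
                                          EyeRenderDesc[1].HmdToEyeViewOffset };
    ovrPosef eyeRenderPose[2];
    ovrHmd_GetEyePoses(HMD, frameIndex, hmdToEyeViewOffset, eyeRenderPose, NULL);

    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++)
    {
        ovrEyeType eye = HMD->EyeRenderOrder[eyeIndex];
        // Set EyeRenderViewport[eye] on the render target, build view/projection
        // from EyeRenderDesc[eye] and eyeRenderPose[eye], then draw pRoomScene.
    }

    // Distortion, timewarp and Present() happen inside the SDK.
    ovrHmd_EndFrame(HMD, eyeRenderPose, &EyeTexture[0].Texture);
    frameIndex++;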
Exemplo n.º 28
0
OculusDevice::OculusDevice(float nearClip, float farClip, bool useTimewarp) : m_hmdDevice(0),
	m_nearClip(nearClip), m_farClip(farClip),
	m_useTimeWarp(useTimewarp),
	m_position(osg::Vec3(0.0f, 0.0f, 0.0f)),
	m_orientation(osg::Quat(0.0f, 0.0f, 0.0f, 1.0f))
{
	ovr_Initialize();
	
	// Enumerate HMD devices
	int numberOfDevices = ovrHmd_Detect();
	osg::notify(osg::DEBUG_INFO) << "Number of connected devices: " << numberOfDevices << std::endl;

	// Get first available HMD
	m_hmdDevice = ovrHmd_Create(0);
	
	// If no HMD is found try an emulated device
	if (!m_hmdDevice) {
		osg::notify(osg::WARN) << "Warning: No device could be found. Creating emulated device " << std::endl;
		m_hmdDevice = ovrHmd_CreateDebug(ovrHmd_DK1);
		ovrHmd_ResetFrameTiming(m_hmdDevice, 0);
	}

	if (m_hmdDevice) {
		// Print out some information about the HMD
		osg::notify(osg::ALWAYS) << "Product:         " << m_hmdDevice->ProductName << std::endl;
		osg::notify(osg::ALWAYS) << "Manufacturer:    " << m_hmdDevice->Manufacturer << std::endl;
		osg::notify(osg::ALWAYS) << "VendorId:        " << m_hmdDevice->VendorId << std::endl;
		osg::notify(osg::ALWAYS) << "ProductId:       " << m_hmdDevice->ProductId << std::endl;
		osg::notify(osg::ALWAYS) << "SerialNumber:    " << m_hmdDevice->SerialNumber << std::endl;
		osg::notify(osg::ALWAYS) << "FirmwareVersion: " << m_hmdDevice->FirmwareMajor << "."  << m_hmdDevice->FirmwareMinor << std::endl;

		// Get more details about the HMD.
		m_resolution = m_hmdDevice->Resolution;
		
		// Compute recommended render texture size
		float pixelsPerDisplayPixel = 1.0f; // Decrease this value to shrink the render texture on lower-performance hardware; values above 1.0 are unnecessary.

		ovrSizei recommendedLeftTextureSize = ovrHmd_GetFovTextureSize(m_hmdDevice, ovrEye_Left, m_hmdDevice->DefaultEyeFov[0], pixelsPerDisplayPixel);
		ovrSizei recommendedRightTextureSize = ovrHmd_GetFovTextureSize(m_hmdDevice, ovrEye_Right, m_hmdDevice->DefaultEyeFov[1], pixelsPerDisplayPixel);

		// Compute size of render target
		m_renderTargetSize.w = recommendedLeftTextureSize.w + recommendedRightTextureSize.w;
		m_renderTargetSize.h = osg::maximum(recommendedLeftTextureSize.h, recommendedRightTextureSize.h);

		// Initialize ovrEyeRenderDesc struct.
		m_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(m_hmdDevice, ovrEye_Left, m_hmdDevice->DefaultEyeFov[0]);
		m_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(m_hmdDevice, ovrEye_Right, m_hmdDevice->DefaultEyeFov[1]);
		
		ovrVector3f leftEyeAdjust = m_eyeRenderDesc[0].ViewAdjust;
		m_leftEyeAdjust.set(leftEyeAdjust.x, leftEyeAdjust.y, leftEyeAdjust.z);
		ovrVector3f rightEyeAdjust = m_eyeRenderDesc[1].ViewAdjust;
		m_rightEyeAdjust.set(rightEyeAdjust.x, rightEyeAdjust.y, rightEyeAdjust.z);

		bool isRightHanded = true;
		
		ovrMatrix4f leftEyeProjectionMatrix = ovrMatrix4f_Projection(m_eyeRenderDesc[0].Fov, m_nearClip, m_farClip, isRightHanded);
		// Transpose matrix
		m_leftEyeProjectionMatrix.set(leftEyeProjectionMatrix.M[0][0], leftEyeProjectionMatrix.M[1][0], leftEyeProjectionMatrix.M[2][0], leftEyeProjectionMatrix.M[3][0],
			leftEyeProjectionMatrix.M[0][1], leftEyeProjectionMatrix.M[1][1], leftEyeProjectionMatrix.M[2][1], leftEyeProjectionMatrix.M[3][1],
			leftEyeProjectionMatrix.M[0][2], leftEyeProjectionMatrix.M[1][2], leftEyeProjectionMatrix.M[2][2], leftEyeProjectionMatrix.M[3][2],
			leftEyeProjectionMatrix.M[0][3], leftEyeProjectionMatrix.M[1][3], leftEyeProjectionMatrix.M[2][3], leftEyeProjectionMatrix.M[3][3]);

		ovrMatrix4f rightEyeProjectionMatrix = ovrMatrix4f_Projection(m_eyeRenderDesc[1].Fov, m_nearClip, m_farClip, isRightHanded);
		// Transpose matrix
		m_rightEyeProjectionMatrix.set(rightEyeProjectionMatrix.M[0][0], rightEyeProjectionMatrix.M[1][0], rightEyeProjectionMatrix.M[2][0], rightEyeProjectionMatrix.M[3][0],
			rightEyeProjectionMatrix.M[0][1], rightEyeProjectionMatrix.M[1][1], rightEyeProjectionMatrix.M[2][1], rightEyeProjectionMatrix.M[3][1],
			rightEyeProjectionMatrix.M[0][2], rightEyeProjectionMatrix.M[1][2], rightEyeProjectionMatrix.M[2][2], rightEyeProjectionMatrix.M[3][2],
			rightEyeProjectionMatrix.M[0][3], rightEyeProjectionMatrix.M[1][3], rightEyeProjectionMatrix.M[2][3], rightEyeProjectionMatrix.M[3][3]);

		// Start the sensor which provides the Rift’s pose and motion.
		ovrHmd_ConfigureTracking(m_hmdDevice, ovrTrackingCap_Orientation |
			ovrTrackingCap_MagYawCorrection |
			ovrTrackingCap_Position, 0);

		beginFrameTiming();
	}
}
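A per-frame pose update typically pairs with this constructor. A hedged sketch, assuming a hypothetical updatePose() method that writes the m_position/m_orientation members initialized above:

void OculusDevice::updatePose() // hypothetical per-frame counterpart to the constructor
{
	ovrTrackingState ts = ovrHmd_GetTrackingState(m_hmdDevice, ovr_GetTimeInSeconds());

	if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
		ovrPosef pose = ts.HeadPose.ThePose;
		m_position.set(pose.Position.x, pose.Position.y, pose.Position.z);
		m_orientation.set(pose.Orientation.x, pose.Orientation.y, pose.Orientation.z, pose.Orientation.w);
	}
}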
Exemplo n.º 29
0
HRESULT InitOculusRiftObjects(unsigned distortionCaps, ovrHmd HMD)
{
	HRESULT hr = S_OK;

	// Get the stereo rendering settings.
	// These are needed to decide the size of the render target we create.
	// Note that DK1 reports rather odd sizes here, so it is unclear whether they can be trusted as-is.
	ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Left, HMDDesc.DefaultEyeFov[0], 1.0f);
	ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(HMD, ovrEye_Right, HMDDesc.DefaultEyeFov[1], 1.0f);
	ovrSizei RenderTargetSize;
	RenderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
	RenderTargetSize.h = max(recommendedTex0Size.h, recommendedTex1Size.h);

	{
		// Create the texture for the render target.
		// The scene is drawn into this.

		D3D11_TEXTURE2D_DESC desc;
		ZeroMemory(&desc, sizeof(desc));
		desc.Width = RenderTargetSize.w;
		desc.Height = RenderTargetSize.h;
		desc.MipLevels = 1;
		desc.ArraySize = 1;
		desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
		desc.SampleDesc.Count = 1;
		desc.Usage = D3D11_USAGE_DEFAULT;
		desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
		// Create the texture
		g_pd3dDevice->CreateTexture2D(&desc, NULL, &g_pTextureOculus);
		// Creating a shader resource view lets the texture be sampled when drawing
		g_pd3dDevice->CreateShaderResourceView(g_pTextureOculus, NULL, &g_pShaderResViewOculus);
		// Create the render target view
		g_pd3dDevice->CreateRenderTargetView(g_pTextureOculus, NULL, &g_pRenderTargetViewOculus);

		// Fetch the size that was actually allocated (it may differ from the request)
		g_pTextureOculus->GetDesc(&desc);
		RenderTargetSize.w = desc.Width;
		RenderTargetSize.h = desc.Height;

		// Create the depth buffer (with stencil)
		D3D11_TEXTURE2D_DESC descDepth;
		ZeroMemory(&descDepth, sizeof(descDepth));
		descDepth.Width = RenderTargetSize.w;
		descDepth.Height = RenderTargetSize.h;
		descDepth.MipLevels = 1;
		descDepth.ArraySize = 1;
		descDepth.Format = DXGI_FORMAT_D24_UNORM_S8_UINT;
		descDepth.SampleDesc.Count = 1;
		descDepth.SampleDesc.Quality = 0;
		descDepth.Usage = D3D11_USAGE_DEFAULT;
		descDepth.BindFlags = D3D11_BIND_DEPTH_STENCIL;
		descDepth.CPUAccessFlags = 0;
		descDepth.MiscFlags = 0;
		g_pd3dDevice->CreateTexture2D(&descDepth, NULL, &g_pDepthStencilOculus);

		// Create the depth-stencil view
		D3D11_DEPTH_STENCIL_VIEW_DESC descDSV;
		ZeroMemory(&descDSV, sizeof(descDSV));
		descDSV.Format = descDepth.Format;
		descDSV.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
		descDSV.Texture2D.MipSlice = 0;
		g_pd3dDevice->CreateDepthStencilView(g_pDepthStencilOculus, &descDSV, &g_pDepthStencilViewOculus);
	}


	// Get the rendering information for each eye.
	ovrFovPort eyeFov[2] = { HMDDesc.DefaultEyeFov[0], HMDDesc.DefaultEyeFov[1] };
	EyeRenderDesc[0] = ovrHmd_GetRenderDesc(HMD, ovrEye_Left, eyeFov[0]);
	EyeRenderDesc[1] = ovrHmd_GetRenderDesc(HMD, ovrEye_Right, eyeFov[1]);

	// Set the capabilities to enable on the HMD.
	ovrHmd_SetEnabledCaps(HMD, ovrHmdCap_LowPersistence |
		ovrHmdCap_LatencyTest);

	// Start using the sensor.
	// Enables yaw correction, orientation, and position tracking.
	ovrHmd_StartSensor(HMD, ovrSensorCap_Orientation |
		ovrSensorCap_YawCorrection |
		ovrSensorCap_Position, 0);

	// Store the viewport information.
	EyeRenderViewport[0].Pos.x = 0;
	EyeRenderViewport[0].Pos.y = 0;
	EyeRenderViewport[0].Size.w = RenderTargetSize.w / 2;
	EyeRenderViewport[0].Size.h = RenderTargetSize.h;
	EyeRenderViewport[1].Pos.x = (RenderTargetSize.w + 1) / 2;
	EyeRenderViewport[1].Pos.y = 0;
	EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;



	// Create the distortion mesh for each eye
	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		// Get the mesh data
		ovrDistortionMesh meshData;
		ovrHmd_CreateDistortionMesh(HMD,
			EyeRenderDesc[eyeNum].Eye, EyeRenderDesc[eyeNum].Fov,
			distortionCaps, &meshData);

		ovrHmd_GetRenderScaleAndOffset(EyeRenderDesc[eyeNum].Fov,
			RenderTargetSize, EyeRenderViewport[eyeNum],
			(ovrVector2f*)UVScaleOffset[eyeNum]);

		// Parse it into the vertex format we prepared on our side.
		DistortionVertex * pVBVerts = (DistortionVertex*)OVR_ALLOC(
			sizeof(DistortionVertex)* meshData.VertexCount);
		DistortionVertex * v = pVBVerts;
		ovrDistortionVertex * ov = meshData.pVertexData;
		for (unsigned vertNum = 0; vertNum < meshData.VertexCount; vertNum++)
		{
			v->Pos.x = ov->Pos.x;
			v->Pos.y = ov->Pos.y;
			v->TexR = (*(ovrVector2f*)&ov->TexR);
			v->TexG = (*(ovrVector2f*)&ov->TexG);
			v->TexB = (*(ovrVector2f*)&ov->TexB);
			v->Col[0] = v->Col[1] = v->Col[2] = (BYTE)(ov->VignetteFactor*255.99f);
			v->Col[3] = (BYTE)(ov->TimeWarpFactor*255.99f);
			v++; ov++;
		}

		// Create the vertex buffer from the mesh vertex data.
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(DistortionVertex)* meshData.VertexCount;
		bd.BindFlags = D3D11_BIND_VERTEX_BUFFER;
		bd.CPUAccessFlags = 0;
		D3D11_SUBRESOURCE_DATA InitData = { 0 };
		InitData.pSysMem = pVBVerts;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pVertexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;

		// Likewise, create the index buffer.
		bd.ByteWidth = sizeof(unsigned short)* meshData.IndexCount;
		bd.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bd.CPUAccessFlags = 0;
		InitData.pSysMem = meshData.pIndexData;
		hr = g_pd3dDevice->CreateBuffer(&bd, &InitData, &g_pIndexBufferOculus[eyeNum]);
		if (FAILED(hr))
			return hr;
		oculusIndexCount = meshData.IndexCount;

		OVR_FREE(pVBVerts);
		ovrHmd_DestroyDistortionMesh(&meshData);
	}


	{
		// Compile the vertex shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "VS_TimeWarp", "vs_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "ピクセルシェーダーを読み込めませんでした。", "Error", MB_OK);
			return hr;
		}

		// Create the vertex shader object from the compiled blob
		hr = g_pd3dDevice->CreateVertexShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pVertexShaderOculus);
		if (FAILED(hr))
			return hr;

		// Input layout format
		static D3D11_INPUT_ELEMENT_DESC DistortionMeshVertexDesc[] =
		{
			{ "Position", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 8, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 1, DXGI_FORMAT_R32G32_FLOAT, 0, 16, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "TexCoord", 2, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
			{ "Color", 0, DXGI_FORMAT_R8G8B8A8_UNORM, 0, 32, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		};

		// Create the input layout
		hr = g_pd3dDevice->CreateInputLayout(DistortionMeshVertexDesc, ARRAYSIZE(DistortionMeshVertexDesc), pBlob->GetBufferPointer(),
			pBlob->GetBufferSize(), &g_pVertexLayoutOculus);
		if (FAILED(hr))
			return hr;

		pBlob->Release();
	}

	{
		// Compile the pixel shader
		ID3DBlob* pBlob = NULL;
		hr = CompileShaderFromFile("OculusRift.hlsl", "PS_Oculus", "ps_4_0", &pBlob);
		if (FAILED(hr))
		{
			MessageBox(NULL, "ピクセルシェーダーを読み込めませんでした。", "Error", MB_OK);
			return hr;
		}

		// Create the pixel shader object from the compiled blob
		hr = g_pd3dDevice->CreatePixelShader(pBlob->GetBufferPointer(), pBlob->GetBufferSize(), NULL, &g_pPixelShaderOculus);
		pBlob->Release();
		if (FAILED(hr))
			return hr;
	}

	// Create the constant buffer
	// holding the settings for the shader that draws the distortion mesh
	{
		D3D11_BUFFER_DESC bd = { 0 };
		bd.Usage = D3D11_USAGE_DEFAULT;
		bd.ByteWidth = sizeof(OculusRiftSettings);
		bd.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
		bd.CPUAccessFlags = 0;
		hr = g_pd3dDevice->CreateBuffer(&bd, NULL, &g_pConstantBufferOculus);
		if (FAILED(hr))
			return hr;
	}

	return hr;
}
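At render time, the constant buffer created above is refilled once per eye with the scale/offset pair and the timewarp matrices, then the distortion mesh is drawn. A hedged sketch using the 0.3.x-era calls that match ovrHmd_StartSensor above; the OculusRiftSettings field names and g_pImmediateContext are assumptions:

	for (int eyeNum = 0; eyeNum < 2; eyeNum++)
	{
		ovrPosef pose = ovrHmd_GetEyePose(HMD, EyeRenderDesc[eyeNum].Eye);
		ovrMatrix4f timeWarpMatrices[2];
		ovrHmd_GetEyeTimewarpMatrices(HMD, EyeRenderDesc[eyeNum].Eye, pose, timeWarpMatrices);

		OculusRiftSettings settings;              // field names assumed
		settings.EyeToSourceUVScale  = UVScaleOffset[eyeNum][0];
		settings.EyeToSourceUVOffset = UVScaleOffset[eyeNum][1];
		settings.EyeRotationStart    = timeWarpMatrices[0];
		settings.EyeRotationEnd      = timeWarpMatrices[1];
		g_pImmediateContext->UpdateSubresource(g_pConstantBufferOculus, 0, NULL, &settings, 0, 0);

		// Bind g_pVertexBufferOculus[eyeNum] / g_pIndexBufferOculus[eyeNum],
		// g_pVertexLayoutOculus, the Oculus shaders and g_pShaderResViewOculus,
		// then draw oculusIndexCount indices.
	}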
void OcculusCameraComponent::init()
{
	ovr_Initialize();
	parent->getStage()->getGame()->getGraphicsHandle()->setAutoBufferSwap( false );

	hmd = ovrHmd_Create(0);
	if (hmd)
	{
		ovrSizei resolution = hmd->Resolution;
		resolution; // referenced only to silence the unused-variable warning
	}
	else
	{
		hmd = ovrHmd_CreateDebug( ovrHmdType::ovrHmd_DK2 );
	}
	// Start the sensor which provides the Rift’s pose and motion. 
	//ovrHmd_SetEnabledCaps(hmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
	ovrHmd_ConfigureTracking(hmd, ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position, 0);
	ovrHmd_RecenterPose( hmd );

	ovrFovPort eyeFov[2] = { hmd->DefaultEyeFov[0], hmd->DefaultEyeFov[1] } ;

	ovrGLConfig oglConfig;
	oglConfig.OGL.Header.API         = ovrRenderAPI_OpenGL;
    oglConfig.OGL.Header.RTSize      = OVR::Sizei(hmd->Resolution.w, hmd->Resolution.h);
    oglConfig.OGL.Header.Multisample = 1;
	oglConfig.OGL.Window = parent->getStage()->getGame()->getGraphicsHandle()->getHandle();
	oglConfig.OGL.DC = parent->getStage()->getGame()->getGraphicsHandle()->getHDC();
	#pragma comment(lib,"libovrd.lib")
	ovrBool result = ovrHmd_ConfigureRendering( hmd, &oglConfig.Config,
		                           ovrDistortionCap_Chromatic | ovrDistortionCap_Vignette |
                                   ovrDistortionCap_TimeWarp | ovrDistortionCap_Overdrive,
								   eyeFov, EyeRenderDesc);

	result; // referenced only to silence the unused-variable warning

	ovrHmd_AttachToWindow(hmd, oglConfig.OGL.Window, NULL, NULL);

	//Sets up FBOS
	// Configure Stereo settings. 
	OVR::Sizei recommenedTex0Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left, hmd->DefaultEyeFov[0], 1.0f);
	OVR::Sizei recommenedTex1Size = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd->DefaultEyeFov[1], 1.0f);

	OVR::Sizei renderTargetSize;
	renderTargetSize.w = recommenedTex0Size.w + recommenedTex1Size.w;
	renderTargetSize.h = max ( recommenedTex0Size.h, recommenedTex1Size.h );

	renderTarget = FrameBufferObject::createFrameBuffer( renderTargetSize.w, renderTargetSize.h );

	//Set up viewports
	EyeRenderViewport[0].Pos  = OVR::Vector2i(0,0);
	EyeRenderViewport[0].Size = OVR::Sizei(renderTarget->width / 2, renderTarget->height);
    EyeRenderViewport[1].Pos  = OVR::Vector2i((renderTarget->width + 1) / 2, 0);
    EyeRenderViewport[1].Size = EyeRenderViewport[0].Size;

	
	EyeTexture[0].OGL.Header.API = ovrRenderAPI_OpenGL;
	EyeTexture[0].OGL.Header.TextureSize = renderTargetSize;
	EyeTexture[0].OGL.Header.RenderViewport = EyeRenderViewport[0];
	EyeTexture[0].OGL.TexId = renderTarget->colorTexture->textureID;

	EyeTexture[1].OGL.Header.API = ovrRenderAPI_OpenGL;
	EyeTexture[1].OGL.Header.TextureSize = renderTargetSize;
	EyeTexture[1].OGL.Header.RenderViewport = EyeRenderViewport[1];
	EyeTexture[1].OGL.TexId = renderTarget->colorTexture->textureID;
}
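None of the examples above show teardown. The common shutdown order is to destroy the HMD handle before shutting the library down; a minimal hedged sketch (shutdown() is a hypothetical counterpart to init()):

void OcculusCameraComponent::shutdown() // hypothetical counterpart to init()
{
	if (hmd)
	{
		ovrHmd_Destroy(hmd);
		hmd = nullptr;
	}
	ovr_Shutdown();
}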