void LookAtTransformationElement::calcMatrix(Matrix &result) const
{
    MatrixLookAt(result, 
                 getEyePosition(), 
                 getLookAtPosition(), 
                 getUpDirection()   );
}
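For reference, a minimal standalone sketch of the same call outside a class method; the (result, eye, at, up) argument order and the Pnt3f/Vec3f types are assumed from the call sites in these examples:

// Sketch only: build a view matrix directly with assumed literal arguments.
Matrix view;
Pnt3f  eye(0.f, 2.f, 10.f);
Pnt3f  at (0.f, 0.f,  0.f);
Vec3f  up (0.f, 1.f,  0.f);

MatrixLookAt(view, eye, at, up);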
Example #2
    // Member-function fragment: C, csys, orientation, snapP and prim_o are
    // members of the enclosing class.
    void snap(Matrix& m) {
        if (csys) C = csys->getWorldMatrix();   // world matrix of the reference coordinate system

        if (orientation == POINT) {
            // eye = snap point, target = snap point + primitive offset, up = primitive direction
            MatrixLookAt(m, snapP, snapP+Vec3f(prim_o.getPosition()), prim_o.getDirection());
            m.multLeft(C);                       // bring the result into the reference frame
        }
    }
void
ColladaNode::handleLookAt(domLookat *lookat)
{
    if(lookat == NULL)
        return;
	
    domNodeRef        node   = getDOMElementAs<domNode>();
    // <lookat> holds nine floats: eye (0-2), interest point (3-5), up (6-8)
    Pnt3f eyePosition(lookat->getValue()[0], lookat->getValue()[1], lookat->getValue()[2]);
    Pnt3f lookAtPoint(lookat->getValue()[3], lookat->getValue()[4], lookat->getValue()[5]);
    Vec3f upDirection(lookat->getValue()[6], lookat->getValue()[7], lookat->getValue()[8]);

    if(getGlobal()->getOptions()->getFlattenNodeXForms())
    {
        LookAtTransformationElementUnrecPtr LookAtElement = LookAtTransformationElement::create();
        LookAtElement->setEyePosition(eyePosition);
        LookAtElement->setLookAtPosition(lookAtPoint);
        LookAtElement->setUpDirection(upDirection);
        setName(LookAtElement, lookat->getSid());

        appendStackedXForm(LookAtElement, node);
    }
    else
    {

        TransformUnrecPtr xform  = Transform::create();
        NodeUnrecPtr      xformN = makeNodeFor(xform);

        Matrix lookAtMatrix;
        MatrixLookAt(lookAtMatrix, eyePosition, lookAtPoint, upDirection);

        xform->setMatrix(lookAtMatrix);

        if(getGlobal()->getOptions()->getCreateNameAttachments() == true &&
           node->getName()                                       != NULL   )
        {
            std::string nodeName = node->getName();

            if(lookat->getSid() != NULL &&
               getGlobal()->getOptions()->getFlattenNodeXForms() == false)
            {
                nodeName.append(".");
                nodeName.append(lookat->getSid());
            }

            setName(xformN, nodeName);
        }

        appendXForm(xformN);
    }
}
Example #4
void showAll(PerspectiveCameraRefPtr TheCamera, NodeRefPtr Scene, Vec3f Up)
{
    //Make sure the volume is up to date for the Scene
    Scene->updateVolume();

    //Get the Minimum and Maximum bounds of the volume
    Vec3f min,max;
    Scene->getVolume().getBounds( min, max );
    Vec3f d = max - min;

    if(d.length() < Eps) //The volume is 0
    {
        //Default to a 1x1x1 box volume
        min.setValues(-0.5f,-0.5f,-0.5f);
        max.setValues( 0.5f, 0.5f, 0.5f);
        d = max - min;
    }

    // Distance needed to fit the larger of the x/y extents within the field of view
    Real32 dist = osgMax(d[0],d[1]) / (2 * osgTan(TheCamera->getFov() / 2.f));

    Pnt3f at((min[0] + max[0]) * .5f,(min[1] + max[1]) * .5f,(min[2] + max[2]) * .5f);
    Pnt3f from=at;
    from[2]+=(dist+fabs(max[2]-min[2])*0.5f); 

    //If the Camera Beacon is a node with a transform core
    if(TheCamera->getBeacon() != NULL &&
       TheCamera->getBeacon()->getCore() != NULL &&
       TheCamera->getBeacon()->getCore()->getType().isDerivedFrom(Transform::getClassType()))
    {
        Matrix m;

        if(!MatrixLookAt(m, from, at, Up))  // apply only if MatrixLookAt reported no error
        {
            dynamic_cast<Transform*>(TheCamera->getBeacon()->getCore())->setMatrix(m);
        }
    }

    //Set the camera to go from 1% of the object to 10 times its size
    Real32 diag = osgMax(osgMax(d[0], d[1]), d[2]);
    TheCamera->setNear (diag / 100.f);
    TheCamera->setFar  (10 * diag);
}
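A possible call site for the function above, assuming a PerspectiveCameraRefPtr named camera and a NodeRefPtr named sceneRoot (both hypothetical names):

// Hypothetical usage: frame the whole scene with the conventional +Y up vector.
showAll(camera, sceneRoot, Vec3f(0.0f, 1.0f, 0.0f));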
Example #5
float4x4 D3DUtil_GetCubeMapViewMatrix( int dwFace )
{
    float3 eye = cubecop;       // shared eye point for all six faces (defined elsewhere)
    float3 LookDir;
    float3 UpDir;

    switch( dwFace )
    {
	case 0:// D3DCUBEMAP_FACE_POSITIVE_X:
            LookDir = float3( 1.0f, 0.0f, 0.0f );
            UpDir   = float3( 0.0f, -1.0f, 0.0f );
            break;
	case 1:// D3DCUBEMAP_FACE_NEGATIVE_X:
            LookDir = float3(-1.0f, 0.0f, 0.0f );
            UpDir   = float3( 0.0f, -1.0f, 0.0f );
            break;
	case 2://D3DCUBEMAP_FACE_POSITIVE_Y:
            LookDir = float3( 0.0f, 1.0f, 0.0f );
            UpDir   = float3( 0.0f, 0.0f, 1.0f );
            break;
	case 3://D3DCUBEMAP_FACE_NEGATIVE_Y:
            LookDir = float3( 0.0f,-1.0f, 0.0f );
            UpDir   = float3( 0.0f, 0.0f,-1.0f );
            break;
	case 4://D3DCUBEMAP_FACE_POSITIVE_Z:
            LookDir = float3( 0.0f, 0.0f, 1.0f );
            UpDir   = float3( 0.0f, -1.0f, 0.0f );
            break;
	case 5://D3DCUBEMAP_FACE_NEGATIVE_Z:
            LookDir = float3( 0.0f, 0.0f,-1.0f );
            UpDir   = float3( 0.0f, -1.0f, 0.0f );
            break;
    }
    LookDir *= -1.0f;   // negate so the D3D-style face table above works in a right-handed system
    float4x4 m = MatrixLookAt( eye, eye + LookDir, UpDir );
    return m;
}
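Collecting the view matrices for all six faces is then a loop over the face indices handled by the switch above; a minimal sketch:

// Sketch: build the six cube-map face view matrices using the helper above.
float4x4 faceViews[6];

for (int face = 0; face < 6; ++face)
{
    faceViews[face] = D3DUtil_GetCubeMapViewMatrix(face);
}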
Example #6
//=================================================================================================================================
/// Measure overdraw from a particular viewpoint.
///
/// \param pCameraPosition  Camera position to use for this viewpoint.  The camera will be looking at the origin
/// \param nImageSize       Size of the pixel grid on each axis
/// \param bCullCCW         Set to true to cull CCW faces, otherwise cull CW faces.
/// \param nPixelHit        Receives the number of pixels covered by at least one triangle.
/// \param nPixelDrawn      Receives the total number of triangles drawn, summed over all pixels.
///
/// \return                 False if out of memory.  True otherwise
//=================================================================================================================================
bool TootleRaytracer::ProcessViewpoint(const float* pCameraPosition,
                                       UINT         nImageSize,
                                       bool         bCullCCW,
                                       UINT&        nPixelHit,
                                       UINT&        nPixelDrawn)
{
    assert(pCameraPosition);

    if (nImageSize < 1)
    {
        nImageSize = 1;   // a strange 1x1 image
    }

    // build camera basis vectors
    Vec3f position(pCameraPosition);
    Vec3f viewDir = Normalize(position) * -1.0;
    Vec3f up;

    // Compute the up vector by performing 90 degree 2D rotation on the position vector
    //  (choose two good component vectors).
    if ((position[ 1 ] * position[ 1 ]) < (position[ 0 ] * position[ 0 ]))
    {
        up[ 0 ] = -position[ 2 ];
        up[ 1 ] =  0;
        up[ 2 ] =  position[ 0 ];
    }
    else
    {
        up[ 0 ] =  0;
        up[ 1 ] =  position[ 2 ];
        up[ 2 ] = -position[ 1 ];
    }

    up = Normalize(up);

    Matrix4f mLookAt = MatrixLookAt(position, Vec3f(0, 0, 0), up);

    // choose viewport size:
    // transform bounding box corners into viewing space
    // as we do this, track the bounding square of the x and y coordinates
    // we will take the size of the larger dimension to be the viewport size
    Vec3f corners[8];
    m_pCore->GetSceneBB().GetCorners(corners);

    float xmin =  FLT_MAX;
    float xmax = -FLT_MAX;
    float ymin =  FLT_MAX;
    float ymax = -FLT_MAX;

    for (int i = 0; i < 8; i++)
    {
        TransformVector(&corners[i], &mLookAt, &corners[i]);
        xmin = Min(xmin, corners[i].x);
        xmax = Max(xmax, corners[i].x);
        ymin = Min(ymin, corners[i].y);
        ymax = Max(ymax, corners[i].y);
    }

    float fViewSize = Max(xmax - xmin, ymax - ymin) * 2;
    //float fViewSize = sqrt(pow(xmax-xmin,2) + pow(ymax-ymin,2)); //Max( xmax - xmin, ymax - ymin );

    // build the camera
    JRTOrthoCamera camera(position, viewDir, up, fViewSize);

    // cull backfaces
    m_pCore->CullBackfaces(viewDir, bCullCCW);

    // iterate over the pixels that we're interested in
    float delta = 1.0f / nImageSize;
    float s = 0;
    float t = 0;
#ifdef DEBUG_IMAGES
    JRTPPMImage img(nImageSize, nImageSize);
#endif

    UINT nPixelDrawnTmp;

    nPixelHit   = 0;
    nPixelDrawn = 0;

    for (int i = 0; i < (int) nImageSize; i++)
    {
        for (int j = 0; j < (int) nImageSize; j++)
        {
            // compute the camera ray for this pixel
            Vec3f rayOrigin, rayDirection;
            camera.GetRay(s, t, &rayOrigin, &rayDirection);

            // trace through the scene data structures to find all hits
            TootleRayHit* pHitArray = 0;
            UINT nHits = 0;

            if (!m_pCore->FindAllHits(rayOrigin, rayDirection, &pHitArray, &nHits))
            {
                // ran out of memory
                return false;
            }

            if (nHits > 0)
            {
                nPixelHit++;

                // compute the number of triangles overdrawn for the pixel
                GetPixelDrawn(pHitArray, nHits, nPixelDrawnTmp);

                nPixelDrawn += nPixelDrawnTmp;
            }

#ifdef DEBUG_IMAGES
            float clr = nHits / 8.f;

            img.SetPixel(j, i, clr, clr, clr);

            /*if( nHits > 0 )
            {
               UINT nTriIndex = pHitArray[0].nFaceID;
               Vec3f normal = m_pMesh->GetFaceNormal( nTriIndex );
               normal /= 2;
               normal += Vec3f(0.5,0.5,0.5);
               img.SetPixel( j, i, normal.x, normal.y, normal.z );
            }*/

#endif

            s += delta;
        }

        t += delta;
        s = 0;
    }

#ifdef DEBUG_IMAGES
    static int nFrameNum = 0;
    char filename[100];
    sprintf(filename, "C:/tmp/images/view_%d.ppm", nFrameNum);
    img.SaveFile(filename);
    nFrameNum++;
#endif

    return true;
}
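The up-vector construction above is a reusable trick: rotate the position 90 degrees in the plane of its two dominant components to get a perpendicular vector. A minimal sketch as a standalone helper (the helper name is hypothetical; Vec3f and Normalize are taken from the code above):

// Hypothetical helper mirroring the rotation used in ProcessViewpoint: the
// result is perpendicular to v, since the dot product cancels by construction.
Vec3f MakePerpendicular(const Vec3f& v)
{
    Vec3f up;

    if ((v[1] * v[1]) < (v[0] * v[0]))   // |x| dominates: rotate in the xz-plane
    {
        up[0] = -v[2];
        up[1] =  0;
        up[2] =  v[0];
    }
    else                                 // |y| dominates: rotate in the yz-plane
    {
        up[0] =  0;
        up[1] =  v[2];
        up[2] = -v[1];
    }

    return Normalize(up);
}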
Example #7
//----------------------------------------------------------------------------------
// Main Entry point
//----------------------------------------------------------------------------------
int main(void)
{
    // Initialization
    //--------------------------------------------------------------------------------------
    int screenWidth = 1080;     // Mirror screen width (set to hmdDesc.Resolution.w/2)
    int screenHeight = 600;     // Mirror screen height (set to hmdDesc.Resolution.h/2)
    
    // NOTE: Mirror screen size can be set to any desired resolution!
    
    // GLFW3 Initialization + OpenGL 3.3 Context + Extensions
    //--------------------------------------------------------
    glfwSetErrorCallback(ErrorCallback);
    
    if (!glfwInit())
    {
        TraceLog(WARNING, "GLFW3: Can not initialize GLFW");
        return 1;
    }
    else TraceLog(INFO, "GLFW3: GLFW initialized successfully");
    
    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_DEPTH_BITS, 16);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GL_TRUE);
   
    GLFWwindow *window = glfwCreateWindow(screenWidth, screenHeight, "rlgl oculus rift", NULL, NULL);
    
    if (!window)
    {
        glfwTerminate();
        return 2;
    }
    else TraceLog(INFO, "GLFW3: Window created successfully");
    
    glfwSetKeyCallback(window, KeyCallback);
    
    glfwMakeContextCurrent(window);
    glfwSwapInterval(0);

    // Load OpenGL 3.3 supported extensions
    rlglLoadExtensions(glfwGetProcAddress);
    //--------------------------------------------------------
    
    // Initialize OpenGL context (states and resources)
    rlglInit(screenWidth, screenHeight);
    
    rlClearColor(245, 245, 245, 255);                   // Define clear color
    rlEnableDepthTest();                                // Enable DEPTH_TEST for 3D
    
    // Define custom camera to initialize projection and view matrices
    Camera camera;
    camera.position = (Vector3){ 5.0f, 5.0f, 5.0f };    // Camera position
    camera.target = (Vector3){ 0.0f, 0.0f, 0.0f };      // Camera looking at point
    camera.up = (Vector3){ 0.0f, 1.0f, 0.0f };          // Camera up vector (rotation towards target)
    camera.fovy = 45.0f;                                // Camera field-of-view Y

    // Initialize viewport and internal projection/modelview matrices
    rlViewport(0, 0, screenWidth, screenHeight);
    rlMatrixMode(RL_PROJECTION);                        // Switch to PROJECTION matrix
    rlLoadIdentity();                                   // Reset current matrix (PROJECTION)
    
    // Setup perspective projection: top = zNear * tan(fovy/2), with fovy given in degrees
    float aspect = (float)screenWidth/(float)screenHeight;
    double top = 0.01*tan(camera.fovy*PI/360.0);        // zNear = 0.01
    double right = top*aspect;
    rlFrustum(-right, right, -top, top, 0.01, 1000.0);
    
    rlMatrixMode(RL_MODELVIEW);                         // Switch back to MODELVIEW matrix
    rlLoadIdentity();                                   // Reset current matrix (MODELVIEW)
    
    // Setup Camera view
    Matrix cameraView = MatrixLookAt(camera.position, camera.target, camera.up);
    rlMultMatrixf(MatrixToFloat(cameraView));      // Multiply MODELVIEW matrix by view matrix (camera)

    InitOculusDevice();                                 // Initialize Oculus Rift CV1
    
    Vector3 cubePosition = { 0.0f, 0.0f, 0.0f };
    //--------------------------------------------------------------------------------------    

    // Main game loop    
    while (!glfwWindowShouldClose(window)) 
    {
        // Update
        //----------------------------------------------------------------------------------
        UpdateOculusTracking();
        //----------------------------------------------------------------------------------

        // Draw
        //----------------------------------------------------------------------------------
        BeginOculusDrawing();
        
            rlClearScreenBuffers();             // Clear current framebuffer(s)

            DrawCube(cubePosition, 2.0f, 2.0f, 2.0f, RED);
            DrawCubeWires(cubePosition, 2.0f, 2.0f, 2.0f, RAYWHITE);
            DrawGrid(10, 1.0f);

            // NOTE: Internal buffers drawing (3D data)
            rlglDraw();

        EndOculusDrawing();

        glfwSwapBuffers(window);
        glfwPollEvents();
        //----------------------------------------------------------------------------------
    }

    // De-Initialization
    //--------------------------------------------------------------------------------------
    CloseOculusDevice();            // Close Oculus device and clear resources

    rlglClose();                    // Unload rlgl internal buffers and default shader/texture
    
    glfwDestroyWindow(window);      // Close window
    glfwTerminate();                // Free GLFW3 resources
    //--------------------------------------------------------------------------------------
    
    return 0;
}
Example #8
void VRShadowEngine::setupCamera(Light         *pLight,
                                        LightTypeE     eType,
                                        RenderAction  *pAction,
                                        EngineDataPtr  pEngineData)
{
    if(eType == Directional)
    {
        DirectionalLight *pDLight =
            dynamic_cast<DirectionalLight *>(pLight);

        MatrixCameraUnrecPtr pCam =
            dynamic_cast<MatrixCamera *>(pEngineData->getCamera());

        if(pCam == NULL)
        {
            pCam = MatrixCamera::createLocal();

            pEngineData->setCamera(pCam);
        }


        Vec3f   diff;
        Pnt3f   center;
        Matrix  transMatrix;
        Node   *pNode = pAction->getActNode();

//        tmpDir = DirectionalLightPtr::dcast(_lights[i]);

        diff = (pNode->getVolume().getMax() -
                pNode->getVolume().getMin());

        Real32 sceneWidth = diff.length() * 0.5f;
        // Not final values. May get tweaked in the future

        Real32 sceneHeight = diff.length() * 0.5f;

        pNode->getVolume().getCenter(center);

        Vec3f lightdir = pDLight->getDirection();

        if(pLight->getBeacon() != NULL)
        {
            Matrix m = pLight->getBeacon()->getToWorld();

            m.mult(lightdir, lightdir);
        }

        // Place the light's camera one unit along the light direction from the
        // scene center, looking back at the center; invert to turn this world
        // transform into a modelview matrix.
        MatrixLookAt(transMatrix,
                     center + lightdir,
                     center,
                     Vec3f(0,1,0));

        transMatrix.invert();

        Matrix proMatrix;

        proMatrix.setIdentity();

        MatrixOrthogonal(proMatrix,
                         -sceneWidth,  sceneWidth,
                         -sceneHeight, sceneHeight,
                         -sceneWidth,  sceneWidth );


        pCam->setProjectionMatrix(proMatrix  );
        pCam->setModelviewMatrix (transMatrix);
    }
    else if(eType == Point)
    {
        PointLight *pPLight = dynamic_cast<PointLight *>(pLight);

        MatrixCameraUnrecPtr pCam =
            dynamic_cast<MatrixCamera *>(pEngineData->getCamera());

        if(pCam == NULL)
        {
            pCam = MatrixCamera::createLocal();

            pEngineData->setCamera(pCam);
        }

        Real32  angle;
        Vec3f   dist;
        Pnt3f   center;
        Vec3f   diff;

        Matrix  transMatrix;

        Node   *pNode = pAction->getActNode();


        pNode->getVolume().getCenter(center);

        Pnt3f lightpos = pPLight->getPosition();

        if(pLight->getBeacon() != NULL)
        {
            Matrix m = pLight->getBeacon()->getToWorld();

            m.mult(lightpos, lightpos);
        }


        MatrixLookAt(transMatrix,
                     lightpos,
                     center,
                     Vec3f(0,1,0));

        transMatrix.invert();


        diff = (pNode->getVolume().getMax() -
                pNode->getVolume().getMin());

        dist  = lightpos - center;

        // half-angle needed for the light frustum to cover the scene from the light position
        angle = atan((diff.length() * 0.5) / dist.length());

        Matrix proMatrix;

        proMatrix.setIdentity();

        MatrixPerspective( proMatrix,
                           2.f * angle,
                           1,
                           pAction->getActivePartition()->getNear(),
                           pAction->getActivePartition()->getFar ());


        pCam->setProjectionMatrix(proMatrix  );
        pCam->setModelviewMatrix (transMatrix);
    }
}
Example #9
/*! Get the current transformation matrix.
*/
Matrix &FlyNavigator::getMatrix()
{
    MatrixLookAt(_tMatrix,_rFrom,_rAt,_vUp);
    return _tMatrix;
}
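Taken together, the examples use MatrixLookAt in two ways: directly as an object placement (handleLookAt, showAll) or inverted to obtain a camera modelview matrix (VRShadowEngine::setupCamera). A minimal sketch of the distinction, with OpenSG-style types assumed:

// Sketch: the same look-at matrix serves as a placement or, inverted, as a modelview.
Matrix placement;
MatrixLookAt(placement, Pnt3f(0.f, 0.f, 10.f), Pnt3f(0.f, 0.f, 0.f), Vec3f(0.f, 1.f, 0.f));

// Object placement: apply directly to a Transform core (cf. handleLookAt above).
// xform->setMatrix(placement);

// Camera modelview: invert so that world coordinates map into eye space
// (cf. setupCamera above).
Matrix modelview = placement;
modelview.invert();
// pCam->setModelviewMatrix(modelview);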