Example #1
void dPluginCamera::SetPerspectiveMatrix(dSceneRender* const render, int width, int height)
{
	// set the perspective matrix
	render->SetPerspectiveProjection(width, height, m_fov * 180.0f / 3.1416f, m_frontPlane, m_backPlane);

	// calculate the same gluLookAt matrix
	dMatrix modelViewMatrix(dGetIdentityMatrix());
	modelViewMatrix[2] = m_matrix.m_front.Scale (-1.0f);
	modelViewMatrix[0] = m_matrix.m_up.CrossProduct(modelViewMatrix[2]);
	modelViewMatrix[1] = modelViewMatrix[2].CrossProduct(modelViewMatrix[0]);
	modelViewMatrix[3] = m_matrix.m_posit;
	modelViewMatrix = modelViewMatrix.Inverse();

	// apply scale, zoom and pan 
	dMatrix zoomMatrix(dGetIdentityMatrix());
	zoomMatrix[0][0] = D_PERSPECTIVE_PIXEL_TO_METERS_SCALE * m_zoom;
	zoomMatrix[1][1] = D_PERSPECTIVE_PIXEL_TO_METERS_SCALE * m_zoom;
	zoomMatrix[2][2] = D_PERSPECTIVE_PIXEL_TO_METERS_SCALE * m_zoom;

	dMatrix panMatrix (dGetIdentityMatrix());
	// use a pan sensitivity of 0.25f
	dFloat panSensitivity = D_PANNING_SENSITIVITY;
	panMatrix.m_posit = dVector(m_panX * panSensitivity, m_panY * panSensitivity, 0.0f, 1.0f);

	dMatrix matrix (zoomMatrix * modelViewMatrix * panMatrix);
	render->SetModelViewMatrix(matrix);
}
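The block above builds the camera basis by hand (front, up and their cross products), places the camera position and then inverts the whole matrix; the comment notes it matches gluLookAt. A minimal sketch of that equivalence using plain GLU, assuming the front and up vectors are already unit length and orthogonal (setLookAt and its parameters are illustrative, not part of the plugin API):

#include <GL/glu.h>

// Minimal sketch: gluLookAt builds the same kind of model-view matrix as the
// hand-rolled construction above -- an orthonormal basis from a view direction
// and an up hint, already inverted (i.e. world-to-eye).
static void setLookAt(const float eye[3], const float front[3], const float up[3])
{
	// gluLookAt takes a target point, so step one unit along the view direction
	gluLookAt(eye[0], eye[1], eye[2],
	          eye[0] + front[0], eye[1] + front[1], eye[2] + front[2],
	          up[0], up[1], up[2]);
}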
Example #2
/**
 * returns the matrix in the column-major layout OpenGL expects
 * (i.e. transposed relative to row-major (i,j) indexing)
 */
void CoordinatesManipulator::getMatrix( float* matrix ) {
	for( int i=0; i<4; i++ ){
		for( int j=0; j<4; j++ ) {
			matrix[ j*4+i ] = modelViewMatrix(i,j);
		}
	}
}
Example #3
// load this manipulator's model-view matrix onto the fixed-function pipeline
void CoordinatesManipulator::setModelViewMatrix() {
	float temp[16];
	for( int i=0; i<4; i++ ){
		for( int j=0; j<4; j++ ) {
			temp[ i*4 + j ] = modelViewMatrix(i,j);
		}
	}
	glMatrixMode( GL_MODELVIEW );	// select the model-view stack before loading (addresses the original FIXME)
	glLoadMatrixf( temp );
}
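Note that Example #2 and Example #3 copy the same matrix with opposite index orders: getMatrix() writes matrix[j*4+i] (a transpose) while setModelViewMatrix() writes temp[i*4+j] (a straight copy), which can only both be correct if the wrapper's (i,j) accessor already matches one of the two layouts. For reference, a minimal conversion sketch, assuming m(i,j) addresses row i, column j (toColumnMajor is an illustrative name, not part of this codebase):

// Minimal sketch: glLoadMatrixf and friends expect the 16 floats in
// column-major order, i.e. element (row, col) at index col*4 + row.
template <typename Matrix4>
void toColumnMajor(const Matrix4& m, float out[16])
{
	for (int row = 0; row < 4; ++row)
		for (int col = 0; col < 4; ++col)
			out[col*4 + row] = static_cast<float>(m(row, col));
}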
Example #4
void Torch::draw2Self()
{
	if( world()->landscape()->drawingReflection() || world()->landscape()->drawingRefraction() )
		return;

	const unsigned char samplingPoints = 16;
	unsigned char visiblePoints;

	// estimate how much of the flare is unoccluded by sampling points on a
	// small sphere around the flare position
	glPushMatrix();
	glTranslate( mFlarePosition );
	glScale( 0.3f );
	visiblePoints = mOcclusionTest.randomPointsOnUnitSphereVisible( samplingPoints );
	glPopMatrix();
	if( !visiblePoints )
		return;

	glPushAttrib( GL_VIEWPORT_BIT | GL_DEPTH_BUFFER_BIT | GL_CURRENT_BIT );
	glDepthMask( GL_FALSE );
	glDisable( GL_CULL_FACE );
	glDisable( GL_DEPTH_TEST );

	glDisable( GL_LIGHTING );
	glEnable( GL_BLEND );
	glBlendFunc( GL_SRC_ALPHA, GL_ONE );
	mMaterial->bind();
	glColor( ((float)visiblePoints/(float)samplingPoints)*mColor );

	sQuadVertexBuffer.bind();
	glClientActiveTexture( GL_TEXTURE0 );
	glEnableClientState( GL_VERTEX_ARRAY );
	glEnableClientState( GL_TEXTURE_COORD_ARRAY );

	glVertexPointer( 3, GL_FLOAT, 5*sizeof(GLfloat), (void*)0 );
	glTexCoordPointer( 2, GL_FLOAT, 5*sizeof(GLfloat), (void*)(3*sizeof(GLfloat)) );

	// draw the flare as a camera-facing billboard, twice at different rotations
	QMatrix4x4 flareCenter = modelViewMatrix();
	flareCenter.translate( mFlarePosition );
	Bilboard::begin( flareCenter );
	glScale( mFlareSize );
	glRotate( mFlareRotation, QVector3D(0,0,1) );
	glDrawArrays( GL_QUADS, 0, 4 );
	glRotate( -mFlareRotation*2.7f, QVector3D(0,0,1) );
	glDrawArrays( GL_QUADS, 0, 4 );
	Bilboard::end();

	glDisableClientState( GL_TEXTURE_COORD_ARRAY );
	glDisableClientState( GL_VERTEX_ARRAY );
	sQuadVertexBuffer.release();

	mMaterial->release();
	glDisable( GL_BLEND );

	glPopAttrib();
}
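The flare above is faded according to how many occlusion-test sample points were visible and is drawn with additive blending. A minimal sketch of that fading step in plain GL (drawFlareFaded and its parameters are illustrative, not part of the engine):

#include <GL/gl.h>

// Minimal sketch: scale the flare colour by the fraction of visible sample
// points so partially hidden flares fade smoothly, and blend additively so
// overlapping flares brighten the framebuffer instead of darkening it.
static void drawFlareFaded(unsigned visiblePoints, unsigned samplingPoints,
                           float r, float g, float b, float a)
{
	const float fade = (float)visiblePoints / (float)samplingPoints; // 0..1
	glEnable(GL_BLEND);
	glBlendFunc(GL_SRC_ALPHA, GL_ONE); // additive blending
	glColor4f(r * fade, g * fade, b * fade, a);
	// ... submit the billboard quad here ...
}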
Example #5
void SimpleGMap3::cb_keyPress(int code)
{
    switch(code)
    {
        case 'e':
        {
            time_t rawtime;
            struct tm * timeinfo;
            char buffer[80];

            time (&rawtime);
            timeinfo = localtime (&rawtime);

            strftime (buffer,80,".%F.%H:%M:%S",timeinfo);

            std::string filename = std::string("topo_screenshot") + std::string(buffer) + std::string(".svg");
            m_render_topo->svgout2D(filename, modelViewMatrix(), projectionMatrix());
            break;
        }
    }
}
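A note on the filename format: %F expands to %Y-%m-%d, and the ':' characters produced by %H:%M:%S are rejected by some filesystems (notably on Windows). A minimal sketch of the same timestamped-name idea with a filesystem-safe separator (timestampedName is an illustrative helper, not part of SimpleGMap3):

#include <ctime>
#include <string>

// Minimal sketch: build "prefix.YYYY-MM-DD.HH-MM-SS.ext" using only
// characters that are legal in filenames on common platforms.
static std::string timestampedName(const std::string& prefix, const std::string& ext)
{
	char buffer[80];
	std::time_t rawtime = std::time(nullptr);
	std::strftime(buffer, sizeof(buffer), ".%F.%H-%M-%S", std::localtime(&rawtime));
	return prefix + buffer + ext;
}

With such a helper the call above would read m_render_topo->svgout2D(timestampedName("topo_screenshot", ".svg"), modelViewMatrix(), projectionMatrix()).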
Example #6
void CoordinatesManipulator::getPosition( float* position ) {
	position[0] = modelViewMatrix(0,3);
	position[1] = modelViewMatrix(1,3);
	position[2] = modelViewMatrix(2,3);
}
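Note that the column read here is the model-view translation t, which is not the same thing as the camera's world-space position; for a rigid transform that position is -Rᵀt. A minimal sketch of that extraction, assuming m(i,j) addresses row i, column j and the upper-left 3x3 is a pure rotation (cameraWorldPosition is an illustrative helper, not part of this class):

// Minimal sketch: recover the camera's world-space position from a rigid
// model-view matrix as -R^T * t (R = upper-left 3x3, t = translation column).
template <typename Matrix4>
void cameraWorldPosition(const Matrix4& m, float out[3])
{
	const float tx = m(0,3), ty = m(1,3), tz = m(2,3);
	out[0] = -(m(0,0)*tx + m(1,0)*ty + m(2,0)*tz);
	out[1] = -(m(0,1)*tx + m(1,1)*ty + m(2,1)*tz);
	out[2] = -(m(0,2)*tx + m(1,2)*ty + m(2,2)*tz);
}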
Example #7
void
UsdMayaGLBatchRenderer::_RenderBatches( 
    const MHWRender::MDrawContext* vp2Context,
    const MMatrix& viewMat,
    const MMatrix& projectionMat,
    const GfVec4d& viewport )
{
    if( _renderQueue.empty() )
        return;
    
    if( !_populateQueue.empty() )
    {
        TF_DEBUG(PXRUSDMAYAGL_QUEUE_INFO).Msg(
            "____________ POPULATE STAGE START ______________ (%zu)\n",_populateQueue.size());

        std::vector<UsdImagingDelegate*> delegates;
        UsdPrimVector rootPrims;
        std::vector<SdfPathVector> excludedPrimPaths;
        std::vector<SdfPathVector> invisedPrimPaths;
        
        for( ShapeRenderer *shapeRenderer : _populateQueue )
        {
            delegates.push_back(shapeRenderer->_delegate.get());
            rootPrims.push_back(shapeRenderer->_rootPrim);
            excludedPrimPaths.push_back(shapeRenderer->_excludedPaths);
            invisedPrimPaths.push_back(SdfPathVector());
            
            shapeRenderer->_isPopulated = true;
        }
        
        UsdImagingDelegate::Populate( delegates,
                                      rootPrims,
                                      excludedPrimPaths,
                                      invisedPrimPaths );
        
        _populateQueue.clear();

        TF_DEBUG(PXRUSDMAYAGL_QUEUE_INFO).Msg(
            "^^^^^^^^^^^^ POPULATE STAGE FINISH ^^^^^^^^^^^^^ (%zu)\n",_populateQueue.size());
    }
    
    TF_DEBUG(PXRUSDMAYAGL_QUEUE_INFO).Msg(
        "____________ RENDER STAGE START ______________ (%zu)\n",_renderQueue.size());

    // A new display refresh signifies that the cached selection data is no
    // longer valid.
    _selectQueue.clear();
    _selectResults.clear();

    // We've already populated it with all the selection info we need. We reset
    // it here, and the first call to GetSoftSelectHelper in the next render
    // pass will re-populate it.
    _softSelectHelper.Reset();
    
    GfMatrix4d modelViewMatrix(viewMat.matrix);
    GfMatrix4d projectionMatrix(projectionMat.matrix);

    _taskDelegate->SetCameraState(modelViewMatrix, projectionMatrix, viewport);

    // save the current GL states which hydra may reset to default
    glPushAttrib(GL_LIGHTING_BIT |
                 GL_ENABLE_BIT |
                 GL_POLYGON_BIT |
                 GL_DEPTH_BUFFER_BIT |
                 GL_VIEWPORT_BIT);
    
    // hydra orients all geometry during topological processing so that
    // front faces have ccw winding. We disable culling because culling
    // is handled by fragment shader discard.
    glFrontFace(GL_CCW); // < State is pushed via GL_POLYGON_BIT
    glDisable(GL_CULL_FACE);

    // note: to get benefit of alpha-to-coverage, the target framebuffer
    // has to be a MSAA buffer.
    glDisable(GL_BLEND);
    glEnable(GL_SAMPLE_ALPHA_TO_COVERAGE);

    if (vp2Context) {
        _taskDelegate->SetLightingStateFromMayaDrawContext(*vp2Context);
    } else {
        _taskDelegate->SetLightingStateFromVP1(viewMat);
    }
    
    // The legacy viewport does not support color management,
    // so we roll our own gamma correction by GL means (only in
    // non-highlight mode)
    bool gammaCorrect = !vp2Context;

    if( gammaCorrect )
        glEnable(GL_FRAMEBUFFER_SRGB_EXT);

    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);

    // render task setup
    HdTaskSharedPtrVector tasks = _taskDelegate->GetSetupTasks(); // lighting etc

    for( const auto &renderSetIter : _renderQueue )
    {
        size_t hash = renderSetIter.first;
        const RenderParams &params = renderSetIter.second.first;
        const _SdfPathSet &renderPaths = renderSetIter.second.second;

        TF_DEBUG(PXRUSDMAYAGL_QUEUE_INFO).Msg(
                "*** renderQueue, batch %zx, size %zu\n",
                renderSetIter.first, renderPaths.size() );

        SdfPathVector roots(renderPaths.begin(), renderPaths.end());
        tasks.push_back(_taskDelegate->GetRenderTask(hash, params, roots));
    }

    _hdEngine.Execute(*_renderIndex, tasks);
    
    if( gammaCorrect )
        glDisable(GL_FRAMEBUFFER_SRGB_EXT);

    glPopAttrib(); // restore the state saved by glPushAttrib above
    
    // Selection is based on what we have last rendered to the display. The
    // selection queue is cleared above, so this has the effect of resetting
    // the render queue and prepping the selection queue without any
    // significant memory hit.
    _renderQueue.swap( _selectQueue );
    
    TF_DEBUG(PXRUSDMAYAGL_QUEUE_INFO).Msg(
        "^^^^^^^^^^^^ RENDER STAGE FINISH ^^^^^^^^^^^^^ (%zu)\n",_renderQueue.size());
}
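The render stage above brackets Hydra's draw with glPushAttrib/glPopAttrib and, for the legacy viewport only, toggles an sRGB framebuffer in place of colour management. A minimal sketch of that bracketing pattern in isolation (withGLStateSaved is an illustrative wrapper, not part of the USD Maya plugin; it assumes GL and glext headers are available):

#include <functional>
#include <GL/gl.h>
#include <GL/glext.h>

// Minimal sketch: save the attribute groups the renderer may touch, optionally
// enable sRGB conversion as a stand-in for colour management, run the draw
// callback, then restore everything.
static void withGLStateSaved(bool gammaCorrect, const std::function<void()>& draw)
{
	glPushAttrib(GL_LIGHTING_BIT | GL_ENABLE_BIT | GL_POLYGON_BIT |
	             GL_DEPTH_BUFFER_BIT | GL_VIEWPORT_BIT);
	if (gammaCorrect)
		glEnable(GL_FRAMEBUFFER_SRGB_EXT);

	draw(); // e.g. _hdEngine.Execute(*_renderIndex, tasks)

	if (gammaCorrect)
		glDisable(GL_FRAMEBUFFER_SRGB_EXT);
	glPopAttrib();
}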
Example #8
// Render the scene: warp the closest image into the viewport, then mosaic its neighbours
void renderScene(IplImage** images, CvMat** matches, int* bestMatchedIndex, 
		 IplImage* viewport, struct pData* poses)
{
  if (myScene->currentIndex != myScene->previousIndex) {
    //printf("CURRENT POSE: \n");
    //printPose(myScene->pose);
  }

  int i, j, k;
  int image_count = myScene->max_image;
  int baseIndex = closestImageIndex(poses);
  CvMat* transform = modelViewMatrix(baseIndex, poses);  

  // Translation?
  cvmSet(transform, 0, 2, -1*(myScene->pose.center.val[1] - poses[baseIndex].center.val[1]));
  cvmSet(transform, 1, 2, 1*(myScene->pose.center.val[2] - poses[baseIndex].center.val[2]));

  // Rotation?
  CvScalar diff = cvScalar(myScene->pose.center.val[0] - myScene->pose.eye.val[0], 
			   myScene->pose.center.val[1] - myScene->pose.eye.val[1], 
			   myScene->pose.center.val[2] - myScene->pose.eye.val[2], 0.0);
  //printf("diff is: [%.2lf  %.2lf  %.2lf  %.2lf]\n", diff.val[0], diff.val[1], diff.val[2], diff.val[3]);

  double radius = norm(diff);

  double angle1 = acos(diff.val[0] / radius) - PI;
  double angle2 = asin(diff.val[1] / radius);

  //printf("angle1: %.2lf\n", angle1);
  //printf("angle2: %.2lf\n", angle2);

  CvMat* zRotation = cvCreateMat(3, 3, CV_64F);
  makeZAxisRotation(zRotation, (angle1+angle2) / 2);
  //cvmSet(zRotation, 0, 2, 200*angle1);
  //cvmSet(zRotation, 1, 2, 200*angle1);
  cvMatMulAdd(zRotation, transform, 0, transform);
  cvReleaseMat(&zRotation);


  // Zoom?
  double zoom = radius;
  CvMat* zoomTransform = create3DIdentity();
  cvmSet(zoomTransform, 0, 0, zoom);
  cvmSet(zoomTransform, 1, 1, zoom);
  cvMatMulAdd(zoomTransform, transform, 0, transform);
  cvReleaseMat(&zoomTransform);

  for (k = 0; k < MATCH_RANGE; k++) {
    i = (baseIndex + k) % image_count;
    if (i < 0) i += image_count;
    //printf("displaying image %d\n", i);

    if (i == baseIndex) {
      cvWarpPerspective(images[i], viewport, transform, CV_INTER_LINEAR, cvScalarAll(0));
      continue;
    }

    //mosaic other images
    mosaic(i, images, matches, bestMatchedIndex, viewport, transform);
  }
  cvReleaseMat(&transform);

}
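makeZAxisRotation is applied here as a 3x3 homogeneous transform to spin the warp in the image plane before composing it with the base transform. A plausible implementation sketch under that assumption (this is an illustration, not the original helper):

#include <math.h>
#include <opencv2/core/core_c.h>

// Minimal sketch: an in-plane (Z-axis) rotation as a 3x3 homogeneous matrix,
// suitable for pre-multiplying onto the perspective-warp transform above.
static void makeZRotation(CvMat* m /* 3x3, CV_64F */, double angle)
{
	cvSetIdentity(m);
	cvmSet(m, 0, 0,  cos(angle));  cvmSet(m, 0, 1, -sin(angle));
	cvmSet(m, 1, 0,  sin(angle));  cvmSet(m, 1, 1,  cos(angle));
}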
Example #9
void Camera::modelViewProjectionMatrix(Matrix3D *modelMtx, float *outMtx) {
	float lookatMtx[16];
	modelViewMatrix(modelMtx, lookatMtx);
	mtxMultiply(outMtx, projection.matrix, lookatMtx);
}
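mtxMultiply above composes the projection matrix with the look-at matrix. A minimal column-major 4x4 multiply in that spirit (mtx4Multiply is an illustrative stand-in, assuming OpenGL-style column-major float arrays and a non-aliasing output):

// Minimal sketch: out = a * b in column-major layout, so that applying out to
// a column vector equals a * (b * v) -- i.e. projection * modelView for the
// call above. 'out' must not alias 'a' or 'b'.
static void mtx4Multiply(float out[16], const float a[16], const float b[16])
{
	for (int col = 0; col < 4; ++col)
		for (int row = 0; row < 4; ++row)
		{
			float sum = 0.0f;
			for (int k = 0; k < 4; ++k)
				sum += a[k*4 + row] * b[col*4 + k];
			out[col*4 + row] = sum;
		}
}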