Example #1
File: plane.cpp Project: gpodevijn/nestk
// Build an orthonormal basis (v1, v2) spanning the plane perpendicular to v0.
void orthogonal_basis(Vec3f& v1, Vec3f& v2, const Vec3f& v0)
{
  Eigen::Vector3d ev0 = toEigenVector3d(v0);
  Eigen::Vector3d eorth1 = ev0.unitOrthogonal();
  v1 = toVec3f(eorth1);
  v2 = v0.cross(v1);
  normalize(v1);
  normalize(v2);
}
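The snippet relies on two conversion helpers that are not shown on this page. A minimal sketch of what they might look like, assuming a Vec3f type with x/y/z members (the actual nestk definitions may differ):

#include <Eigen/Core>

// Hypothetical converters assumed by the example above; a sketch only, not the nestk originals.
inline Eigen::Vector3d toEigenVector3d( const Vec3f& v )
{
    return Eigen::Vector3d( v.x, v.y, v.z );
}

inline Vec3f toVec3f( const Eigen::Vector3d& v )
{
    return Vec3f( (float)v.x(), (float)v.y(), (float)v.z() );
}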
Example #2
	Bone::Bone( const Vector4 &position, const _NUI_SKELETON_BONE_ORIENTATION &bone )
	{
		mAbsRotQuat	= toQuatf( bone.absoluteRotation.rotationQuaternion );
		mAbsRotMat	= toMatrix44f( bone.absoluteRotation.rotationMatrix );
		mJointEnd	= bone.endJoint;
		mJointStart	= bone.startJoint;
		mPosition	= toVec3f( position );
		mRotQuat	= toQuatf( bone.hierarchicalRotation.rotationQuaternion );
		mRotMat		= toMatrix44f( bone.hierarchicalRotation.rotationMatrix );
	}
Example #3
void Listener::onFrame( const Leap::Controller& controller )
{
	lock_guard<mutex> lock( *mMutex );
	if ( !mNewFrame ) {
		const Leap::Frame& controllerFrame	= controller.frame();
		const Leap::HandList& hands			= controllerFrame.hands();
		
		HandMap handMap;
		for ( const Leap::Hand& hand : hands ) {
			FingerMap fingerMap;
			ToolMap toolMap;
			const Leap::PointableList& pointables = hand.pointables();
			for ( const Leap::Pointable& pt : pointables ) {
				if ( pt.isValid() ) {
					Pointable pointable( pt );
					if ( pt.isFinger() ) {
						fingerMap[ pt.id() ] = Finger( pointable );
					} else if ( pt.isTool() ) {
						toolMap[ pt.id() ] = Tool( pointable );
					}
				}
			}
			
			float rotAngle			= (float)hand.rotationAngle( mFirstFrame.mFrame );
			Vec3f rotAxis			= toVec3f( hand.rotationAxis( mFirstFrame.mFrame ) );
			Matrix44f rotMatrix		= toMatrix44f( hand.rotationMatrix( mFirstFrame.mFrame ) );
			float scale				= (float)hand.scaleFactor( mFirstFrame.mFrame );
			Vec3f translation		= toVec3f( hand.translation( mFirstFrame.mFrame ) );
			
			handMap[ hand.id() ]	= Hand( hand, fingerMap, toolMap, rotAngle, rotAxis,
										   rotMatrix, scale, translation );
		}

		mFrame		= Frame( controllerFrame, handMap );
		if ( !mFirstFrameReceived ) {
			mFirstFrame			= Frame( controllerFrame, handMap );
			mFirstFrameReceived	= true;
		}
		mNewFrame	= true;
	}
}
Example #4
bool Screen::intersects( const Pointable& pointable, Vec3f& result, bool normalize,
						float clampRatio ) const
{
	Leap::Vector v	= mScreen.intersect( pointable.mPointable, normalize, clampRatio );
	if ( v.x != v.x ||
		v.y != v.y ||
		v.z != v.z ) { // NaN
		return false;
	}
	result			= toVec3f( v );
	return true;
}
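The three self-comparisons above work because an IEEE NaN is the only value that compares unequal to itself. With C++11 the same test could use std::isnan from <cmath>; a sketch, not part of the original source:

	// Equivalent NaN check (requires <cmath>, C++11); matches the self-comparison test above.
	if ( std::isnan( v.x ) || std::isnan( v.y ) || std::isnan( v.z ) ) {
		return false;
	}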
Example #5
Vec3f mapDepthCoordToBody( const Vec2i& v, uint16_t depth, ICoordinateMapper* mapper )
{
    DepthSpacePoint depthSpacePoint;
    depthSpacePoint.X = (float)v.x;
    depthSpacePoint.Y = (float)v.y;

    CameraSpacePoint cameraSpacePoint;
    long hr = mapper->MapDepthPointToCameraSpace( depthSpacePoint, depth, &cameraSpacePoint );
    if ( SUCCEEDED( hr ) ) {
        return Vec3f( toVec3f( cameraSpacePoint ) );
    }
    return Vec3f();
}
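A possible call site for the helper above; the pixel coordinate, depth value, and mapper pointer below are illustrative placeholders, not taken from the original project:

// Hypothetical usage: convert one depth sample into a camera-space position.
// 'coordinateMapper' is assumed to be an ICoordinateMapper* obtained from the sensor elsewhere.
Vec2i pixel( 320, 240 );        // depth image coordinate (placeholder)
uint16_t rawDepth = 1500;       // depth value in millimeters (placeholder)
Vec3f cameraSpace = mapDepthCoordToBody( pixel, rawDepth, coordinateMapper );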
Example #6
Vec3f Pointable::getDirection() const
{
	return toVec3f( mPointable.direction() );
}
Example #7
vector<VRCollision> VRPhysics::getCollisions() {
    Lock lock(mtx());
    vector<VRCollision> res;
    if (!physicalized) return res;
    if (!ghost) {
        int numManifolds = world->getDispatcher()->getNumManifolds();
        for (int i=0;i<numManifolds;i++) {
            btPersistentManifold* contactManifold =  world->getDispatcher()->getManifoldByIndexInternal(i);
            //btCollisionObject* obA = (btCollisionObject*)(contactManifold->getBody0());
            //btCollisionObject* obB = (btCollisionObject*)(contactManifold->getBody1());

            int numContacts = contactManifold->getNumContacts();
            for (int j=0;j<numContacts;j++) {
                btManifoldPoint& pt = contactManifold->getContactPoint(j);
                if (pt.getDistance()<0.f) {
                    VRCollision c;
                    c.obj1 = vr_obj;
                    // c.obj2 = // TODO
                    c.pos1 = toVec3f( pt.getPositionWorldOnA() );
                    c.pos2 = toVec3f( pt.getPositionWorldOnB() );
                    c.norm = toVec3f( pt.m_normalWorldOnB );
                    c.distance = pt.getDistance();
                    res.push_back(c);
                }
            }
        }
        return res;
    }

    // --------- ghost object --------------
    btManifoldArray   manifoldArray;
    btBroadphasePairArray& pairArray = ghost_body->getOverlappingPairCache()->getOverlappingPairArray();
    int numPairs = pairArray.size();

    for (int i=0;i<numPairs;i++) {
        manifoldArray.clear();

        const btBroadphasePair& pair = pairArray[i];

        //unless we manually perform collision detection on this pair, the contacts are in the dynamics world paircache:
        btBroadphasePair* collisionPair = world->getPairCache()->findPair(pair.m_pProxy0,pair.m_pProxy1);
        if (!collisionPair)
            continue;

        if (collisionPair->m_algorithm)
            collisionPair->m_algorithm->getAllContactManifolds(manifoldArray);

        for (int j=0;j<manifoldArray.size();j++) {
            btPersistentManifold* manifold = manifoldArray[j];
            btScalar directionSign = manifold->getBody0() == ghost_body ? btScalar(-1.0) : btScalar(1.0);
            for (int p=0;p<manifold->getNumContacts();p++) {
                const btManifoldPoint& pt = manifold->getContactPoint(p);
                if (pt.getDistance()<0.f) {
                    VRCollision c;
                    c.pos1 = toVec3f( pt.getPositionWorldOnA() );
                    c.pos2 = toVec3f( pt.getPositionWorldOnB() );
                    c.norm = toVec3f( pt.m_normalWorldOnB*directionSign );
                    c.distance = pt.getDistance();
                    res.push_back(c);
                }
            }
        }
    }

    return res;
}
Example #8
void writeBobjFile(const std::string& name, Mesh* mesh)
{
    debMsg( "writing mesh file " << name ,1);
#	if NO_ZLIB!=1
    const Real  dx = mesh->getParent()->getDx();
    const Vec3i gs = mesh->getParent()->getGridSize();

    gzFile gzf = gzopen(name.c_str(), "wb1"); // do some compression
    if (!gzf)
        errMsg("writeBobj: unable to open file");

    // write vertices
    int numVerts = mesh->numNodes();
    gzwrite(gzf, &numVerts, sizeof(int));
    for (int i=0; i<numVerts; i++)
    {
        Vector3D<float> pos = toVec3f(mesh->nodes(i).pos);
        // normalize to unit cube around 0
        pos -= toVec3f(gs)*0.5;
        pos *= dx;
        gzwrite(gzf, &pos.value[0], sizeof(float)*3);
    }

    // normals
    mesh->computeVertexNormals();
    gzwrite(gzf, &numVerts, sizeof(int));
    for (int i=0; i<numVerts; i++)
    {
        Vector3D<float> pos = toVec3f(mesh->nodes(i).normal);
        gzwrite(gzf, &pos.value[0], sizeof(float)*3);
    }

    // write tris
    int numTris = mesh->numTris();
    gzwrite(gzf, &numTris, sizeof(int));
    for(int t=0; t<numTris; t++)
    {
        for(int j=0; j<3; j++)
        {
            int trip = mesh->tris(t).c[j];
            gzwrite(gzf, &trip, sizeof(int));
        }
    }

    // per vertex smoke densities
    if (mesh->getType() == Mesh::TypeVortexSheet)
    {
        VortexSheetMesh* vmesh = (VortexSheetMesh*) mesh;
        int densId[4] = {0, 'v','d','e'};
        gzwrite(gzf, &densId[0], sizeof(int) * 4);

        // compute densities
        std::vector<float> triDensity(numTris);
        for (int tri=0; tri < numTris; tri++)
        {
            Real area = vmesh->getFaceArea(tri);
            if (area>0)
                triDensity[tri] = vmesh->sheet(tri).smokeAmount;
        }

        // project triangle data to vertex
        std::vector<int>   triPerVertex(numVerts);
        std::vector<float> density(numVerts);
        for (int tri=0; tri < numTris; tri++)
        {
            for (int c=0; c<3; c++)
            {
                int vertex = mesh->tris(tri).c[c];
                density[vertex] += triDensity[tri];
                triPerVertex[vertex]++;
            }
        }

        // averaged smoke densities
        for(int point=0; point<numVerts; point++)
        {
            float dens = 0;
            if (triPerVertex[point]>0)
                dens = density[point] / triPerVertex[point];
            gzwrite(gzf, &dens, sizeof(float));
        }
    }

    // vertex flags
    if (mesh->getType() == Mesh::TypeVortexSheet)
    {
        int Id[4] = {0, 'v','x','f'};
        gzwrite(gzf, &Id[0], sizeof(int) * 4);

        // averaged smoke densities
        for(int point=0; point<numVerts; point++)
        {
            float alpha = (mesh->nodes(point).flags & Mesh::NfMarked) ? 1: 0;
            gzwrite(gzf, &alpha, sizeof(float));
        }
    }

    gzclose( gzf );
#	else
    debMsg( "file format not supported without zlib" ,1);
#	endif
}
Example #9
Vec3f Hand::getVelocity() const
{
	return toVec3f( mHand.palmVelocity() );
}
Example #10
Vec3f Hand::getTranslation( const Frame& frame ) const
{
	return toVec3f( mHand.translation( frame.mFrame ) );
}
Example #11
Vec3f Hand::getSpherePosition() const
{
	return toVec3f( mHand.sphereCenter() );
}
Example #12
Vec3f Hand::getRotationAxis( const Frame& frame ) const
{
	return toVec3f( mHand.rotationAxis( frame.mFrame ) );
}
Example #13
Vec3f Hand::getPosition() const
{
	return toVec3f( mHand.palmPosition() );
}
Example #14
Vec3f Hand::getNormal() const
{
	return toVec3f( mHand.palmNormal() );
}
Example #15
Vec3f Hand::getDirection() const
{
	return toVec3f( mHand.direction() );
}
Example #16
Vec3f Pointable::getPosition() const
{
	return toVec3f( mPointable.tipPosition() );
}
Example #17
Vec3f Pointable::getVelocity() const
{
	return toVec3f( mPointable.tipVelocity() );
}
Example #18
Vec3f Screen::getBottomLeft() const
{
	return toVec3f( mScreen.bottomLeftCorner() );
}
Example #19
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

        // The timestamp is read from the depth frame, so this path assumes the depth stream is enabled.
        hr = depthFrame->get_RelativeTime( &timeStamp );

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
Example #20
Vec3f Screen::getHorizontalAxis() const
{
	return toVec3f( mScreen.horizontalAxis() );
}
Example #21
void collideWithPolyTrees(Particles& particles, const Matrix4& txMx, const std::vector<Object>& objs, std::vector<ozcollide::AABBTreePoly*>& coltrees) {

    Particles::Positions pos = particles.pos_;
    Particles::Velocities& vel = particles.vel_;
    Particles::Velocities& dv = particles.dv_;
//    Particles::Accelerations& da = particles.da_;
    const unsigned size = pos.size();
    
    for (unsigned i=0;i<size;i++) {
        pos[i] = Matrix4AffineReal3(txMx, pos[i]);
    }
    
    for (unsigned i=0;i<size;i++) {
        Real3 pi = pos[i];
        Real3 pf = pi+dv[i];
        std::vector<ozcollide::AABBTreePoly::SegmentColResult> colres(coltrees.size()); 
        for (unsigned j=0;j<coltrees.size();j++) {
            coltrees[j]->collideWithSegment(toVec3f(pi),toVec3f(pf), colres[j]);
        }
        for (unsigned j=0;j<coltrees.size();j++) {
            real tmin = std::numeric_limits<float>::max();
            Real3 nmin(0.);
            const ozcollide::Polygon* pmin = 0;
            
            for (unsigned k=0;k<colres[j].polys_.size();k++) {
                const ozcollide::Polygon* poly= colres[j].polys_[k];
                const Real3& a = objs[j].vx_[poly->getIndex(0)];
                const Real3& b = objs[j].vx_[poly->getIndex(1)];
                const Real3& c = objs[j].vx_[poly->getIndex(2)];
                

//                const Real3& an = objs[j].vn_[poly->getIndex(0)];
//                const Real3& bn = objs[j].vn_[poly->getIndex(1)];
//                const Real3& cn = objs[j].vn_[poly->getIndex(2)];
               
                ozcollide::Plane plane;
                plane.fromPoints(toVec3f(a),toVec3f(b), toVec3f(c) );
                real t;
                if (plane.intersectWithLine(toVec3f(pi),toVec3f(pf),t)) {
                    if(t<tmin) {
                        tmin=t;
                        pmin = poly;
                        nmin = normalize( crossProd( b-a, c-a) );
                    }
                }
                // reaction
                if (pmin) {
//                    Real3 dvi = dv[i];
//                    Real3 x = pi+tmin*dvi;
//                    pos[i]=x+(1.-tmin)*(dvi.norm())*nmin; // separate from geometry
                    dv[i] = tmin*dv[i]+(1.-tmin)*dv[i].norm()*nmin;

                    // perfect bounce == 2 slip walls = 1
                    real k = 1.8;
                    vel[i]+=-k*dotProd(vel[i],nmin)*nmin; // bounce
                    vel[i]*=0.95;
                    // this gets inside geometry often
//                    pos[i]=x+(1.-tmin)*(dv[i].norm())*nmin;
//                    vel[i]=vel[i].norm()*nmin;
                    
//                    dv[i]=Real3(0.);
//                    da[i]= Real3(0.);

                }
            }
        }
    }
}
Example #22
Vec3f Screen::getNormal() const
{
	return toVec3f( mScreen.normal() );
}
Example #23
Vec3f Screen::getVerticalAxis() const
{
	return toVec3f( mScreen.verticalAxis() );
}
Example #24
OSG::Vec3f VRPhysics::getTorque() { Lock lock(mtx()); return toVec3f(body->getTotalTorque());}