Example #1
void KinectPlugin::updateBody() {
#ifndef HAVE_KINECT
    return;
#else
    if (!_bodyFrameReader) {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = _bodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr)) {
        INT64 nTime = 0;
        hr = pBodyFrame->get_RelativeTime(&nTime);
        IBody* bodies[BODY_COUNT] = {0};
        if (SUCCEEDED(hr)) {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(bodies), bodies);
        }

        if (SUCCEEDED(hr)) {
            ProcessBody(nTime, BODY_COUNT, bodies);
        }

        for (int i = 0; i < _countof(bodies); ++i) {
            SafeRelease(bodies[i]);
        }
    }

    SafeRelease(pBodyFrame);
#endif
}
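All of the examples in this listing release their COM pointers through a SafeRelease helper that is not shown here. A minimal sketch, assuming the inline template commonly used in the Kinect for Windows SDK samples:

template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    // Release the COM interface and clear the pointer so a later release is harmless.
    if (pInterfaceToRelease != NULL)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}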
Example #2
/// <summary>
/// Main processing function
/// </summary>
void CBodyBasics::Update()
{
    if (!m_pBodyFrameReader)
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;

        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessBody(nTime, BODY_COUNT, ppBodies);
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }

    SafeRelease(pBodyFrame);

	/// Update Color Frame
	if (IsDrawColorBase)
	{
		UpdateColorBase();
	}

}
Example #3
void KinectV2Module::updateKinect()
{
#if JUCE_WINDOWS
	if (!m_pBodyFrameReader)
	{
		return;
	}

	IBodyFrame* pBodyFrame = NULL;

	HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

	if (SUCCEEDED(hr))
	{
		INT64 nTime = 0;
		hr = pBodyFrame->get_RelativeTime(&nTime);
		IBody* ppBodies[BODY_COUNT] = { 0 };


		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

		if (SUCCEEDED(hr))
		{
			processBody(BODY_COUNT, ppBodies);
		}

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

	SafeRelease(pBodyFrame);

#endif
}
Example #4
void MSKinectService::pollBody()
{
	if ( bodyFrameReader == NULL )
	{
		return;
	}

	IBodyFrame* pBodyFrame = NULL;

	HRESULT hr = bodyFrameReader->AcquireLatestFrame(&pBodyFrame);

	if (SUCCEEDED(hr))
	{
		INT64 nTime = 0;

		hr = pBodyFrame->get_RelativeTime(&nTime);

		IBody* ppBodies[BODY_COUNT] = {0};

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

		if (SUCCEEDED(hr))
		{
			ProcessBody(nTime, BODY_COUNT, ppBodies);
		}

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

	SafeRelease(pBodyFrame);
}
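The polling functions above assume that a body frame reader was opened during initialization. A minimal sketch of that setup against the standard Kinect v2 API; the function and variable names here are illustrative and not taken from any of the examples:

#include <Kinect.h>

static IKinectSensor*    pSensor     = nullptr;
static IBodyFrameReader* pBodyReader = nullptr;

HRESULT openBodyReader()
{
    HRESULT hr = GetDefaultKinectSensor(&pSensor);
    if (SUCCEEDED(hr))
    {
        hr = pSensor->Open();
    }

    IBodyFrameSource* pBodySource = nullptr;
    if (SUCCEEDED(hr))
    {
        hr = pSensor->get_BodyFrameSource(&pBodySource);
    }
    if (SUCCEEDED(hr))
    {
        hr = pBodySource->OpenReader(&pBodyReader);
    }

    // The frame source is no longer needed once the reader exists.
    SafeRelease(pBodySource);
    return hr;
}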
Example #5
void testApp::update() {

	kinect.update();



	if (!m_pBodyFrameReader)
	{
		return;
	}

	IBodyFrame* pBodyFrame = NULL;

	HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
	
	if (SUCCEEDED(hr))
	{
		INT64 nTime = 0;

		hr = pBodyFrame->get_RelativeTime(&nTime);

		IBody* ppBodies[BODY_COUNT] = { 0 };

		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}

		if (SUCCEEDED(hr))
		{
			ProcessBody(nTime, BODY_COUNT, ppBodies);
		}

		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
	}

	SafeRelease(pBodyFrame);


	for (int y = 0; y < vectorField.getHeight(); y++)
		for (int x = 0; x< vectorField.getWidth(); x++) {
			int index = vectorField.getPixelIndex(x, y);
			float angle = ofNoise(x / (float)vectorField.getWidth()*4.0, y / (float)vectorField.getHeight()*4.0, ofGetElapsedTimef()*0.05)*TWO_PI*2.0;
			ofVec2f dir(cos(angle), sin(angle));
			dir.normalize().scale(ofNoise(x / (float)vectorField.getWidth()*4.0, y / (float)vectorField.getHeight()*4.0, ofGetElapsedTimef()*0.05 + 10.0));
			vectorField.setColor(x, y, ofColor_<float>(dir.x, dir.y, 0));
		}


	for (int i = 0; i < BODY_COUNT; i++) {
		updateParticleSystem(&particleSystems[i], lastChestPositions[i], lastHandPositionLeft[i], lastHandPositionRight[i], leftHandStates[i], rightHandStates[i]);
	}

	//Check if a body left the system for a long period of time
	for (int i = 0; i < BODY_COUNT; i++) {
		if (lastKnownChestPosition[i] == lastChestPositions[i])
		{
			bodyFreezeIterationToRemoveCount[i]++;
		}
		else {
			bodyFreezeIterationToRemoveCount[i] = 0;
		}

		lastKnownChestPosition[i] = lastChestPositions[i];
		if (bodyFreezeIterationToRemoveCount[i] >= MAX_NUM_OF_ITERATIONS_TO_REMOVE_A_BODY ) {
			bodyFreezeIterationToRemoveCount[i] = 0;
			lastChestPositions[i].x = lastChestPositions[i].y = 0;
		}
	}
}
Example #6
void MyKinect2::Update()
{
    // Retrieve the 2D color image
    if (!m_pColorFrameReader)
    {
        return;
    }

    IColorFrame* pColorFrame = NULL;

    HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        IFrameDescription* pFrameDescription = NULL;
        int nWidth = 0;
        int nHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nBufferSize = 0;


        hr = pColorFrame->get_RelativeTime(&nTime);

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pFrameDescription);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Width(&nWidth);
        }

        if (SUCCEEDED(hr))
        {
            hr = pFrameDescription->get_Height(&nHeight);
        }

        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }


        if (SUCCEEDED(hr) && (nWidth == cColorWidth) && (nHeight == cColorHeight))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&webcam.data));
            }
            else if (m_pColorRGBX)
            {
                nBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(webcam.data), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }
        SafeRelease(pFrameDescription);
    }

    // Retrieve the skeleton (body) frame
    if (!m_pBodyFrameReader)
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;

    hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;

        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessBody(BODY_COUNT, ppBodies); // BODY_COUNT is a #define from Kinect.h equal to 6... possibly set it to 1 later to avoid problems when retrieving the joint positions
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }

    SafeRelease(pBodyFrame);
    SafeRelease(pColorFrame);
}
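The color branch in this example (and in Example #9) relies on a pre-allocated BGRA conversion buffer and on the fixed 1920x1080 resolution of the Kinect v2 color stream. A minimal sketch of how that buffer is typically set up, following the layout of the SDK ColorBasics sample; the allocation site and names below are illustrative:

static const int cColorWidth  = 1920;
static const int cColorHeight = 1080;

// Allocated once at startup and reused by CopyConvertedFrameDataToArray on every frame.
RGBQUAD* m_pColorRGBX = new RGBQUAD[cColorWidth * cColorHeight];

// ... Update() runs as shown above ...

// Freed on shutdown.
delete[] m_pColorRGBX;
m_pColorRGBX = nullptr;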
Example #7
bool BodyStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool readed = false;
    if (!m_StreamHandle.bodyFrameReader) {
        ofLogWarning("ofxKinect2::BodyStream") << "Stream is not open.";
        return readed;
    }

    IBodyFrame *bodyFrame = nullptr;

    HRESULT hr = E_FAIL;
    if (!multiFrame) {
        hr = m_StreamHandle.bodyFrameReader->AcquireLatestFrame(&bodyFrame);
    }
    else {
        IBodyFrameReference *bodyFrameReference = nullptr;
        hr = multiFrame->get_BodyFrameReference(&bodyFrameReference);

        if (SUCCEEDED(hr)) {
            hr = bodyFrameReference->AcquireFrame(&bodyFrame);
        }

        safeRelease(bodyFrameReference);
    }

    if (SUCCEEDED(hr)) {
        hr = bodyFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);

        IBody *ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr)) {
            hr = bodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (lock()) {
            if (SUCCEEDED(hr)) {
                readed = true;

                // Clears the body list
                for (int b = 0 ; b < m_Bodies.size() ; b++) {
                    delete m_Bodies[b];
                }
                m_Bodies.clear();

                for (int i = 0; i < _countof(ppBodies); ++i) {
                    BOOLEAN isTracked = false;
                    if (ppBodies[i]) {
                        ppBodies[i]->get_IsTracked(&isTracked);
                        if (isTracked) {
                            UINT64 id = -1;
                            ppBodies[i]->get_TrackingId(&id);

                            // Add the tracked body to the list
                            Body *body = new Body();
                            body->setup(*m_Device, ppBodies[i]);
                            body->update();
                            m_Bodies.push_back(body);
                            // TODO: Use the body tracked id to re-use the Body objects
                            // TODO: Clarify the relationship between Body and IBody.
                        }
                    }
                }
            }
            unlock();
            //Sort the bodies from left to right on the X-axis. Player one is the left-most body.
            auto ascSort = [](Body * bodyOne, Body * bodyTwo) {
                bool isBigger = false;
                const CameraSpacePoint spineMidPosOne = bodyOne->getJoint(JointType_SpineMid).Position;
                const CameraSpacePoint spineMidPosTwo = bodyTwo->getJoint(JointType_SpineMid).Position;
                if (spineMidPosOne.X < spineMidPosTwo.X) {
                    isBigger = true;
                }
                else if (spineMidPosTwo.X < spineMidPosOne.X) {
                    isBigger = false;
                }
                return isBigger;
            };
            std::sort(m_Bodies.begin(), m_Bodies.end(), ascSort);
        }

        for (int i = 0; i < _countof(ppBodies); ++i) {
            safeRelease(ppBodies[i]);
        }
    }

    safeRelease(bodyFrame);

    return readed;
}
Example #8
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

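        // Note: the timestamp is read from the depth frame unconditionally, so this code
        // assumes the depth stream is enabled (depthFrame would otherwise be null).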
        hr = depthFrame->get_RelativeTime( &timeStamp );

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
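Device::update() polls a single IMultiSourceFrameReader rather than one reader per stream. A minimal sketch of how such a reader is typically opened with the standard Kinect v2 API; the helper name and global pointers are illustrative, not part of the library above:

#include <Kinect.h>

static IKinectSensor*           pSensor = nullptr;
static IMultiSourceFrameReader* pReader = nullptr;

HRESULT openMultiSourceReader()
{
    HRESULT hr = GetDefaultKinectSensor(&pSensor);
    if (SUCCEEDED(hr))
    {
        hr = pSensor->Open();
    }
    if (SUCCEEDED(hr))
    {
        // Enable the same frame sources that Device::update() checks via mDeviceOptions.
        hr = pSensor->OpenMultiSourceFrameReader(
            FrameSourceTypes_Body | FrameSourceTypes_BodyIndex |
            FrameSourceTypes_Color | FrameSourceTypes_Depth |
            FrameSourceTypes_Infrared | FrameSourceTypes_LongExposureInfrared,
            &pReader);
    }
    return hr;
}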
Example #9
/// <summary>
/// Main processing function
/// </summary>
// This function is called repeatedly in the main loop
void CColorBasics::Update()
{
	HRESULT hr = S_OK;
	// (1) Color frame (used only for drawing the background)
	if (m_pColorFrameReader)
	{

		IColorFrame* pColorFrame = NULL;

		hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);

		if (SUCCEEDED(hr))
		{
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nBufferSize = 0;
			RGBQUAD *pBuffer = NULL;

			hr = pColorFrame->get_RelativeTime(&nTime);

			if (SUCCEEDED(hr))
			{
				hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Width(&nWidth);
			}

			if (SUCCEEDED(hr))
			{
				hr = pFrameDescription->get_Height(&nHeight);
			}

			if (SUCCEEDED(hr))
			{
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
			}

			if (SUCCEEDED(hr))
			{
				if (imageFormat == ColorImageFormat_Bgra)
				{
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&pBuffer));
				}
				else if (m_pColorRGBX)
				{
					pBuffer = m_pColorRGBX;
					nBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pBuffer), ColorImageFormat_Bgra);
				}
				else
				{
					hr = E_FAIL;
				}
			}

			if (SUCCEEDED(hr))
			{
				ProcessColor(nTime, pBuffer, nWidth, nHeight);
			}
			SafeRelease(pFrameDescription);
		}
		SafeRelease(pColorFrame);
	}


	// Body processing is implemented from here on
	TIMESPAN nBodyTime = 0;
	if (m_pBodyFrameReader)
	{
		IBodyFrame* pBodyFrame = NULL;

		hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->get_RelativeTime(&nBodyTime);

			// UI drawing (buttons, etc.) goes here
			// Set what is drawn according to the game state
			if (!m_pGame){ m_pGame = new CSemaphoreGame(m_pDrawColor, m_pCoordinateMapper); }
			if (m_pGame){ m_pGame->Display(nBodyTime); }

			IBody* ppBodies[BODY_COUNT] = { 0 };

			if (SUCCEEDED(hr))
			{
				hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
			}

			if (SUCCEEDED(hr))
			{
				// UI handling goes here -> doing it inside ProcessBody() is fine
				// Perform the UI handling using the body data
				// Update the game state
				if (m_pGame){ m_pGame->Play(nBodyTime, BODY_COUNT, ppBodies); }

				ProcessBody(nBodyTime, BODY_COUNT, ppBodies);
			}

			for (int i = 0; i < _countof(ppBodies); ++i)
			{
				SafeRelease(ppBodies[i]);
			}
		}
		SafeRelease(pBodyFrame);
	}

	return;
}