Example #1
0
/* moduleCapture
 * Module that interfaces with the Kinect
 */
moduleCapture::moduleCapture()
{
	sensor = NULL;
	resolution.height = 480;
	resolution.width = 640;
	pSkeletonFrame = new NUI_SKELETON_FRAME;
	skeleton = new NUI_SKELETON_DATA;
	pColorImageFrame = new NUI_IMAGE_FRAME;
	if(SUCCEEDED(NuiCreateSensorByIndex(0, &sensor))){
		OutputDebugStringW(L"Connected to Kinect\n");
		if (SUCCEEDED(sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR )))
		{
			OutputDebugStringW(L"Kinect with Skeleton\n");
		}
		else {
			OutputDebugStringW(L"Error initializing Kinect\n");
			exit(1);
		}
	} else {
		OutputDebugStringW(L"Error connecting to Kinect\n");
		exit(1);
	}
	sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NULL, &colorStreamHandle);
	OutputDebugStringW(L"Capture module instantiated\n");
}
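Example #1 acquires the sensor and heap-allocates its frame structures but shows no teardown. A matching destructor might look like the sketch below; it is an assumption, not part of the original module, and simply undoes what the constructor set up.

/* Hypothetical cleanup for the moduleCapture constructor above (not in the original source). */
moduleCapture::~moduleCapture()
{
	if (sensor != NULL) {
		sensor->NuiShutdown();   // stop the NUI runtime on this sensor
		sensor->Release();       // drop the COM reference obtained from NuiCreateSensorByIndex
		sensor = NULL;
	}
	delete pSkeletonFrame;
	delete skeleton;
	delete pColorImageFrame;
}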
Example #2
0
void DepthSensor::createInstance()
{
    int count = 0;
    NuiGetSensorCount(&count);
    if (count == 0)
    {
        throw std::runtime_error("no valid Kinect is connected");
    }
    // create the Kinect sensor
    NuiCreateSensorByIndex(0, &mNuiSensor);
    // get kinect status
    HRESULT status = mNuiSensor->NuiStatus();
    if (status != S_OK)
    {
        throw std::runtime_error("Kinect is not ready to work");
    }

    // Create and initialize a new vtk image renderer 
    // We'll use this to draw the data we receive from the Kinect to the screen
    mDrawDepth = new vtkImageRender();
    
    if (!mDrawDepth->Initialize(cDepthWidth, cDepthHeight, cDepthWidth * sizeof(long)))
    {
        throw std::runtime_error("Failed to initialize the vtk draw device.");
    }
}
Example #3
0
/// <summary>
/// Create the first connected Kinect found 
/// </summary>
/// <returns>indicates success or failure</returns>
HRESULT CSkeletonBasics::CreateFirstConnected()
{
    INuiSensor * pNuiSensor;

    int iSensorCount = 0;
    HRESULT hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr))
    {
        return hr;
    }

    // Look at each Kinect sensor
    for (int i = 0; i < iSensorCount; ++i)
    {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        hr = NuiCreateSensorByIndex(i, &pNuiSensor);
        if (FAILED(hr))
        {
            continue;
        }

        // Get the status of the sensor, and if connected, then we can initialize it
        hr = pNuiSensor->NuiStatus();
        if (S_OK == hr)
        {
            m_pNuiSensor = pNuiSensor;
            break;
        }

        // This sensor wasn't OK, so release it since we're not using it
        pNuiSensor->Release();
    }

    if (NULL != m_pNuiSensor)
    {
        // Initialize the Kinect and specify that we'll be using skeleton
        hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON); 
        if (SUCCEEDED(hr))
        {
            // Create an event that will be signaled when skeleton data is available
            m_hNextSkeletonEvent = CreateEventW(NULL, TRUE, FALSE, NULL);

            // Open a skeleton stream to receive skeleton data
            hr = m_pNuiSensor->NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, 0); 
        }
    }

    if (NULL == m_pNuiSensor || FAILED(hr))
    {
        SetStatusMessage(L"No ready Kinect found!");
        return E_FAIL;
    }

    return hr;
}
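The CreateFirstConnected example above opens a skeleton stream tied to m_hNextSkeletonEvent but does not show the consuming side. The sketch below uses an assumed method name (ProcessSkeleton) and illustrates how such an event is typically polled before fetching and smoothing the next skeleton frame; only members already shown in the example (m_pNuiSensor, m_hNextSkeletonEvent) are relied on.

// Hypothetical consumer for the skeleton stream opened above.
void CSkeletonBasics::ProcessSkeleton()
{
    // Only proceed when the runtime has signaled that a new skeleton frame is ready
    if (WAIT_OBJECT_0 != WaitForSingleObject(m_hNextSkeletonEvent, 0))
    {
        return;
    }

    NUI_SKELETON_FRAME skeletonFrame = {0};
    if (FAILED(m_pNuiSensor->NuiSkeletonGetNextFrame(0, &skeletonFrame)))
    {
        return;
    }

    // Smooth out jitter before using the joint positions (NULL = default smoothing parameters)
    m_pNuiSensor->NuiTransformSmooth(&skeletonFrame, NULL);

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
    {
        if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
        {
            // Joint positions are in skeletonFrame.SkeletonData[i].SkeletonPositions[]
        }
    }
}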
Example #4
0
Kinect::Kinect(void)
{
	HRESULT hr = NuiCreateSensorByIndex(0,  &m_kinect_handle);
    if (SUCCEEDED(hr))
    {
		
		m_kinect_handle->NuiInitialize( NUI_INITIALIZE_FLAG_USES_SKELETON);
		m_hNextSkeletonEvent = CreateEventW(NULL, TRUE, FALSE, NULL);
		hr = m_kinect_handle->NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE | NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
	}
}
Example #5
0
    void kinect_gbuffer::init()
    {
        int device_id_ = 0;

        assert_hr(NuiCreateSensorByIndex(device_id_, &sensor_));

        DWORD options = NUI_INITIALIZE_FLAG_USES_COLOR; // | NUI_INITIALIZE_FLAG_USES_DEPTH;

        assert_hr(sensor_->NuiInitialize(options));

        NUI_IMAGE_RESOLUTION eResolution = NUI_IMAGE_RESOLUTION_640x480;

        if (width_ == 640 && height_ == 480)
            eResolution = NUI_IMAGE_RESOLUTION_640x480;
        else if (width_ == 1280 && height_ == 960)
            eResolution = NUI_IMAGE_RESOLUTION_1280x960;
        else
        {
            tcout << L"Invalid resolution, setting back to 640x480" << std::endl;
            width_ = 640;
            height_ = 480;
        }

        color_event_ = CreateEvent(nullptr, TRUE, FALSE, nullptr);
        depth_event_ = CreateEvent(nullptr, TRUE, FALSE, nullptr);
        kill_event_  = CreateEvent(nullptr, TRUE, FALSE, nullptr);

        assert_hr(sensor_->NuiImageStreamOpen(
            NUI_IMAGE_TYPE_COLOR, eResolution,
            0,
            2,
            color_event_,
            &color_stream_));

        if ((options & NUI_INITIALIZE_FLAG_USES_DEPTH) != 0)
        {
            // fixed size
            assert_hr(sensor_->NuiImageStreamOpen(
                NUI_IMAGE_TYPE_DEPTH,
                NUI_IMAGE_RESOLUTION_640x480,
                0,
                2,
                depth_event_,
                &depth_stream_));

            assert_hr(sensor_->NuiImageStreamSetImageFrameFlags(depth_stream_, NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE));
        }

        has_new_color_ = has_new_depth_ = false;
    }
Example #6
0
bool initializeKinect()
{
    int numKinects = 0;
    HRESULT hr = NuiGetSensorCount( &numKinects );
    if ( FAILED(hr) || numKinects<=0 )
    {
        std::cout << "No Kinect device found." << std::endl;
        return false;
    }
    
    hr = NuiCreateSensorByIndex( 0, &context );
    if ( FAILED(hr) )
    {
        std::cout << "Failed to connect to Kinect device." << std::endl;
        return false;
    }
    
    DWORD nuiFlags = NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR |
                     NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX;
    hr = context->NuiInitialize( nuiFlags );
    if ( FAILED(hr) )
    {
        std::cout << "Failed to initialize Kinect: " << std::hex << (long)hr << std::dec << std::endl;
        return false;
    }
    
    hr = context->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, NULL, &colorStreamHandle );
    if ( FAILED(hr) )
    {
        std::cout << "Unable to create color stream: " << std::hex << (long)hr << std::dec << std::endl;
        return false;
    }
    
    hr = context->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480,
                                      0, 2, NULL, &depthStreamHandle );
    if ( FAILED(hr) )
    {
        std::cout << "Unable to create depth stream: " << std::hex << (long)hr << std::dec << std::endl;
        return false;
    }
    
    hr = context->NuiSkeletonTrackingEnable( NULL, 0 );
    if ( FAILED(hr) )
    {
        std::cout << "Unable to start tracking skeleton." << std::endl;
        return false;
    }
    return true;
}
Example #7
0
Kinect1::Kinect1() :
	KinectInterface(1280, 960, 640, 480), sensor(NULL),
	colorBuffer(new UINT32[colorWidth * colorHeight]), depthBuffer(new UINT16[depthWidth * depthHeight])
{
	// Initialize both arrays to 0
	for (int i = 0; i < colorWidth * colorHeight; i++) {
		colorBuffer[i] = 0;
	}
	for (int i = 0; i < depthWidth * depthHeight; i++) {
		depthBuffer[i] = 0;
	}

	int numSensors, result;

	// Make sure sensor exists
	result = NuiGetSensorCount(&numSensors);
	if (result >= 0 && numSensors > 0)
	{
		result = NuiCreateSensorByIndex(0, &sensor);
		if (result >= 0)
		{
			// Initialize sensor with both color and depth
			sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);

			// Open color stream
			sensor->NuiImageStreamOpen(
				// 1280 x 960 color
				NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_1280x960,
				// Do not use near mode
				0,
				// Buffer for 2 frames
				2,
				// No handler for events
				NULL,
				// Get the color stream
				&colorStream);

			// Open depth stream
			sensor->NuiImageStreamOpen(
				NUI_IMAGE_TYPE_DEPTH,
				NUI_IMAGE_RESOLUTION_640x480,
				// Change this argument to NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE for near mode
				0,
				2,
				NULL,
				&depthStream);
		}
	}
}
Example #8
0
	virtual bool start( uint32_t startGenerators, uint32_t configureImages ) 
	{
		if( m_IsInitialized )
			return false;
		m_GeneratorConfig = startGenerators;
		m_ImageConfig = configureImages;

		//initialization --------------------------------------------------------->
		_2REAL_LOG(info) << "_2Real: Initializing Microsoft Kinect SDK" << std::endl;
		
		HRESULT status = 0;
		//get number of devices
		int deviceCount = 0;
		if( FAILED( status = NuiGetSensorCount( &deviceCount ) ) )
		{
			throwError( "_2Real: Error when trying to enumerate devices\n" );
		}

		//abort if no devices found
		if( ( m_NumDevices = deviceCount ) == 0 )
		{
			_2REAL_LOG(error) << "_2Real: No devices found" << std::endl;
			return false;
		}

		_2REAL_LOG(info) << "_2Real: detected number of sensors: " << deviceCount << std::endl;
		std::stringstream ss;

		//initialization putting device instances to m_devices vector
		INuiSensor* devicePtr;
		for( int i = 0; i < deviceCount; ++i )
		{
			//creating instance
			if( FAILED( status = NuiCreateSensorByIndex( i, &devicePtr ) ) )
			{
				// build the message with the stringstream instead of adding an int to a string literal
				ss.str( "" );
				ss << "_2Real: Error when trying to create device " << i;
				throwError( ss.str().c_str() );
			}
			//saving to vector
			ss.str( "" ); 
			ss << "kinect_device_" << i;
			m_Devices.push_back( new WSDKDevice( devicePtr, startGenerators, configureImages, ss.str().c_str() ) );
		}
		m_IsInitialized = 1;
		_2REAL_LOG(info) << "_2Real: Initialization: OK" << std::endl;
		return true;
	}
Example #9
0
HRESULT KinectManager::initialize()
{
	// reset vars //
	kinectInitialized = false;
	kinectFailed = false;

	// create vars //
	int numSensors;
	
	//get number of sensors
	HRESULT hr = NuiGetSensorCount(&numSensors);

	if (SUCCEEDED(hr))
	{
		if (numSensors == 0)
		{
			//NO KINECTS CONNECTED
			hr = MAKE_HRESULT(0, 0, -1);
			kinectFailed = true;
			
			cout << "No Kinect sensors found\n";
		}
		else
		{
			cout << "Found sensor...\n";
			//get the first kinect and its status
			NuiCreateSensorByIndex(0, &pKinect);
			hr = pKinect->NuiStatus();
		}
	}

	if (SUCCEEDED(hr))
	{
		//kinect ready
		cout << "Kinect is ready, starting now\n";
		kinectInitialized = true;
		startKinect();
	}
	else if (hr != S_NUI_INITIALIZING)
	{
		kinectFailed = true;
		cout << "Kinect failed and is not initializing\n";
	}

	return hr;
}
Example #10
0
HRESULT
KinectImpl::initDevice()
{
    INuiSensor * pNuiSensor=NULL;

    int iSensorCount = 0;
    HRESULT hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr))
    {
        return hr;
    }

    // Look at each Kinect sensor
    for (int i = 0; i < iSensorCount; ++i)
    {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        hr = NuiCreateSensorByIndex(i, &pNuiSensor);
        if (FAILED(hr))
        {
            continue;
        }

        // Get the status of the sensor, and if connected, then we can initialize it
        hr = pNuiSensor->NuiStatus();
        if (S_OK == hr)
        {
            m_pNuiSensor = pNuiSensor;
            break;
        }

        // This sensor wasn't OK, so release it since we're not using it
        pNuiSensor->Release();
    }

    if (NULL != m_pNuiSensor)
    {
        hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_AUDIO
				| NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
				| NUI_INITIALIZE_FLAG_USES_SKELETON
				| NUI_INITIALIZE_FLAG_USES_COLOR
				);
    }

	return hr;
}
Example #11
0
INuiSensor*
GetNuiPointer()
{
    int sensorCount = 0;
    INuiSensor *nuiSensor = NULL;
    HRESULT hr = NuiGetSensorCount(&sensorCount);

	if (FAILED(hr))
	{
		throw "Failed to get NUI sensor count";
	}
	

    // Look at each Kinect sensor
    for (int i=0; i<sensorCount; i++)
    {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        hr = NuiCreateSensorByIndex(i, &nuiSensor);
        if (FAILED(hr)) continue;

        // Get the status of the sensor, and if connected, then we can initialize it
        hr = nuiSensor->NuiStatus();
        if (S_OK == hr) break;

        // This sensor wasn't OK, so release it since we're not using it
        nuiSensor->Release();
    }

    // If we couldn't get an instance, return failure
	if (NULL == nuiSensor)
	{
		throw "Failed to get NUI Sensor instance";
	}

    // Initialize the Kinect and specify that we'll be using depth
    hr = nuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR
            | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX);

    if (FAILED(hr))
    {
        // Initialization failed: release the sensor instead of leaking it
        nuiSensor->Release();
        return NULL;
    }
    return nuiSensor;
}
Example #12
0
HRESULT KinectManager::initialize()
{
	INuiSensor * nui;
	int nuiCount = 0;
	HRESULT hr;


	NuiSetDeviceStatusCallback(OnSensorStatusChanged, NULL);

	hr = NuiGetSensorCount(&nuiCount);
	if ( FAILED(hr))
	{
		return hr;
	}

	// Look at each kinect sensor
	for (int i = 0; i < nuiCount; i++)
	{
		// Create the sensor so we can check status, if we can't create it, move on.
		hr = NuiCreateSensorByIndex(i, &nui);
		if (FAILED(hr))
		{
			continue;
		}

		// Get the status of the sensor, and if connected, then we can initialize it.
		hr = nui->NuiStatus();

		if (S_OK == hr)
		{
			nuiList.push_front(nui);
			continue;	// keep this sensor; don't release the reference we just stored
		}

		// This sensor was not okay, so we release it since we're not using it.
		nui->Release();
	}

	return hr;


}
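Example #12 registers OnSensorStatusChanged with NuiSetDeviceStatusCallback, but the callback itself is not shown. The sketch below follows the NuiStatusProc signature; its body is an assumption about what such a handler might do.

// Hypothetical device-status callback for Example #12 (signature per NuiStatusProc).
void CALLBACK OnSensorStatusChanged(HRESULT hrStatus, const OLECHAR* instanceName,
                                    const OLECHAR* uniqueDeviceName, void* pUserData)
{
	if (SUCCEEDED(hrStatus))
	{
		// A sensor was connected or became ready; it could be re-enumerated and added to nuiList here.
		OutputDebugStringW(L"Kinect sensor connected\n");
	}
	else
	{
		// A sensor was disconnected or entered an error state.
		OutputDebugStringW(L"Kinect sensor disconnected or in error state\n");
	}
}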
Example #13
0
/// <summary>
/// Enumerate and construct all the sensors when the app starts up
/// </summary>
void CMainWindow::EnumerateSensors()
{
    int iCount = 0;
    HRESULT hr = NuiGetSensorCount(&iCount);
    if (FAILED(hr))
    {
        return;
    }

    for (int i = 0; i < iCount; ++i)
    {
        INuiSensor* pNuiSensor = nullptr;

        if (SUCCEEDED(NuiCreateSensorByIndex(i, &pNuiSensor)))
        {
            UpdateMainWindow(pNuiSensor->NuiDeviceConnectionId(), pNuiSensor->NuiStatus());
        }

        SafeRelease(pNuiSensor);
    }
}
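Example #13 calls a SafeRelease helper that is not defined in the snippet. The definition below is an assumption, matching the usual template used alongside INuiSensor in these samples: it releases a COM interface pointer once and nulls it.

// Assumed definition of the SafeRelease helper used above.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != NULL)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}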
Example #14
0
/// <summary>
/// Creates the application's first connection to the Kinect. Checks and initializes the sensors.
/// </summary>
/// <returns>Returns an HRESULT indicating which of the requested flags were initialized and which were not.</returns>
HRESULT CreateFirstConnected() {
    INuiSensor * pNuiSensor;
    HRESULT hr;

    int iSensorCount = 0;
    hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr)) {
        return hr;
    }

    // Look at each Kinect sensor
    for (int i = 0; i < iSensorCount; ++i) {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        hr = NuiCreateSensorByIndex(i, &pNuiSensor);
        if (FAILED(hr)) {
            continue;
        }

        // Get the status of the sensor, and if connected, then we can initialize it
        hr = pNuiSensor->NuiStatus();
        if (S_OK == hr) {
            m_pNuiSensor = pNuiSensor;
            break;
        }

        // This sensor wasn't OK, so release it since we're not using it
        pNuiSensor->Release();
    }

	// If no sensor was found, bail out before calling NuiInitialize on a NULL pointer
	if (m_pNuiSensor == NULL) {
		return E_FAIL;
	}

	// Initialize the Kinect and specify the subsystems we'll be using
	DWORD dwFlags = 0;
	if (m_bProcessColor) dwFlags |= NUI_INITIALIZE_FLAG_USES_COLOR;
	if (m_bProcessDepth) 
		m_bUserDetection ? (dwFlags |= NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX) : (dwFlags |= NUI_INITIALIZE_FLAG_USES_DEPTH);
	if (m_bProcessSkeleton)
		dwFlags |= NUI_INITIALIZE_FLAG_USES_SKELETON;

    hr = m_pNuiSensor->NuiInitialize(dwFlags);
    return hr;
}
Example #15
0
bool initKinect() {
    // Get a working kinect sensor
    int numSensors;
    if (NuiGetSensorCount(&numSensors) < 0 || numSensors < 1) return false;
    if (NuiCreateSensorByIndex(0, &sensor) < 0) return false;

    // Initialize sensor
    sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH | NUI_INITIALIZE_FLAG_USES_COLOR);
    sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, // Depth camera or rgb camera?
        NUI_IMAGE_RESOLUTION_640x480,                // Image resolution
        0,        // Image stream flags, e.g. near mode
        2,        // Number of frames to buffer
        NULL,     // Event handle
        &depthStream);
	sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, // Depth camera or rgb camera?
        NUI_IMAGE_RESOLUTION_640x480,                // Image resolution
        0,      // Image stream flags, e.g. near mode
        2,      // Number of frames to buffer
        NULL,   // Event handle
		&rgbStream);
    return sensor;
}
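Example #15 opens a depth and a color stream but never reads from them. The sketch below shows one way a color frame could be pulled from rgbStream, following the same NuiImageStreamGetNextFrame / LockRect / ReleaseFrame pattern used in Example #30; the function name getColorData and the caller-supplied BGRA buffer are assumptions.

// Hypothetical frame read for the streams opened in initKinect() above.
bool getColorData(BYTE* dest)
{
    NUI_IMAGE_FRAME imageFrame;
    if (FAILED(sensor->NuiImageStreamGetNextFrame(rgbStream, 0, &imageFrame)))
        return false;               // no new frame available yet

    INuiFrameTexture* texture = imageFrame.pFrameTexture;
    NUI_LOCKED_RECT lockedRect;
    texture->LockRect(0, &lockedRect, NULL, 0);

    if (lockedRect.Pitch != 0)
    {
        // 640 x 480 pixels, 4 bytes per pixel (BGRA)
        memcpy(dest, lockedRect.pBits, 640 * 480 * 4);
    }

    texture->UnlockRect(0);
    sensor->NuiImageStreamReleaseFrame(rgbStream, &imageFrame);
    return true;
}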
Example #16
0
// called once on initialization
void SensorManager::CreateListOfAvailableSensors()
{
    // gets the count of sensors from Kinect for Windows API
    int iCount = 0;
    HRESULT hr = NuiGetSensorCount( &iCount );
    if( FAILED(hr) )
    {
        return;
    }

    // if there are no sensors, create a temporary one
    if( 0 == iCount )
    {
        // when the first sensor is connected, it will be added
        // with the correct id
        if( m_kinectSensors.size() == 0 )
        {
            AddSensorToList( m_wcTempPortID );
        }
    }

    // add the known sensors to the list
    for (int i = 0; i < iCount; ++i)
    {
        INuiSensor* pNui = nullptr;
        ComSmartPtr<INuiSensor> pNuiPtr;
        if( SUCCEEDED( NuiCreateSensorByIndex(i, &pNui) ) )
        {
            pNuiPtr.Attach(pNui); // Setting up smart pointer for sensor.

            const WCHAR* wcPortID = pNuiPtr->NuiDeviceConnectionId();  // contains the connection string for the device
                                                                    // plugged into a specific USB port.
                                                                    // Removing the sensor and plugging it into a
                                                                    // different port will give it a different ID.
            AddSensorToList( wcPortID );
        }
    }

}
Example #17
0
KinectSensor::KinectSensor()
{
    m_hNextDepthFrameEvent = NULL;
    m_hNextVideoFrameEvent = NULL;
    m_hNextSkeletonEvent = NULL;
    m_pDepthStreamHandle = NULL;
    m_pVideoStreamHandle = NULL;
    m_hThNuiProcess=NULL;
    m_hEvNuiProcessStop=NULL;
    m_bNuiInitialized = false;
    m_FramesTotal = 0;
    m_SkeletonTotal = 0;
    m_VideoBuffer = NULL;
    m_DepthBuffer = NULL;
    m_ZoomFactor = 1.0f;
    m_ViewOffset.x = 0;
    m_ViewOffset.y = 0;

	// Get a working kinect sensor
	if (NuiCreateSensorByIndex(numOfInitilizedSensors, &m_Sensor) >= 0) {
		numOfInitilizedSensors++;
	}
}
Example #18
0
bool initKinect() {
    // Get a working kinect sensor
    int numSensors;
    if (NuiGetSensorCount(&numSensors) < 0 || numSensors < 1) return false;
    if (NuiCreateSensorByIndex(0, &sensor) < 0) return false;

    // Initialize sensor
    sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_SKELETON);
    sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, // Depth camera or rgb camera?
        NUI_IMAGE_RESOLUTION_640x480,                // Image resolution
        0,        // Image stream flags, e.g. near mode
        2,        // Number of frames to buffer
        NULL,     // Event handle
        &depthStream);
	sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, // Depth camera or rgb camera?
        NUI_IMAGE_RESOLUTION_640x480,                // Image resolution
        0,      // Image stream flags, e.g. near mode
        2,      // Number of frames to buffer
        NULL,   // Event handle
		&rgbStream);
	sensor->NuiSkeletonTrackingEnable(NULL, 0); // NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT for only upper body
    return sensor;
}
Example #19
0
wxString KinectHelperImplMSW::GetDeviceName(size_t index)
{
    BSTR result;
    DWORD size;
    INuiSensor * instance(NULL);
    wxString name = wxT("Unknown Kinect Sensor");
    if(!FAILED(NuiCreateSensorByIndex(index, &instance)))
    {
        if(instance != NULL)
        {

            //if(instance->NuiGetPropsBlob(
            //	MsrNui::INDEX_UNIQUE_DEVICE_NAME,
            //	&result, &size))
            //{
            //name = result;
            //SysFreeString(result);
            //}
            //NuiDestroyInstance(instance);
            name = wxString::Format(wxT("Kinect#%u"), index);
        }
    }
    return name;
}
Example #20
0
/*
 * @brief A general Nui initialization function.  Sets all of the initial parameters.
 */
void initNui(void)	        // We call this before any other Nui functions are used.
{
	HRESULT hr;

	hr = NuiCreateSensorByIndex(0, &pNuiSensor);
	if(FAILED(hr)) STDERR("Cannot connect with kinect0.\r\n");

	hr = pNuiSensor->NuiInitialize(

		//NUI_INITIALIZE_FLAG_USES_DEPTH |
		NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | 
		NUI_INITIALIZE_FLAG_USES_COLOR | 
		NUI_INITIALIZE_FLAG_USES_SKELETON
		);
	if ( E_NUI_SKELETAL_ENGINE_BUSY == hr ){
		hr = pNuiSensor->NuiInitialize(
			NUI_INITIALIZE_FLAG_USES_DEPTH |
			NUI_INITIALIZE_FLAG_USES_COLOR
			);
	}
	if(FAILED(hr)){
		STDERR("Cannot initialize kinect.\r\n");
	}

	hNextColorFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	hNextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	hNextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

	if(HasSkeletalEngine(pNuiSensor)){
		hr = pNuiSensor->NuiSkeletonTrackingEnable( hNextSkeletonEvent, 
			//NUI_SKELETON_TRACKING_FLAG_TITLE_SETS_TRACKED_SKELETONS |
			//NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT 
			0
			);
		if(FAILED(hr)) STDERR("Cannot track skeletons\r\n");
	}

	hr = pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_COLOR,
		NUI_IMAGE_RESOLUTION_640x480,
		0,
		2,
		hNextColorFrameEvent,
		&pVideoStreamHandle );
	if(FAILED(hr)){
		STDERR("Cannot open image stream\r\n");
	}

	hr = pNuiSensor->NuiImageStreamOpen(
		HasSkeletalEngine(pNuiSensor) ? NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX : NUI_IMAGE_TYPE_DEPTH,
		NUI_IMAGE_RESOLUTION_320x240,
		0,
		2,
		hNextDepthFrameEvent,
		&pDepthStreamHandle );
	if(FAILED(hr)){
		STDERR("Cannot open depth and player stream\r\n");
	}
/*
	hr = pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_DEPTH,
		NUI_IMAGE_RESOLUTION_640x480,
		0,
		2,
		hNextDepthFrameEvent,
		&pDepthStreamHandle );
	if(FAILED(hr)){
		STDERR("Cannot open depth stream\r\n");
	}
*/
#if defined(USE_FACETRACKER)
	initFaceTracker();
#endif
}
Example #21
0
void KinectDataMan::ShowColorDepth()
{
	// init kinect and connect
	if( m_cvhelper.IsInitialized() )
		return;

	m_cvhelper.SetColorFrameResolution(color_reso);
	m_cvhelper.SetDepthFrameResolution(depth_reso);

	HRESULT hr;
	// Get number of Kinect sensors
	int sensorCount = 0;
	hr = NuiGetSensorCount(&sensorCount);
	if (FAILED(hr)) 
	{
		return;
	}

	// If no sensors, update status bar to report failure and return
	if (sensorCount == 0)
	{
		cerr << "No Kinect sensor found" << endl;
		return;
	}

	// Iterate through Kinect sensors until one is successfully initialized
	for (int i = 0; i < sensorCount; ++i) 
	{
		INuiSensor* sensor = NULL;
		hr = NuiCreateSensorByIndex(i, &sensor);
		if (SUCCEEDED(hr))
		{
			hr = m_cvhelper.Initialize(sensor);
			if (SUCCEEDED(hr)) 
			{
				// Report success
				cerr<<"Kinect initialized"<<endl;
				break;
			}
			else
			{
				// Uninitialize KinectHelper to show that Kinect is not ready
				m_cvhelper.UnInitialize();
				return;
			}
		}
	}

	DWORD width, height;
	m_cvhelper.GetColorFrameSize(&width, &height);
	Size colorsz(width, height);
	Mat cimg(colorsz, m_cvhelper.COLOR_TYPE);
	m_cvhelper.GetDepthFrameSize(&width, &height);
	Size depthsz(width, height);
	Mat dimg(depthsz, m_cvhelper.DEPTH_RGB_TYPE);

	// start processing
	while(true)
	{
		// get color frame
		if( SUCCEEDED(m_cvhelper.UpdateColorFrame()) )
		{
			HRESULT hr = m_cvhelper.GetColorImage(&cimg);
			if(FAILED(hr))
				break;

			imshow("color", cimg);
			if( waitKey(10) == 'q' )
				break;
		}

		if( SUCCEEDED(m_cvhelper.UpdateDepthFrame()) )
		{
			HRESULT hr = m_cvhelper.GetDepthImageAsArgb(&dimg);
			if(FAILED(hr))
				break;

			imshow("depth", dimg);
			if( waitKey(10) == 'q' )
				break;
		}
	}

	destroyAllWindows();

}
Example #22
0
bool KinectInput::initSensor()
{
	INuiSensor* tempSensor;
	int numSensors = 0;
	if (NuiGetSensorCount(&numSensors) != S_OK ) 
	{
		std::cout << "Kinect Error: Sensor lookup failed, probably no sensors are connected" << std::endl;
		std::getchar();
		return false;
	}

	// Loop through all sensors
	for (int i = 0; i < numSensors; ++i)
	{
		// Create the sensor so we can check status, if we can't create it, move on to the next
		if (NuiCreateSensorByIndex(i, &tempSensor) < 0)
		{
			continue;
		}

		// Get the status of the sensor, and if connected, then we can initialize it
		if (tempSensor->NuiStatus() == S_OK)
		{
			kinectSensor = tempSensor;
			break;
		}

		// This sensor wasn't OK, so release it since we're not using it
		tempSensor->Release();
	}

	if (kinectSensor != NULL)
	{
		// Initialize the Kinect and specify that we'll be using skeleton
		if (kinectSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON) >= 0)
		{
			// Create an event that will be signaled when skeleton data is available
			nextSkeletonUpdate = CreateEventW(NULL, TRUE, FALSE, NULL);

			// Open a skeleton stream to receive skeleton data
			if (kinectSensor->NuiSkeletonTrackingEnable(nextSkeletonUpdate, 0) < 0)
			{
				std::cout << "Kinect Error: Couldn't open a skeleton stream" << std::endl;
				std::getchar();
				return false;
			}
		}
		else 
		{
			std::cout << "Kinect Error: Sensor failed initialization" << std::endl;
			std::getchar();
			return false;
		}
	}
	else
	{
		std::cout << "Kinect Error: No sensor could be initialized" << std::endl;
		std::getchar();
		return false;
	}

	return true;
}
Example #23
0
/*!
//\par		Description:
//			Sets all members to their default values,
//			reads the configuration from the settings file and tries to find the first connected Kinect device.
//
//\par		Return value:
//			True is the expected value. When this method returns false, something is wrong with the
//			Kinect sensor: it may be disconnected or the drivers may be incompatible. The first thing to check
//			in this case is the green LED on the Kinect sensor. It should shine steadily; flashing
//			indicates a problem.
//
//\retval	true	kinect is ready to work
//\retval	false	kinect is not found
*/
bool CKinectHandler::createFirstConnectedKinectSensor()
{
	qDebug() << "createFirstConnectedKinectSensor()";

	INuiSensor * pSensor;
	int iSensorCount = 0;

	// HRESULT - just standard long
	HRESULT hr = NuiGetSensorCount( & iSensorCount );
	if ( FAILED( hr ) ) // checks if hr is less than 0
	{
		qDebug() << "Cannot get available kinect sensor count";
		return false;
	}

	// checks all sensors and takes first one which can be created
	for ( int i = 0 ; i < iSensorCount ; i++ )
	{
		hr = NuiCreateSensorByIndex( i, & pSensor );

		if ( FAILED( hr ) )
		{
			qDebug() << "Cannot create sensor with index: " << i << ", check next index";
			continue;
		}

		hr = pSensor->NuiStatus();

		if ( hr == S_OK ) // sensor is connected, it can be initialized now
		{
			m_pSensor = pSensor;
			qDebug() << "Sensor with index: " << i << " is connected - breaking loop";

			break;
		}

		// release not ok sensor
		pSensor->Release();
	}

	if ( m_pSensor != NULL )
	{
		hr = m_pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_SKELETON ); // in order to use skeleton

		if ( SUCCEEDED( hr ) )
		{
			// create event to inform application about new skeleton frame
			m_hNextSkeletonEvent = CreateEventW( NULL, TRUE, FALSE, NULL );

			// open skeleton stream to get skeleton data
			hr = m_pSensor->NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, 0 );
		}
	}

	if ( m_pSensor == NULL || FAILED( hr ) )
	{
		qDebug() << "No ready kinect found";
		m_kinectDetails.insert( "connection", false );

		return false;
	}

	m_kinectDetails.insert( "connection", true );
	m_kinectDetails.insert( "skeleton_found", false );

	return true;
}
Example #24
0
bool KinectHandler::initialize( int kid )
{
    HRESULT hr = NuiCreateSensorByIndex( kid, &_context );
    if ( FAILED(hr) )
    {
        OSG_WARN << "Failed to connect to Kinect device." << std::endl;
        return false;
    }
    
    DWORD nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON
                   |  NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH;
    hr = _context->NuiInitialize( nuiFlags );
    if ( FAILED(hr) )
    {
        switch ( hr )
        {
        case E_NUI_DEVICE_IN_USE:
            OSG_WARN << "Kinect device is already in use." << std::endl;
            break;
        case E_NUI_SKELETAL_ENGINE_BUSY:
            OSG_WARN << "Kinect device is busy at present." << std::endl;
            break;
        default:
            OSG_WARN << "Kinect device failed with error code " << std::hex << (long)hr << std::dec << std::endl;
            break;
        }
        return false;
    }
    
    bool hasSkeleton = HasSkeletalEngine(_context);
    if ( hasSkeleton )
    {
        _nextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
        hr = _context->NuiSkeletonTrackingEnable( _nextSkeletonEvent, 0 );
        if ( FAILED(hr) )
        {
            OSG_WARN << "Unable to start tracking skeleton." << std::endl;
            return false;
        }
        
        _nuiProcessStopEvent = CreateEvent( NULL, FALSE, FALSE, NULL );
        _nuiProcess = CreateThread( NULL, 0, nuiProcessThread, this, 0, NULL );
    }
    else
    {
        OSG_WARN << "Current device doesn't have a skeleton engine." << std::endl;
    }
    
    _nextImageFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
    /*if ( hasSkeleton )
    {
        hr = _context->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, KINECT_IMAGE_RESOLUTION,
                                           0, 2, _nextImageFrameEvent, &_videoStreamHandle );
    }
    else*/
    {
        hr = _context->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH, KINECT_IMAGE_RESOLUTION,
                                           0, 2, _nextImageFrameEvent, &_videoStreamHandle );
    }
    //hr = _context->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, KINECT_IMAGE_RESOLUTION,
    //                                   0, 2, _nextImageFrameEvent, &_videoStreamHandle );
    if ( FAILED(hr) )
    {
        OSG_WARN << "Unable to create image stream. Error code " << std::hex << (long)hr << std::dec << std::endl;
        return false;
    }
    return true;
}
Example #25
0
// In the current version, the resolution cannot be changed.
// All of the Kinect's sensors are activated.
HRESULT Kinect::createFirstConnected(){
	INuiSensor * pNuiSensor;
	HRESULT hr;

	int iSensorCount = 0;
	hr = NuiGetSensorCount(&iSensorCount);
	if (FAILED(hr))
	{
		return hr;
	}

	// Look at each Kinect sensor
	for (int i = 0; i < iSensorCount; ++i)
	{
		// Create the sensor so we can check status, if we can't create it, move on to the next
		hr = NuiCreateSensorByIndex(i, &pNuiSensor);
		if (FAILED(hr))
		{
			continue;
		}

		// Get the status of the sensor, and if connected, then we can initialize it
		hr = pNuiSensor->NuiStatus();
		if (S_OK == hr)
		{
			m_pNuiSensor = pNuiSensor;
			break;
		}

		// This sensor wasn't OK, so release it since we're not using it
		pNuiSensor->Release();
	}

	if (NULL != m_pNuiSensor)
	{
		// Initialize the Kinect and specify that we'll be using color, depth and skeleton
		hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | 
			NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
			NUI_INITIALIZE_FLAG_USES_SKELETON);
		if (SUCCEEDED(hr))
		{
			// Create an event that will be signaled when depth data is available
			m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

			// Open a depth image stream to receive depth frames
			hr = m_pNuiSensor->NuiImageStreamOpen(
				NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
				NUI_IMAGE_RESOLUTION_640x480,
				NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE|NUI_IMAGE_STREAM_FLAG_DISTINCT_OVERFLOW_DEPTH_VALUES,
				//NUI_IMAGE_STREAM_FRAME_LIMIT_MAXIMUM,
				2,
				m_hNextDepthFrameEvent,
				&m_hDepthStreamHandle);
			//m_pNuiSensor->NuiImageStreamSetImageFrameFlags(&m_hDepthStreamHandle, NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE|NUI_IMAGE_STREAM_FLAG_DISTINCT_OVERFLOW_DEPTH_VALUES);

		}

		if (SUCCEEDED(hr))
		{
			// Open a color image stream to receive color frames
			m_hNextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

			hr = m_pNuiSensor->NuiImageStreamOpen(
				NUI_IMAGE_TYPE_COLOR,
				NUI_IMAGE_RESOLUTION_640x480,
				0,
				2,
				m_hNextColorFrameEvent,
				&m_hColorStreamHandle);
		}
		if (SUCCEEDED(hr)){
			// Enable the skeleton tracking
			m_hNextSkeletonEvent = CreateEventW(NULL, TRUE, FALSE, NULL);
			//hr = m_pNuiSensor->NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, 0); 
			hr = m_pNuiSensor->NuiSkeletonTrackingEnable(
				m_hNextSkeletonEvent,  NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE | NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT);
		}
	}

	if (NULL == m_pNuiSensor || FAILED(hr))
	{
		return E_FAIL;
	}
	m_bInitialized = true;
	return hr;
}
Example #26
0
KinectImageSource::KinectImageSource(int device) : ImageSource(lexical_cast<string>(device)), frame() {

#ifdef WITH_MSKINECT_SDK
    m_pColorStreamHandle = INVALID_HANDLE_VALUE;
    /// Create the first connected Kinect found
    INuiSensor * pNuiSensor;
    HRESULT hr;

    int iSensorCount = 0;
    hr = NuiGetSensorCount(&iSensorCount);
    if (FAILED(hr))
    {
        std::cout << "Error getting sensor count. No Kinect plugged in? hr=" << hr << std::endl;
    }

    // Look at each Kinect sensor
    for (int i = 0; i < iSensorCount; ++i)
    {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        hr = NuiCreateSensorByIndex(i, &pNuiSensor);
        if (FAILED(hr))
        {
            continue;
        }

        // Get the status of the sensor, and if connected, then we can initialize it
        hr = pNuiSensor->NuiStatus();
        if (S_OK == hr)
        {
            m_pNuiSensor = pNuiSensor;
            break;
        }

        // This sensor wasn't OK, so release it since we're not using it
        pNuiSensor->Release();
    }

    if (NULL != m_pNuiSensor)
    {
        // Initialize the Kinect and specify that we'll be using color
        hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR);
        if (SUCCEEDED(hr))
        {
            // Create an event that will be signaled when color data is available
            //m_hNextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

            // Open a color image stream to receive color frames
            hr = m_pNuiSensor->NuiImageStreamOpen(
                     NUI_IMAGE_TYPE_COLOR,
                     NUI_IMAGE_RESOLUTION_640x480,
                     0,
                     2,
                     NULL,
                     &m_pColorStreamHandle);
        }
    }

    if (NULL == m_pNuiSensor || FAILED(hr))
    {
        std::cout << "No ready Kinect found!" << std::endl;
    }

    //std::cout << "hr: " << hr << std::endl;

#else
    std::cerr << "Error! This is the Microsoft Kinect SDK interface and not available under Linux." << std::endl;
#endif

}
Example #27
0
HRESULT KinectSensor::createFirstConnected()
{
	INuiSensor* pNuiSensor = NULL;
	HRESULT hr = S_OK;

	int iSensorCount = 0;
	hr = NuiGetSensorCount(&iSensorCount);
	if (FAILED(hr) ) { return hr; }

	// Look at each Kinect sensor
	for (int i = 0; i < iSensorCount; ++i) {
		// Create the sensor so we can check status, if we can't create it, move on to the next
		hr = NuiCreateSensorByIndex(i, &pNuiSensor);
		if (FAILED(hr))	{
			continue;
		}

		// Get the status of the sensor, and if connected, then we can initialize it
		hr = pNuiSensor->NuiStatus();
		if (S_OK == hr)	{
			m_pNuiSensor = pNuiSensor;
			break;
		}

		// This sensor wasn't OK, so release it since we're not using it
		pNuiSensor->Release();
	}

	if (NULL == m_pNuiSensor) {
		return E_FAIL;
	}

	// Initialize the Kinect and specify that we'll be using depth
	//hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX); 
	hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH); 
	if (FAILED(hr) ) { return hr; }

	// Create an event that will be signaled when depth data is available
	m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Open a depth image stream to receive depth frames
	hr = m_pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_DEPTH,
		//NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
		cDepthResolution,
		(8000 << NUI_IMAGE_PLAYER_INDEX_SHIFT),
		2,
		m_hNextDepthFrameEvent,
		&m_pDepthStreamHandle);
	if (FAILED(hr) ) { return hr; }

	// Create an event that will be signaled when color data is available
	m_hNextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Open a color image stream to receive color frames
	hr = m_pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_COLOR,
		cColorResolution,
		0,
		2,
		m_hNextColorFrameEvent,
		&m_pColorStreamHandle );
	if (FAILED(hr) ) { return hr; }

	INuiColorCameraSettings* colorCameraSettings;
	HRESULT hrFlag = m_pNuiSensor->NuiGetColorCameraSettings(&colorCameraSettings);

	if (hrFlag != E_NUI_HARDWARE_FEATURE_UNAVAILABLE)
	{
		m_kinect4Windows = true;
	}

	//TODO MATTHIAS: does this function have to be called every frame?

	USHORT* test = new USHORT[getDepthWidth()*getDepthHeight()];
	// Get offset x, y coordinates for color in depth space
	// This will allow us to later compensate for the differences in location, angle, etc between the depth and color cameras
	m_pNuiSensor->NuiImageGetColorPixelCoordinateFrameFromDepthPixelFrameAtResolution(
		cColorResolution,
		cDepthResolution,
		getDepthWidth()*getDepthHeight(),
		test,
		getDepthWidth()*getDepthHeight()*2,
		m_colorCoordinates
		);
	SAFE_DELETE_ARRAY(test);

	// Start with near mode on (if possible)
	m_bNearMode = false;
	if (m_kinect4Windows) {
		toggleNearMode();
	}

	//toggleAutoWhiteBalance();

	return hr;
}
Example #28
0
int Init()
{

	//Make sure our image types are the same as the OpenNI image types.
	//assert(sizeof(XnRGB24Pixel) == sizeof(ColorPixel));
	//assert(sizeof(XnDepthPixel) == sizeof(DepthPixel));
	//assert(sizeof(XnStatus) == sizeof(int));
	
	INuiSensor* nuiSensorPtr;
	int ret;

	// Get number of sensors
	int sensorCount = 0;
	ret = NuiGetSensorCount(&sensorCount);
	if(ret < 0)
	{
		return -1;
	}

	// Connect to first sensor
	for(int i=0; i<sensorCount; i++)
	{
		// Create sensor so we can check the status
		ret = NuiCreateSensorByIndex(i, &nuiSensorPtr);
		if(ret < 0)
			continue;

		// Get the status of the sensor
		ret = nuiSensorPtr->NuiStatus();
		if(ret < 0)
		{
			(void)nuiSensorPtr->Release();
			nuiSensorPtr = NULL;
			continue;
		}
		
		// Keep the first working sensor and stop searching
		sensor = nuiSensorPtr;
		break;
	}

	// Make sure we have a sensor
	if(sensor == NULL)
	{
		return -1;
	}
	
	// Initialize Kinect
	ret = sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);
	if(ret < 0)
	{
		return -1;
	}
	
	// Create an event that will be signaled when color data is available
    colorImageReadyEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Create an event that will be signaled when depth data is available
    depthImageReadyEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

	// Open a color image stream to receive color frames
	ret = sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, colorImageReadyEvent, &colorStream);
	if(ret < 0)
	{
		return -1;
	}

	// Open a depth image stream to receive depth frames
    ret = sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_640x480, 0, 2, depthImageReadyEvent, &depthStream);
	if(ret < 0)
	{
		return -1;
	}

	return 0;
}
Example #29
0
HRESULT Nui_Init( )
{
	HRESULT  hr;
	bool     result;

	if ( !m_pNuiSensor )
	{
		hr = NuiCreateSensorByIndex(0, &m_pNuiSensor);

		if ( FAILED(hr) )
		{
			return hr;
		}

		SysFreeString(m_instanceId);

		m_instanceId = m_pNuiSensor->NuiDeviceConnectionId();
	}

	m_hNextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	m_hNextColorFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	m_hNextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );	

	DWORD nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON |  NUI_INITIALIZE_FLAG_USES_COLOR;

	hr = m_pNuiSensor->NuiInitialize(nuiFlags);
	if ( E_NUI_SKELETAL_ENGINE_BUSY == hr )
	{
		nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH |  NUI_INITIALIZE_FLAG_USES_COLOR;
		hr = m_pNuiSensor->NuiInitialize( nuiFlags) ;
	}

	if ( HasSkeletalEngine( m_pNuiSensor ) )
	{
		//m_SkeletonTrackingFlags = NUI_SKELETON_TRACKING_FLAG_TITLE_SETS_TRACKED_SKELETONS;
		hr = m_pNuiSensor->NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, 0/*m_SkeletonTrackingFlags*/ );
		if( FAILED( hr ) )
		{
			return hr;
		}
	}

	g_ColorImgResolution = NUI_IMAGE_RESOLUTION_640x480;
	hr = m_pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_COLOR,
		g_ColorImgResolution,
		0,
		2,
		m_hNextColorFrameEvent,
		&m_pVideoStreamHandle );

	if ( FAILED( hr ) )
	{
		return hr;
	}

	//g_DepthImgResolution = NUI_IMAGE_RESOLUTION_320x240;
	g_DepthImgResolution = NUI_IMAGE_RESOLUTION_640x480;
	hr = m_pNuiSensor->NuiImageStreamOpen(
		HasSkeletalEngine(m_pNuiSensor) ? NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX : NUI_IMAGE_TYPE_DEPTH,
		g_DepthImgResolution,
		m_DepthStreamFlags,
		2,
		m_hNextDepthFrameEvent,
		&m_pDepthStreamHandle );

	if ( FAILED( hr ) )
	{
		return hr;
	}

	// Allocate point cloud buffers
	NuiImageResolutionToSize(g_DepthImgResolution, g_DepthWidth, g_DepthHeight );
	g_PointsData = new Vector4[g_DepthWidth*g_DepthHeight];
	m_DepthData = new USHORT[g_DepthWidth*g_DepthHeight];

	// Allocate color image buffer
	NuiImageResolutionToSize(g_ColorImgResolution, g_ColorWidth, g_ColorHeight);
	g_ColorsData = new BYTE[g_ColorWidth*g_ColorHeight*4];

	g_ColorCoordinates = new LONG[g_DepthWidth*g_DepthHeight*2];

	g_colorToDepthDivisor = g_ColorWidth/g_DepthWidth;

	// Start the Nui processing thread
	m_hEvNuiProcessStop = CreateEvent( NULL, FALSE, FALSE, NULL );
	m_hThNuiProcess = CreateThread( NULL, 0, Nui_ProcessThread, NULL, 0, NULL );

	g_TrackingUserID= 0;

	return hr;
}
Example #30
0
int main(void)
{
	//Set the error callback
	glfwSetErrorCallback(error_callback);

	//Initialize GLFW
	if (!glfwInit())
	{
		exit(EXIT_FAILURE);
	}

	//Set the GLFW window creation hints - these are optional
	//glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); //Request a specific OpenGL version
	//glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); //Request a specific OpenGL version
	//glfwWindowHint(GLFW_SAMPLES, 4); //Request 4x antialiasing
	//glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);


	//Create a window and create its OpenGL context
	window = glfwCreateWindow(960, 720, "Test Window", NULL, NULL);

	//If the window couldn't be created
	if (!window)
	{
		fprintf(stderr, "Failed to open GLFW window.\n");
		glfwTerminate();
		exit(EXIT_FAILURE);
	}

	//This function makes the context of the specified window current on the calling thread. 
	glfwMakeContextCurrent(window);

	//Sets the key callback
	glfwSetKeyCallback(window, key_callback);

	//Initialize GLEW
	GLenum err = glewInit();

	//If GLEW hasn't initialized
	if (err != GLEW_OK)
	{
		fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
		return -1;
	}

	//Set a background color
	glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
	glfwSetCursorPos(window, 1024 / 2, 768 / 2);

	GLuint VertexArrayID;
	glGenVertexArrays(1, &VertexArrayID);
	glBindVertexArray(VertexArrayID);

	// Create and compile our GLSL program from the shaders
	GLuint red = LoadShaders("SimpleTransform.vertexshader", "SingleColorRed.fragmentshader");
	GLuint grid = LoadShaders("SimpleTransform.vertexshader", "SingleColorGrid.fragmentshader");
	glBindFragDataLocation(red, 0, "red");
	glBindFragDataLocation(grid, 1, "grid");
	// Get a handle for our "MVP" uniform
	GLuint MatrixID = glGetUniformLocation(red, "MVP");

	// Projection matrix : 45° Field of View, 4:3 ratio, display range : 0.1 unit <-> 100 units
	glm::mat4 Projection = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 1000.0f);
	// Or, for an ortho camera :
	//glm::mat4 Projection = glm::ortho(-10.0f,10.0f,-10.0f,10.0f,0.0f,100.0f); // In world coordinates

	// Camera matrix
	glm::mat4 View = glm::lookAt(
		glm::vec3(4, 3, 3), // Camera is at (4,3,3), in World Space
		glm::vec3(0, 0, 0), // and looks at the origin
		glm::vec3(0, 1, 0)  // Head is up (set to 0,-1,0 to look upside-down)
		);


	static const GLfloat g_vertex_buffer_data[] = {
		-1.0f, -1.0f, 0.0f,
		1.0f, -1.0f, 0.0f,
		0.0f, 1.0f, 0.0f,
	};

	static const GLushort g_element_buffer_data[] = { 0, 1, 2 };

	GLuint vertexbuffer;
	glGenBuffers(1, &vertexbuffer);
	glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
	glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);

	static const GLfloat g_triangle_buffer_data[] = {
		-1.0f, -1.0f, -1.0f,
		1.0f, -1.0f, -1.0f,
		0.0f, 1.0f, -1.0f,
	};

	GLuint triangle;
	glGenBuffers(1, &triangle);
	glBindBuffer(GL_ARRAY_BUFFER, triangle);
	glBufferData(GL_ARRAY_BUFFER, sizeof(g_triangle_buffer_data), g_triangle_buffer_data, GL_STATIC_DRAW);

	// Enable depth test
	glEnable(GL_DEPTH_TEST);
	// Accept fragment if it closer to the camera than the former one
	glDepthFunc(GL_LESS);
	glEnable(GL_CULL_FACE);
	glEnable(GL_LIGHTING);
	glShadeModel(GL_SMOOTH); //OPENGL INSTANTIATION
	HRESULT hr;
	NUI_IMAGE_FRAME depthFrame;
	HANDLE hDepth;
	INuiSensor* pNuiSensor = NULL;
	int iSensorCount = 0;
	hr = NuiGetSensorCount(&iSensorCount);

	if (FAILED(hr))
		return hr;

	for (int i = 0; i < iSensorCount; i++)
	{
		INuiSensor* tempSensor;
		hr = NuiCreateSensorByIndex(i, &tempSensor);

		if (FAILED(hr))
			continue;

		hr = tempSensor->NuiStatus();
		if (S_OK == hr)
		{
			pNuiSensor = tempSensor;
			break;
		}

		tempSensor->Release();
	}

	// Abort if no ready sensor was found; the code below would otherwise dereference a NULL pointer
	if (pNuiSensor == NULL)
	{
		fprintf(stderr, "No ready Kinect sensor found.\n");
		glfwTerminate();
		return -1;
	}

	for (int i = 0; i < 2048; i++) {
		depthLookUp[i] = rawDepthToMeters(i);
	}

	rotation = getRotationMatrix(theta, psi, fi);

	pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH);
	pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_DEPTH,
		NUI_IMAGE_RESOLUTION_320x240,
		0,
		2,
		NULL,
		&hDepth);//KINECT INSTANTIATION

	cout << "Starting Main Loop";

	static double lastTime = glfwGetTime();
	//Main Loop
	do
	{
		double currentTime = glfwGetTime();
		float deltaTime = float(currentTime - lastTime);
		//Clear color buffer
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glUseProgram(grid);
		modelMatrix(MatrixID);


		hr = pNuiSensor->NuiImageStreamGetNextFrame(hDepth, 0, &depthFrame);
		if (!FAILED(hr))
		{

			INuiFrameTexture* pTexture;
			NUI_LOCKED_RECT LockedRect;

			hr = pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(
				hDepth, &depthFrame, false, &pTexture);

			if (FAILED(hr))
			{
				pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame);
				continue;
			}

			pTexture->LockRect(0, &LockedRect, NULL, 0);//Kinect Image Grab
			int skipX = 1;
			int skipY = 1;
			float scalar = 4.0f;

			if (LockedRect.Pitch != 0)
			{
				for (int x = 0; x < width; x += skipX)
				{
					for (int y = 0; y < height; y += skipY)
					{
						const NUI_DEPTH_IMAGE_PIXEL * pBufferRun = reinterpret_cast<const NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits) + x + y * width;
						
						//float depth = (float)(pBufferRun->depth);
						//glm::vec3 location = realWorld(depth, height - y, x, 500.0f, 1000.0f);
						//createCube(0.006f, location);
						Vector4 locationDepth = NuiTransformDepthImageToSkeleton(x, y, (short)(pBufferRun->depth << 3));
						glm::vec3 locationDepthxyz = glm::vec3(locationDepth.x * scalar, locationDepth.y * scalar, locationDepth.z * scalar);
						createCube(0.009f, locationDepthxyz);
					}
				}
			}

			pTexture->UnlockRect(0);
			pTexture->Release();

			pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame);
		}

		createGrid();

		//Test drawings
		/*
		glUseProgram(red);
		modelMatrix(MatrixID);
		//createCube(0.05f, glm::vec3(1.0f,1.0f,1.0f));
		// 1rst attribute buffer : vertices
		glEnableVertexAttribArray(0);
		//createObject(vertexbuffer, GL_TRIANGLES, 3);
		//createObject(triangle, GL_TRIANGLES, 3);
		glDisableVertexAttribArray(0);
		*/

		//Swap buffers
		glfwSwapBuffers(window);
		//Get and organize events, like keyboard and mouse input, window resizing, etc...
		glfwPollEvents();

		std::string title = "Title | DELTA TIME " + std::to_string(1.0f/deltaTime);
		const char* pszConstString = title.c_str();
		glfwSetWindowTitle(window, pszConstString);

		lastTime = currentTime;
	} //Check if the ESC key had been pressed or if the window had been closed
	while (!glfwWindowShouldClose(window));


	//Close OpenGL window and terminate GLFW
	glfwDestroyWindow(window);
	//Finalize and clean up GLFW
	glfwTerminate();

	exit(EXIT_SUCCESS);
}