Example #1
// Initialize reading data from kinect
HRESULT KinectGrabber::Kinect_Init() {
	
	HRESULT hr;

    m_hNextVideoFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );    
	m_hNextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );    
	m_hNextSkeletonFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );    
	m_hNextDepthPlayerFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

	hr = NuiInitialize( 
        NUI_INITIALIZE_FLAG_USES_DEPTH | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    if( FAILED( hr ) )
    {
		printf("failed to initialize NUI\n");
		return hr;
	}
	hr = NuiSkeletonTrackingEnable( m_hNextSkeletonFrameEvent, 0 );
    if( FAILED( hr ) )
    {
		printf("failed to open skeleton tracking.");//    MessageBoxResource(m_hWnd,IDS_ERROR_SKELETONTRACKING,MB_OK | MB_ICONHAND);
        return hr;
    }
	hr = NuiImageStreamOpen(
        NUI_IMAGE_TYPE_COLOR,
        NUI_IMAGE_RESOLUTION_640x480,
        0,
        2,
        m_hNextVideoFrameEvent,
        &m_pVideoStreamHandle );
    if( FAILED( hr ) )
    {
		printf("failed to open NuiImagesStream");
        return hr;
    }
	hr = NuiImageStreamOpen(
        NUI_IMAGE_TYPE_DEPTH,
        NUI_IMAGE_RESOLUTION_640x480,
        0,
        2,
        m_hNextDepthFrameEvent,
        &m_pDepthStreamHandle );
    if( FAILED( hr ) )
    {
    	printf("failed to open NuiImagesStream");
        return hr;
    }
	//audio init?

	//init other added parameters
	isSkeletonTracked = false;

	return hr;
}
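The events and stream handles created above only become useful once something waits on them and pulls frames out of the runtime. Below is a minimal consumer sketch for the color stream, assuming Kinect for Windows SDK 1.x and the member names from Kinect_Init(); the method name GrabColorFrame and the pixel-processing step are placeholders, not part of the original class.

// Sketch: pull one color frame (assumes SDK 1.x; GrabColorFrame is hypothetical).
void KinectGrabber::GrabColorFrame() {
	// Block briefly until the runtime signals that a new color frame is ready.
	if (WaitForSingleObject(m_hNextVideoFrameEvent, 100) != WAIT_OBJECT_0)
		return; // timed out, no frame this time

	const NUI_IMAGE_FRAME *pImageFrame = NULL;
	if (FAILED(NuiImageStreamGetNextFrame(m_pVideoStreamHandle, 0, &pImageFrame)))
		return;

	NUI_LOCKED_RECT lockedRect;
	pImageFrame->pFrameTexture->LockRect(0, &lockedRect, NULL, 0);
	if (lockedRect.Pitch != 0) {
		// lockedRect.pBits holds 640x480 BGRA pixels; copy or process them here.
	}
	pImageFrame->pFrameTexture->UnlockRect(0);

	// Always hand the frame back to the runtime.
	NuiImageStreamReleaseFrame(m_pVideoStreamHandle, pImageFrame);
}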
Example #2
HRESULT KinectCam::Nui_Init()
{
    HRESULT hr;

	m_hNextVideoFrameEvent = NULL;
    m_pVideoStreamHandle = NULL;
    m_hNextVideoFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

    hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR);
    if( FAILED( hr ) )
    {
        return hr;
    }

	// Set camera angle
	char iniFile[MAX_PATH];
	SHGetFolderPath(NULL, CSIDL_LOCAL_APPDATA, NULL, SHGFP_TYPE_CURRENT, iniFile);
	strcat(iniFile, "\\KinectCam");
	CreateDirectory(iniFile, NULL);
	strcat(iniFile, "\\config.ini");
	if (_access(iniFile, 0) != 0)
	{
		FILE *f = fopen(iniFile, "w");
		if (f)
		{
			fprintf(f, "[KinectCam]\n");
			fprintf(f, "CameraElevationAngle=0\n");
			fprintf(f, "CameraShutdownAngle=0\n");
			fprintf(f, "FlipImage=0\n");
			fclose(f);
		}
	}

	char val[256];
	GetPrivateProfileString("KinectCam", "CameraElevationAngle", "999", val, 256, iniFile);
	int angle = atoi(val);
	if (angle != 999)
		NuiCameraElevationSetAngle(angle);
	
	GetPrivateProfileString("KinectCam", "FlipImage", "0", val, 256, iniFile);
	g_flipImage = (atoi(val) != 0);

	NUI_IMAGE_RESOLUTION resolution;
	resolution = NUI_IMAGE_RESOLUTION_640x480;

    hr = NuiImageStreamOpen(
        NUI_IMAGE_TYPE_COLOR,
		resolution,
        0,
        2,
        m_hNextVideoFrameEvent,
        &m_pVideoStreamHandle );

	return hr;
}
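Nui_Init() writes a CameraShutdownAngle key into config.ini but never reads it, so the matching teardown presumably consumes it. A hedged sketch of such a counterpart follows; only the ini keys, path logic, and members come from the code above, while the name Nui_Release and its exact behavior are assumptions.

// Hypothetical teardown counterpart to Nui_Init() (the name Nui_Release is an assumption).
void KinectCam::Nui_Release()
{
	// Re-read the ini file written by Nui_Init() and park the motor at the
	// configured shutdown angle before releasing the runtime.
	char iniFile[MAX_PATH];
	SHGetFolderPath(NULL, CSIDL_LOCAL_APPDATA, NULL, SHGFP_TYPE_CURRENT, iniFile);
	strcat(iniFile, "\\KinectCam\\config.ini");

	char val[256];
	GetPrivateProfileString("KinectCam", "CameraShutdownAngle", "999", val, 256, iniFile);
	int angle = atoi(val);
	if (angle != 999)
		NuiCameraElevationSetAngle(angle);

	NuiShutdown();

	if (m_hNextVideoFrameEvent)
	{
		CloseHandle(m_hNextVideoFrameEvent);
		m_hNextVideoFrameEvent = NULL;
	}
}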
Example #3
int init(int left,int top,int width,int height,string title) {
	glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
	if(!(NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX) == S_OK)){
		return -1;
	}
	glutInitWindowPosition(left,top);
	glutInitWindowSize(width,height);
	glutCreateWindow(title.c_str());
	glClearColor(0.0,0.0,0.0,0.0);			
	cargando_juego = false;
	return 0;
}
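A short usage sketch for init(): glutInit() must run before the GLUT calls inside it, and the render/update callbacks here are hypothetical stand-ins rather than part of the original project.

// Usage sketch (render/update are trivial hypothetical callbacks).
static void render() {
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glutSwapBuffers();
}

static void update() {
	// Poll Kinect data here (e.g. NuiSkeletonGetNextFrame), then redraw.
	glutPostRedisplay();
}

int main(int argc, char **argv) {
	glutInit(&argc, argv);
	if (init(100, 100, 640, 480, "Kinect demo") != 0)
		return 1; // NuiInitialize failed
	glutDisplayFunc(render);
	glutIdleFunc(update);
	glutMainLoop(); // does not return; NuiShutdown() could be hooked via atexit()
	return 0;
}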
Example #4
	// --- initialisation --- 
	HRESULT Init(){

		// Configure events to be listened on
		m_hNextDepthFrameEvent = CreateEvent( NULL, TRUE , FALSE, NULL );
		m_hNextVideoFrameEvent = CreateEvent( NULL, TRUE , FALSE, NULL );
		m_hNextSkeletonEvent   = CreateEvent( NULL, TRUE , FALSE, NULL );
		m_hEvNuiProcessStop    = CreateEvent( NULL, FALSE, FALSE, NULL );

		hEvents[0]=m_hEvNuiProcessStop;
		hEvents[1]=m_hNextDepthFrameEvent;
		hEvents[2]=m_hNextVideoFrameEvent;
		hEvents[3]=m_hNextSkeletonEvent;



		//printf("Kinect::Init() -- events created\n");

		// initialisation
		HRESULT hr = NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
		if(FAILED(hr)) {
			MessageBox(NULL,L"Kinect initialisation failed.",L"Kinect Error",NULL);
			return hr;
		}
		//printf("Kinect::Init() -- done NuiInitialize\n");

		hr = NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, 0 );
		if(FAILED(hr)) return hr;
		//printf("Kinect::Init() -- done NuiSkeletonTrackingEnable\n");

		hr = NuiImageStreamOpen(
			NUI_IMAGE_TYPE_COLOR,
			NUI_IMAGE_RESOLUTION_640x480,
			0,
			2,
			m_hNextVideoFrameEvent,
			&m_pVideoStreamHandle );
		if(FAILED(hr)) return hr;
		//printf("Kinect::Init() -- done NuiImageStreamOpen for video\n");

		hr = NuiImageStreamOpen(
			NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
			NUI_IMAGE_RESOLUTION_320x240,
			0,
			2,
			m_hNextDepthFrameEvent,
			&m_pDepthStreamHandle );
		if(FAILED(hr)) return hr;
		//printf("Kinect::Init() -- done NuiImageStreamOpen for depth\n");

		return hr;

	}
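Init() fills hEvents[] with the stop event followed by the three frame events, which is the usual layout for a WaitForMultipleObjects() dispatch loop. Below is a sketch of such a loop using the same members; GotDepth/GotVideo/GotSkeleton are hypothetical placeholders for the real frame processing.

	// Processing-loop sketch (GotDepth/GotVideo/GotSkeleton are hypothetical handlers).
	void ProcessLoop()
	{
		bool running = true;
		while (running)
		{
			// Wait on the stop event plus the three frame events filled in Init().
			DWORD idx = WaitForMultipleObjects(4, hEvents, FALSE, 100);
			switch (idx)
			{
			case WAIT_OBJECT_0:     running = false; break; // m_hEvNuiProcessStop signaled
			case WAIT_OBJECT_0 + 1: GotDepth();      break; // m_hNextDepthFrameEvent
			case WAIT_OBJECT_0 + 2: GotVideo();      break; // m_hNextVideoFrameEvent
			case WAIT_OBJECT_0 + 3: GotSkeleton();   break; // m_hNextSkeletonEvent
			case WAIT_TIMEOUT:      break;                  // nothing ready yet
			default:                running = false; break; // WAIT_FAILED
			}
		}
		// NuiShutdown() would typically follow once the loop exits.
	}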
Example #5
int Read_Kinect::control_initialize()
{
	int rtn = 0;


	//visualization_setup();

	HRESULT hr = NuiInitialize(
		NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
		| NUI_INITIALIZE_FLAG_USES_COLOR
		| NUI_INITIALIZE_FLAG_USES_SKELETON);

	if (hr != S_OK)
	{
		cout << "NuiInitialize failed" << endl;
		return hr;
	}

	h1 = CreateEvent(NULL, TRUE, FALSE, NULL);
	h2 = NULL;
	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480,
		0, 2, h1, &h2);
	if (FAILED(hr))
	{
		cout << "Could not open image stream video" << endl;
		return hr;
	}

	h3 = CreateEvent(NULL, TRUE, FALSE, NULL);
	h4 = NULL;
	hr = NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
		NUI_IMAGE_RESOLUTION_320x240, 0, 2, h3, &h4);

	if (FAILED(hr))
	{
		cout << "Could not open depth stream video" << endl;
		return hr;
	}

	h5 = CreateEvent(NULL, TRUE, FALSE, NULL);
	hr = NuiSkeletonTrackingEnable(h5, 0);
	if (FAILED(hr))
	{
		cout << "Could not open skeleton stream video" << endl;
		return hr;
	}

	return rtn;
}
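control_initialize() enables skeleton tracking against h5 but stops short of reading skeletons back. A minimal retrieval sketch using the handles above follows; the method name read_skeleton and the printed output are only illustrative, not part of the original class.

// Skeleton retrieval sketch (assumes h5 from control_initialize(); read_skeleton is hypothetical).
int Read_Kinect::read_skeleton()
{
	if (WaitForSingleObject(h5, 100) != WAIT_OBJECT_0)
		return -1; // no new skeleton frame within 100 ms

	NUI_SKELETON_FRAME skeletonFrame = {0};
	HRESULT hr = NuiSkeletonGetNextFrame(0, &skeletonFrame);
	if (FAILED(hr))
		return -1;

	// Optional smoothing with the SDK's default parameters.
	NuiTransformSmooth(&skeletonFrame, NULL);

	for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
	{
		const NUI_SKELETON_DATA &s = skeletonFrame.SkeletonData[i];
		if (s.eTrackingState != NUI_SKELETON_TRACKED)
			continue;
		const Vector4 &head = s.SkeletonPositions[NUI_SKELETON_POSITION_HEAD];
		cout << "skeleton " << i << " head at ("
		     << head.x << ", " << head.y << ", " << head.z << ")" << endl;
	}
	return 0;
}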
Example #6
	bool Kinect::init() {
		HRESULT hr;
		hr = NuiInitialize( NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
		
		if(hr<0)
			return false;

		hr = NuiSkeletonTrackingEnable( _nextSkeletonEvent, 0 );

		if(hr<0)
			return false;

		hr = NuiImageStreamOpen(
			NUI_IMAGE_TYPE_COLOR,
			NUI_IMAGE_RESOLUTION_640x480,
			0,
			2,
			_nextVideoFrameEvent,
			&_videoHandle );

		if(hr<0)
			return false;

		hr = NuiImageStreamOpen(
			NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
			NUI_IMAGE_RESOLUTION_320x240,
			0,
			2,
			_nextDepthFrameEvent,
			&_depthHandle );

		if(hr<0)
			return false;

		_events[0] = _nuiProcessStop;
		_events[1] = _nextDepthFrameEvent;
		_events[2] = _nextVideoFrameEvent;
		_events[3] = _nextSkeletonEvent;

		CreateThread(NULL,0,nuiProcessThread,this,0,NULL);

		_initialized = true;
		return true;
	}
Example #7
File: qkinect.cpp Project: LuaxY/QKinect
/**
 * @brief QKinect::QKinect : constructor
 * @param parent : parent object
 * @param drawZone : drawing area
 */
QKinect::QKinect(QObject *parent, DrawZone* drawZone) :
    QThread(parent),
    isRunning(false),
    isInitialized(false),
    mDrawZone(drawZone)
{
    // Initialize the skeletal sensor
    HRESULT res = NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON);

    if(res != S_OK)
    {
        //throw std::logic_error("No Kinect found...");
        qDebug() << "No Kinect found...";
    }
    else
    {
        isInitialized = true;
    }
}
Example #8
RTC::ReturnCode_t RTCKinect::onActivated(RTC::UniqueId ec_id)
{
    /**
	 * The configured values are reflected in the initialization process:
	 *
	 * m_enable_camera -> camera image
	 * m_enable_depth  -> depth image
	 * m_player_index  -> player index detection
	 *
	 * Important: if player indexing is enabled, the depth image resolution is limited to 320x240.
	 */

	DWORD dwFlag = NUI_INITIALIZE_FLAG_USES_SKELETON;
	if(m_enable_camera) {
		dwFlag |= NUI_INITIALIZE_FLAG_USES_COLOR;
	}
	if(m_enable_depth) {
		if(m_player_index) {
			dwFlag |= NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX;
		} else {
			dwFlag |= NUI_INITIALIZE_FLAG_USES_DEPTH;
		}
	}


	HRESULT hr = NuiInitialize(dwFlag); 
    if( FAILED( hr ) ) {
		std::cout << "NUI Initialize Failed." << std::endl;
		return RTC::RTC_ERROR;
    }

	if(m_depth_width == 640 && m_depth_height == 480 && m_enable_depth && m_player_index) {
		std::cout << "If PlayerIndex and Depth Map is ON, resolution should be 320X240" << std::endl;
		return RTC::RTC_ERROR;
	}
	NUI_IMAGE_RESOLUTION eResolution;
	if(m_camera_width == 640 && m_camera_height == 480) {
		eResolution = NUI_IMAGE_RESOLUTION_640x480;
	} else {
		std::cout << "Invalid Image Resolution" << std::endl;
		return RTC::RTC_ERROR;
	}
	if(m_enable_camera) {
		hr = NuiImageStreamOpen(::NUI_IMAGE_TYPE_COLOR, eResolution, 0, 2, NULL, &m_pVideoStreamHandle );
		if( FAILED( hr ) )
		{
			std::cout << "NUI Image Stream Open Failed." << std::endl;
			return RTC::RTC_ERROR;
		}
	}

	if(m_depth_width == 640 && m_depth_height == 480) {
		eResolution = NUI_IMAGE_RESOLUTION_640x480;
	} else if(m_depth_width == 320 && m_depth_height == 240) {
		eResolution = NUI_IMAGE_RESOLUTION_320x240;
	} else {
		std::cout << "Invalid Image Resolution" << std::endl;
		return RTC::RTC_ERROR;
	}
	if(m_enable_depth) {
		if(m_player_index) {
			hr = NuiImageStreamOpen(::NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, eResolution, 0, 2, NULL, &m_pDepthStreamHandle );
		} else {
			hr = NuiImageStreamOpen(::NUI_IMAGE_TYPE_DEPTH, eResolution, 0, 2, NULL, &m_pDepthStreamHandle );
		}
	}
    if( FAILED( hr ) ) {
		std::cout << "NUI Image Stream Open Failed." << std::endl;
		return RTC::RTC_ERROR;
    }

	this->m_image.width = m_camera_width;
	this->m_image.height = m_camera_height;
	this->m_image.pixels.length(m_camera_width*m_camera_height*3);

	this->m_depth.width = m_depth_width;
	this->m_depth.height = m_depth_height;
	this->m_depth.pixels.length(m_depth_width*m_depth_height*3);

    /**
	 * Initialization for raw sound input.
	 */
	if (m_enable_microphone) {
		UINT deviceCount;
		IMMDeviceEnumerator *deviceEnumerator = NULL;
		IMMDeviceCollection *deviceCollection = NULL;

		hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&deviceEnumerator));
		if (FAILED(hr)) {
			std::cout << "Unable to instantiate device enumerator." << std::endl;
			return RTC::RTC_ERROR;
		}
		hr = deviceEnumerator->EnumAudioEndpoints(eCapture, DEVICE_STATE_ACTIVE, &deviceCollection);
		if (FAILED(hr)) {
			std::cout << "Unable to retrieve device collection." << std::endl;
			return RTC::RTC_ERROR;
		}
		hr = deviceCollection->GetCount(&deviceCount);
		if (FAILED(hr)) {
			std::cout << "Unable to get device collection length." << std::endl;
			return RTC::RTC_ERROR;
		}
		for (UINT i = 0; i < deviceCount; i++) {
			IPropertyStore *propertyStore;
			PROPVARIANT friendlyName;
			IMMDevice *endpoint;
			PropVariantInit(&friendlyName);

			hr = deviceCollection->Item(i, &endpoint);
			if (FAILED(hr)) {
				std::cout << "Unable to get device collection item." << std::endl;
				return RTC::RTC_ERROR;
			}

			hr = endpoint->OpenPropertyStore(STGM_READ, &propertyStore);
			if (FAILED(hr)) {
				std::cout << "Unable to open device property store." << std::endl;
				return RTC::RTC_ERROR;
			}

			hr = propertyStore->GetValue(PKEY_Device_FriendlyName, &friendlyName);
			SafeRelease(&propertyStore);
			if (FAILED(hr)) {
				std::cout << "Unable to retrieve friendly name for device." << std::endl;
				return RTC::RTC_ERROR;
			}

			std::cout << "Scanning for Kinect Audio device..." << std::endl;
			if (friendlyName.vt == VT_LPWSTR) {
				wprintf(L"  %s\n", friendlyName.pwszVal);
				if (wcscmp(friendlyName.pwszVal, L"Kinect USB Audio") == 0) {
					std::cout << "  Found Kinect Audio device" << std::endl;
					m_pAudioEndpoint = endpoint;
					m_pAudioEndpoint->AddRef();
					PropVariantClear(&friendlyName);
					SafeRelease(&endpoint);
					break;
				}
			}
			PropVariantClear(&friendlyName);
			SafeRelease(&endpoint);
		}
		SafeRelease(&deviceCollection);
		SafeRelease(&deviceEnumerator);

		m_AudioShutdownEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
		if (m_AudioShutdownEvent == NULL) {
			std::cout << "Unable to create shutdown event." << std::endl;
			return RTC::RTC_ERROR;
		}    
		hr = m_pAudioEndpoint->Activate(__uuidof(IAudioClient), CLSCTX_INPROC_SERVER, NULL, reinterpret_cast<void **>(&m_pAudioClient));
		if (FAILED(hr)) {
			std::cout << "Unable to activate audio client." << std::endl;
			return RTC::RTC_ERROR;
		}
		hr = m_pAudioClient->GetMixFormat(&m_pAudioMixFormat);
		if (FAILED(hr)) {
			std::cout << "Unable to get mix format on audio client." << std::endl;
			return RTC::RTC_ERROR;
		}
	    m_AudioFrameSize = (m_pAudioMixFormat->wBitsPerSample / 8) * m_pAudioMixFormat->nChannels;
		m_AudioCaptureBufferSize = m_pAudioMixFormat->nSamplesPerSec * 5 * m_AudioFrameSize;
		m_pAudioCaptureBuffer = new (std::nothrow) BYTE[m_AudioCaptureBufferSize];
		if (m_pAudioCaptureBuffer == NULL) {
			std::cout << "Unable to allocate capture buffer." << std::endl;
			return RTC::RTC_ERROR;
		}
		m_AudioCurrentCaptureIndex = 0;
		std::cout << "Audio capture format (" << m_pAudioMixFormat->nChannels << " channels, " << m_pAudioMixFormat->wBitsPerSample << " bits)"<< std::endl;
		m_AudioLatency = 10;
		hr = m_pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_NOPERSIST, m_AudioLatency*10000, 0, m_pAudioMixFormat, NULL);
		if (FAILED(hr)) {
			std::cout << "Unable to initialize audio client." << std::endl;
			return RTC::RTC_ERROR;
		}
		hr = m_pAudioClient->GetService(IID_PPV_ARGS(&m_pAudioCaptureClient));
		if (FAILED(hr)) {
			std::cout << "Unable to get audio capture client." << std::endl;
			return RTC::RTC_ERROR;
		}
		m_AudioCaptureThread = CreateThread(NULL, 0, AudioCaptureThread, this, 0, NULL);
		if (m_AudioCaptureThread == NULL) {
			std::cout << "Unable to create transport thread: " << GetLastError() << std::endl;
			return RTC::RTC_ERROR;
		}
		hr = m_pAudioClient->Start();
		if (FAILED(hr)) {
			std::cout << "Unable to start audio capture client." << std::endl;
			return RTC::RTC_ERROR;
		}
	}

	return RTC::RTC_OK;
}
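onActivated() launches AudioCaptureThread without showing its body. The following is a hedged sketch of a typical WASAPI pull loop for the capture client initialized above, assuming AudioCaptureThread is declared as a static member of RTCKinect; the circular-buffer write is just one plausible policy, not the project's actual implementation.

// Sketch only: a typical WASAPI pull loop for the capture client created in onActivated().
DWORD WINAPI RTCKinect::AudioCaptureThread(LPVOID lpParam)
{
	RTCKinect *pThis = reinterpret_cast<RTCKinect *>(lpParam);

	// Run until the shutdown event created in onActivated() is signaled.
	while (WaitForSingleObject(pThis->m_AudioShutdownEvent, pThis->m_AudioLatency) == WAIT_TIMEOUT)
	{
		UINT32 packetFrames = 0;
		HRESULT hr = pThis->m_pAudioCaptureClient->GetNextPacketSize(&packetFrames);
		while (SUCCEEDED(hr) && packetFrames > 0)
		{
			BYTE *pData = NULL;
			UINT32 framesRead = 0;
			DWORD flags = 0;
			hr = pThis->m_pAudioCaptureClient->GetBuffer(&pData, &framesRead, &flags, NULL, NULL);
			if (FAILED(hr))
				break;

			// Append to the circular capture buffer allocated in onActivated().
			UINT32 bytes = framesRead * pThis->m_AudioFrameSize;
			for (UINT32 i = 0; i < bytes; ++i)
			{
				pThis->m_pAudioCaptureBuffer[pThis->m_AudioCurrentCaptureIndex] =
					(flags & AUDCLNT_BUFFERFLAGS_SILENT) ? 0 : pData[i];
				pThis->m_AudioCurrentCaptureIndex =
					(pThis->m_AudioCurrentCaptureIndex + 1) % pThis->m_AudioCaptureBufferSize;
			}

			pThis->m_pAudioCaptureClient->ReleaseBuffer(framesRead);
			hr = pThis->m_pAudioCaptureClient->GetNextPacketSize(&packetFrames);
		}
	}
	return 0;
}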
Example #9
Flyer::Flyer(PE::GameContext &context, PE::MemoryArena arena, PE::Handle hMyself, int &threadOwnershipMask) 
: Component(context, arena, hMyself)
, m_prevCameraType(CameraManager::CameraType_Count)
{
	// NUI testing
#ifdef _XBOX
	// Could also try for a bit more smoothing ( 0.25f, 0.25f, 0.25f, 0.03f, 0.05f );
	m_JointFilter.Init( 0.5f, 0.5f, 0.5f, 0.05f, 0.05f );

	// create event which will be signaled when frame processing ends
	m_hFrameEndEvent = CreateEvent( NULL,
		FALSE,  // auto-reset
		FALSE,  // create unsignaled
		"NuiFrameEndEvent" );

	if ( !m_hFrameEndEvent )
	{
		ATG_PrintError( "Failed to create NuiFrameEndEvent\n" );
		return;
		// return E_FAIL;
	}

	HRESULT hr = NuiInitialize( NUI_INITIALIZE_FLAG_USES_SKELETON |
		NUI_INITIALIZE_FLAG_USES_COLOR |
		NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX,
		NUI_INITIALIZE_DEFAULT_HARDWARE_THREAD );

	if( FAILED( hr ))
	{
		ATG::NuiPrintError( hr, "NuiInitialize" );
		return;
		// return E_FAIL;
	}
	
	// register frame end event with NUI
	hr = NuiSetFrameEndEvent( m_hFrameEndEvent, 0 );
	if( FAILED(hr) )
	{
		ATG::NuiPrintError( hr, "NuiSetFrameEndEvent" );
		return;
		// return E_FAIL;
	}

	/*
	// Open the color stream
	hr = NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 1, NULL, &m_hImage );
	if( FAILED (hr) )
	{
		ATG::NuiPrintError( hr, "NuiImageStreamOpen" );
		return E_FAIL;
	}

	// Open the depth stream
	hr = NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_320x240, 0, 1, NULL, &m_hDepth );
	if( FAILED (hr) )
	{
		ATG::NuiPrintError( hr, "NuiImageStreamOpen" );
		return E_FAIL;
	}
	*/
	hr = NuiSkeletonTrackingEnable( NULL, 0 );
	if( FAILED( hr ))
	{
		ATG::NuiPrintError( hr, "NuiSkeletonTrackingEnable" );
	}

	m_pNuiJointConverterConstrained = new ATG::NuiJointConverter();
	if( m_pNuiJointConverterConstrained == NULL )
	{
		// return E_FAIL;
		return;
	}

	m_pNuiJointConverterConstrained->AddDefaultConstraints();
	
	Handle hSN("SceneNode", sizeof(SceneNode));
	m_pNuiSN = new(hSN) SceneNode(context, arena, hSN);
	m_pNuiSN->addDefaultComponents();
	m_pNuiSN->m_base.setPos(Vector3(0.0f, 0, 25.0f));
	m_pNuiSN->m_base.turnRight(1.2f * 3.1415f);
	RootSceneNode::Instance()->addComponent(hSN);
	for (int i = 0; i < XAVATAR_MAX_SKELETON_JOINTS; i++)
	{
		PE::Handle hMeshInstance("MeshInstance", sizeof(MeshInstance));
		MeshInstance *pMeshInstance = new(hMeshInstance) MeshInstance(*m_pContext, m_arena, hMeshInstance);
		pMeshInstance->addDefaultComponents();

		pMeshInstance->initFromFile("box.x_main_mesh.mesha", "Default", threadOwnershipMask);
		
		Handle hSN("SceneNode", sizeof(SceneNode));
		m_sceneNodes[i] = new(hSN) SceneNode(context, arena, hSN);
		m_sceneNodes[i]->addDefaultComponents();
		m_sceneNodes[i]->addComponent(hMeshInstance);
		m_pNuiSN->addComponent(hSN);

		if (m_pNuiJointConverterConstrained->MapAvatarJointToNUI_POSITION_INDEX(i) == NUI_SKELETON_POSITION_SHOULDER_RIGHT)
		{
			PE::Handle hMeshInstance("MeshInstance", sizeof(MeshInstance));
			MeshInstance *pMeshInstance = new(hMeshInstance) MeshInstance(*m_pContext, m_arena, hMeshInstance);
			pMeshInstance->addDefaultComponents();
			pMeshInstance->initFromFile("wings.x_rwing_mesh.mesha", "City", threadOwnershipMask);
			
			m_sceneNodes[i]->addComponent(hMeshInstance);
		}

		if (m_pNuiJointConverterConstrained->MapAvatarJointToNUI_POSITION_INDEX(i) == NUI_SKELETON_POSITION_SHOULDER_LEFT)
		{
			
			PE::Handle hMeshInstance("MeshInstance", sizeof(MeshInstance));
			MeshInstance *pMeshInstance = new(hMeshInstance) MeshInstance(*m_pContext, m_arena, hMeshInstance);

			pMeshInstance->addDefaultComponents();
			pMeshInstance->initFromFile("wings.x_lwing_mesh.mesha", "City", threadOwnershipMask);

			m_sceneNodes[i]->addComponent(hMeshInstance);
		}
	}
	{
			// put a camera here

		PE::Handle hMeshInstance("MeshInstance", sizeof(MeshInstance));
		MeshInstance *pMeshInstance = new(hMeshInstance) MeshInstance(*m_pContext, m_arena, hMeshInstance);

		pMeshInstance->addDefaultComponents();
		pMeshInstance->initFromFile("box.x_main_mesh.mesha", "Default", threadOwnershipMask);

		// we put camera in a scene node so we can rotate the camera within that scene node, but we could have also just added camera on its own
		Handle hSN("SceneNode", sizeof(SceneNode));
		m_pCamSN = new(hSN) SceneNode(context, arena,hSN);
		m_pCamSN->addDefaultComponents();
		m_pCamSN->addComponent(hMeshInstance);
		m_pNuiSN->addComponent(hSN);
		m_pCamSN->m_base.setPos(Vector3(0, +1.0f, +1.5f));

		Handle hDebugCamera("Camera", sizeof(Camera));
		Camera *debugCamera = new(hDebugCamera) Camera(context, arena, hDebugCamera, hSN);
		debugCamera->addDefaultComponents();
		CameraManager::Instance()->setCamera(CameraManager::PLAYER, hDebugCamera);
		//SceneNode *pCamSN = debugCamera->getCamSceneNode();

		}
	m_framesWithNoData = 0;
	m_framesWithData = 0;
#endif // #ifdef _XBOX
}
Example #10
HRESULT KinectSensor::Init(NUI_IMAGE_TYPE depthType, NUI_IMAGE_RESOLUTION depthRes, BOOL bNearMode, BOOL bFallbackToDefault, NUI_IMAGE_TYPE colorType, NUI_IMAGE_RESOLUTION colorRes, BOOL bSeatedSkeletonMode)
{
    HRESULT hr = E_UNEXPECTED;

    Release(); // Deal with double initializations.

    //do not support NUI_IMAGE_TYPE_COLOR_RAW_YUV for now
    if(colorType != NUI_IMAGE_TYPE_COLOR && colorType != NUI_IMAGE_TYPE_COLOR_YUV
        || depthType != NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX && depthType != NUI_IMAGE_TYPE_DEPTH)
    {
        return E_INVALIDARG;
    }

    m_VideoBuffer = FTCreateImage();
    if (!m_VideoBuffer)
    {
        return E_OUTOFMEMORY;
    }

    DWORD width = 0;
    DWORD height = 0;

    NuiImageResolutionToSize(colorRes, width, height);

    hr = m_VideoBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT8_B8G8R8X8);
    if (FAILED(hr))
    {
        return hr;
    }

    m_DepthBuffer = FTCreateImage();
    if (!m_DepthBuffer)
    {
        return E_OUTOFMEMORY;
    }

    NuiImageResolutionToSize(depthRes, width, height);

    hr = m_DepthBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT16_D13P3);
    if (FAILED(hr))
    {
        return hr;
    }
    
    m_FramesTotal = 0;
    m_SkeletonTotal = 0;

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
    {
        m_HeadPoint[i] = m_NeckPoint[i] = FT_VECTOR3D(0, 0, 0);
        m_SkeletonTracked[i] = false;
    }

    m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextVideoFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextSkeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    
    DWORD dwNuiInitDepthFlag = (depthType == NUI_IMAGE_TYPE_DEPTH)? NUI_INITIALIZE_FLAG_USES_DEPTH : NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX;

    hr = NuiInitialize(dwNuiInitDepthFlag | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    if (FAILED(hr))
    {
        return hr;
    }
    m_bNuiInitialized = true;

	DWORD dwSkeletonFlags = NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE;
	if (bSeatedSkeletonMode)
	{
		dwSkeletonFlags |= NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT;
	}
    hr = NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, dwSkeletonFlags );
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(
        colorType,
        colorRes,
        0,
        2,
        m_hNextVideoFrameEvent,
        &m_pVideoStreamHandle );
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(
        depthType,
        depthRes,
        (bNearMode)? NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE : 0,
        2,
        m_hNextDepthFrameEvent,
        &m_pDepthStreamHandle );
    if (FAILED(hr))
    {
        if(bNearMode && bFallbackToDefault)
        {
            hr = NuiImageStreamOpen(
                depthType,
                depthRes,
                0,
                2,
                m_hNextDepthFrameEvent,
                &m_pDepthStreamHandle );
        }

        if(FAILED(hr))
        {
            return hr;
        }
    }

    // Start the Nui processing thread
    m_hEvNuiProcessStop=CreateEvent(NULL,TRUE,FALSE,NULL);
    m_hThNuiProcess=CreateThread(NULL,0,ProcessThread,this,0,NULL);

    return hr;
}
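Init() calls Release() at the top to cope with double initialization, but Release() itself is not shown here. Below is a plausible sketch of that counterpart built only from the members used above; the exact body is an assumption.

// Sketch of the Release() counterpart referenced at the top of Init() (body is an assumption).
void KinectSensor::Release()
{
    // Stop the processing thread started at the end of Init().
    if (m_hEvNuiProcessStop != NULL)
    {
        SetEvent(m_hEvNuiProcessStop);
        if (m_hThNuiProcess != NULL)
        {
            WaitForSingleObject(m_hThNuiProcess, INFINITE);
            CloseHandle(m_hThNuiProcess);
            m_hThNuiProcess = NULL;
        }
        CloseHandle(m_hEvNuiProcessStop);
        m_hEvNuiProcessStop = NULL;
    }

    // Shut down the runtime only if NuiInitialize() succeeded.
    if (m_bNuiInitialized)
    {
        NuiShutdown();
        m_bNuiInitialized = false;
    }

    if (m_hNextSkeletonEvent)   { CloseHandle(m_hNextSkeletonEvent);   m_hNextSkeletonEvent = NULL; }
    if (m_hNextDepthFrameEvent) { CloseHandle(m_hNextDepthFrameEvent); m_hNextDepthFrameEvent = NULL; }
    if (m_hNextVideoFrameEvent) { CloseHandle(m_hNextVideoFrameEvent); m_hNextVideoFrameEvent = NULL; }

    if (m_VideoBuffer) { m_VideoBuffer->Release(); m_VideoBuffer = NULL; }
    if (m_DepthBuffer) { m_DepthBuffer->Release(); m_DepthBuffer = NULL; }
}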
Example #11
int _tmain(int argc, _TCHAR* argv[])
{
	NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH);

	return 0;
}
Example #12
HRESULT CSkeletalViewerApp::Nui_Init()
{
    HRESULT                hr;
    RECT                rc;

    m_hNextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
    m_hNextVideoFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
    m_hNextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

    GetWindowRect(GetDlgItem( m_hWnd, IDC_SKELETALVIEW ), &rc );
    int width = rc.right - rc.left;
    int height = rc.bottom - rc.top;
    HDC hdc = GetDC(GetDlgItem( m_hWnd, IDC_SKELETALVIEW));
    m_SkeletonBMP = CreateCompatibleBitmap( hdc, width, height );
    m_SkeletonDC = CreateCompatibleDC( hdc );
    ::ReleaseDC(GetDlgItem(m_hWnd,IDC_SKELETALVIEW), hdc );
    m_SkeletonOldObj = SelectObject( m_SkeletonDC, m_SkeletonBMP );

    hr = m_DrawDepth.CreateDevice( GetDlgItem( m_hWnd, IDC_DEPTHVIEWER ) );
    if( FAILED( hr ) )
    {
        MessageBoxResource( m_hWnd,IDS_ERROR_D3DCREATE,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = m_DrawDepth.SetVideoType( 320, 240, 320 * 4 );
    if( FAILED( hr ) )
    {
        MessageBoxResource( m_hWnd,IDS_ERROR_D3DVIDEOTYPE,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = m_DrawVideo.CreateDevice( GetDlgItem( m_hWnd, IDC_VIDEOVIEW ) );
    if( FAILED( hr ) )
    {
        MessageBoxResource( m_hWnd,IDS_ERROR_D3DCREATE,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = m_DrawVideo.SetVideoType( 640, 480, 640 * 4 );
    if( FAILED( hr ) )
    {
        MessageBoxResource( m_hWnd,IDS_ERROR_D3DVIDEOTYPE,MB_OK | MB_ICONHAND);
        return hr;
    }
    
    hr = NuiInitialize( 
        NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    if( FAILED( hr ) )
    {
        MessageBoxResource(m_hWnd,IDS_ERROR_NUIINIT,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, 0 );
    if( FAILED( hr ) )
    {
        MessageBoxResource(m_hWnd,IDS_ERROR_SKELETONTRACKING,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = NuiImageStreamOpen(
        NUI_IMAGE_TYPE_COLOR,
        NUI_IMAGE_RESOLUTION_640x480,
        0,
        2,
        m_hNextVideoFrameEvent,
        &m_pVideoStreamHandle );
    if( FAILED( hr ) )
    {
        MessageBoxResource(m_hWnd,IDS_ERROR_VIDEOSTREAM,MB_OK | MB_ICONHAND);
        return hr;
    }

    hr = NuiImageStreamOpen(
        NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
        NUI_IMAGE_RESOLUTION_320x240,
        0,
        2,
        m_hNextDepthFrameEvent,
        &m_pDepthStreamHandle );
    if( FAILED( hr ) )
    {
        MessageBoxResource(m_hWnd,IDS_ERROR_DEPTHSTREAM,MB_OK | MB_ICONHAND);
        return hr;
    }

    // Start the Nui processing thread
    m_hEvNuiProcessStop=CreateEvent(NULL,FALSE,FALSE,NULL);
    m_hThNuiProcess=CreateThread(NULL,0,Nui_ProcessThread,this,0,NULL);

	//initialize background subtraction.
	initialdepth = (USHORT*)malloc(320*480*sizeof(USHORT));
	FrameCount = 0;

    return hr;
}