IService* getServiceForType( IInterface* serviceType, IInterface* clientType )
{
	// Delegate to the service registry: without a client type we fall back
	// to the plain (client-agnostic) service lookup.
	if( !clientType )
		return getServices()->getService( serviceType );
	return getServices()->getServiceForType( serviceType, clientType );
}
// Open (or refuse to re-open) the device identified by `uri`.
// Returns NULL on failure after logging through the driver services;
// on success the device is recorded in m_devices and returned.
oni::driver::DeviceBase* XnOniDriver::deviceOpen(const char* uri, const char* mode)
{
	XnOniDevice* pDevice = NULL;

	// If a device was already opened for this uri, refuse a second open.
	if (m_devices.Get(uri, pDevice) == XN_STATUS_OK)
	{
		getServices().errorLoggerAppend("Device is already open.");
		return NULL;
	}

	pDevice = XN_NEW(XnOniDevice, uri, getServices(), this);
	XnStatus nRetVal = pDevice->Init(mode);
	if (nRetVal != XN_STATUS_OK)
	{
		getServices().errorLoggerAppend("Could not open \"%s\": %s", uri, xnGetStatusString(nRetVal));
		// BUGFIX: the freshly allocated device was leaked on this path
		// (the registration-failure path below already deletes it).
		XN_DELETE(pDevice);
		return NULL;
	}

	// Register to error state property changed.
	XnCallbackHandle handle;
	nRetVal = pDevice->GetSensor()->RegisterToPropertyChange(XN_MODULE_NAME_DEVICE, XN_MODULE_PROPERTY_ERROR_STATE, OnDevicePropertyChanged, pDevice, handle);
	if (nRetVal != XN_STATUS_OK)
	{
		XN_DELETE(pDevice);
		return NULL;
	}

	// Add the device and return it.
	m_devices[uri] = pDevice;
	return pDevice;
}
void buildFrame(void* data, uint32_t timestamp) { if (!running) return; OniFrame* frame = getServices().acquireFrame(); frame->frameIndex = frame_id++; frame->videoMode = video_mode; frame->width = video_mode.resolutionX; frame->height = video_mode.resolutionY; // Handle overflow, input timestamp comes from a 60MHz clock and overflows // in ~70s if (timestamp < prev_timestamp) { uint32_t prev_int = static_cast<uint32_t>(prev_timestamp); uint64_t temp_delta = std::abs(timestamp - prev_int); prev_timestamp += temp_delta; } else { prev_timestamp = timestamp; } // OpenNI wants the value in microseconds frame->timestamp = prev_timestamp / 60; populateFrame(data, frame); raiseNewFrame(frame); getServices().releaseFrame(frame); }
// Copy one libfreenect2 frame into an OniFrame and publish it.
bool buildFrame(libfreenect2::Frame* lf2Frame)
{
	if (!running)
		return false;

	OniFrame* oniFrame = getServices().acquireFrame();

	// Frame metadata mirrors the configured video mode.
	oniFrame->frameIndex = frame_id++;
	oniFrame->timestamp = lf2Frame->timestamp;
	oniFrame->videoMode = video_mode;
	oniFrame->width = video_mode.resolutionX;
	oniFrame->height = video_mode.resolutionY;

	if (!cropping.enabled)
	{
		oniFrame->cropOriginX = 0;
		oniFrame->cropOriginY = 0;
		oniFrame->croppingEnabled = false;
	}
	else
	{
		// Cropping overrides the output dimensions.
		oniFrame->width = cropping.width;
		oniFrame->height = cropping.height;
		oniFrame->cropOriginX = cropping.originX;
		oniFrame->cropOriginY = cropping.originY;
		oniFrame->croppingEnabled = true;
	}

	// Never copy past either the source or the destination buffer.
	const int copyWidth = std::min(oniFrame->width, (int)lf2Frame->width);
	const int copyHeight = std::min(oniFrame->height, (int)lf2Frame->height);
	populateFrame(lf2Frame, oniFrame->cropOriginX, oniFrame->cropOriginY, oniFrame, 0, 0, copyWidth, copyHeight);

	raiseNewFrame(oniFrame);
	getServices().releaseFrame(oniFrame);

	// NOTE(review): `false` presumably tells libfreenect2 that we did not
	// take ownership of lf2Frame (its pixels were copied above) — confirm
	// against libfreenect2's FrameListener::onNewFrame contract.
	return false;
}
// Synthetic depth stream loop: animates a bouncing disc over a flat
// background at ~30fps until m_running is cleared.
void Mainloop()
{
	int frameId = 1;
	// Disc velocity in pixels per frame along each axis.
	int xdir = 1;
	int ydir = 1;
	// Disc centre, starting at the top-left corner.
	struct {int x, y;} center = {0,0};
	while (m_running)
	{
		// printf("Tick");
		OniFrame* pFrame = getServices().acquireFrame();
		if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;}

		// Fill frame
		xnOSMemSet(pFrame->data, 0, pFrame->dataSize);
		OniDepthPixel* pDepth = (OniDepthPixel*)pFrame->data;
		// Draw a filled disc (squared radius < 70) around the centre,
		// clipped to the frame bounds; depth shades along the x-y diagonal.
		for (int y1 = XN_MAX(center.y-10, 0); y1 < XN_MIN(center.y+10, OZ_RESOLUTION_Y); ++y1)
			for (int x1 = XN_MAX(center.x-10, 0); x1 < XN_MIN(center.x+10, OZ_RESOLUTION_X); ++x1)
				if ((x1-center.x)*(x1-center.x)+(y1-center.y)*(y1-center.y) < 70)
					pDepth[singleRes(x1, y1)] = OniDepthPixel(1000+(x1-y1)*3);
//		pDepth[singleRes(center.x, center.y)] = 1000;

		// Advance the disc and bounce it off the frame edges.
		center.x += xdir; center.y += ydir;
		if (center.x < abs(xdir) || center.x > OZ_RESOLUTION_X-1-abs(xdir)) xdir*=-1;
		if (center.y < abs(ydir) || center.y > OZ_RESOLUTION_Y-1-abs(ydir)) ydir*=-1;

		// Paint the whole first row at a fixed 2000mm depth.
		for (int i = 0; i < OZ_RESOLUTION_X; ++i) pDepth[i] = 2000;
		pDepth[0] = 2000;

		// Fill metadata
		pFrame->frameIndex = frameId;
		pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
		pFrame->videoMode.resolutionX = OZ_RESOLUTION_X;
		pFrame->videoMode.resolutionY = OZ_RESOLUTION_Y;
		pFrame->videoMode.fps = 30;
		pFrame->width = OZ_RESOLUTION_X;
		pFrame->height = OZ_RESOLUTION_Y;
		// Full-frame output, no cropping.
		pFrame->cropOriginX = pFrame->cropOriginY = 0;
		pFrame->croppingEnabled = FALSE;
		pFrame->sensorType = ONI_SENSOR_DEPTH;
		pFrame->stride = OZ_RESOLUTION_X*sizeof(OniDepthPixel);
		// Synthetic 30fps clock: 33000us per frame.
		pFrame->timestamp = frameId*33000;

		raiseNewFrame(pFrame);
		getServices().releaseFrame(pFrame);

		frameId++;
		// Pace the loop to ~30fps.
		xnOSSleep(33);
	}
}
void buildFrame(void* data, uint32_t timestamp) { if (!running) return; OniFrame* frame = getServices().acquireFrame(); frame->frameIndex = frame_id++; frame->timestamp = timestamp; frame->videoMode = video_mode; frame->width = video_mode.resolutionX; frame->height = video_mode.resolutionY; populateFrame(data, frame); raiseNewFrame(frame); getServices().releaseFrame(frame); }
// Open the (single) Kinect V2 device; the uri is not consulted.
// Returns 0 when no Kinect handle is available.
virtual oni::driver::DeviceBase* deviceOpen(const char* uri, const char* /*mode*/)
{
	return kinect_ ? XN_NEW(KinectV2Device, getServices(), kinect_) : 0;
}
// Look up a registered service by name; returns NULL when absent.
IService* ServiceManager::find(const string& name)
{
	vector<IService*> services = getServices();
	// Linear scan, first match wins.
	for (vector<IService*>::iterator it = services.begin(); it != services.end(); ++it)
	{
		if (ServiceFunctors::matchByName(*it, name))
			return *it;
	}
	return NULL;
}
// Construct the libfreenect-backed OpenNI2 driver: configure libfreenect
// logging/subdevice selection and stash the driver services pointer.
Driver(OniDriverServices* pDriverServices) : DriverBase(pDriverServices)
{
	// Announce the libfreenect version this driver was built against.
	// NOTE(review): to_string() implies PROJECT_VER is numeric — confirm.
	WriteMessage("Using libfreenect v" + to_string(PROJECT_VER));
	// Verbose libfreenect logging for diagnostics.
	freenect_set_log_level(m_ctx, FREENECT_LOG_DEBUG);
	freenect_select_subdevices(m_ctx, FREENECT_DEVICE_CAMERA); // OpenNI2 doesn't use MOTOR or AUDIO
	// Keep a pointer to the driver services for later use.
	DriverServices = &getServices();
}
// Look up a registered service by numeric id; returns NULL when absent.
IService* ServiceManager::find(long id)
{
	vector<IService*> services = getServices();
	// Linear scan, first match wins.
	for (vector<IService*>::iterator it = services.begin(); it != services.end(); ++it)
	{
		if (ServiceFunctors::matchById(*it, id))
			return *it;
	}
	return NULL;
}
void Mainloop() { m_running = true; while (m_running) { OniFrame* pFrame = getServices().acquireFrame(); BuildFrame(pFrame); raiseNewFrame(pFrame); } }
void Mainloop() { int frameId = 1; while (m_running) { m_osEvent.Wait(XN_WAIT_INFINITE); m_osEvent.Reset(); OniFrame* pFrame = getServices().acquireFrame(); if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;} // Fill frame xnOSMemSet(pFrame->data, 0, pFrame->dataSize); pFrame->frameIndex = frameId; pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_RGB888; pFrame->videoMode.resolutionX = DEPTHSENSE_COLOR_RESOLUTION_X; pFrame->videoMode.resolutionY = DEPTHSENSE_COLOR_RESOLUTION_Y; pFrame->videoMode.fps = 30; pFrame->width = DEPTHSENSE_COLOR_RESOLUTION_X; pFrame->height = DEPTHSENSE_COLOR_RESOLUTION_Y; xnOSMemCopy( pFrame->data, &m_data[0], m_data.size() ); pFrame->cropOriginX = pFrame->cropOriginY = 0; pFrame->croppingEnabled = FALSE; pFrame->sensorType = ONI_SENSOR_COLOR; pFrame->stride = DEPTHSENSE_COLOR_RESOLUTION_X*sizeof(OniDepthPixel); pFrame->timestamp = frameId * 33000; raiseNewFrame(pFrame); getServices().releaseFrame(pFrame); frameId++; } }
// Open the Oz device matching `uri`. Re-opening a uri returns the
// existing instance; an unknown uri logs an error and returns NULL.
virtual oni::driver::DeviceBase* deviceOpen(const char* uri, const char* /*mode*/)
{
	typedef xnl::Hash<OniDeviceInfo*, oni::driver::DeviceBase*> DeviceMap;
	for (DeviceMap::Iterator it = m_devices.Begin(); it != m_devices.End(); ++it)
	{
		if (xnOSStrCmp(it->Key()->uri, uri) != 0)
			continue; // not this device

		// Already opened: hand back the existing instance.
		if (it->Value() != NULL)
			return it->Value();

		// First open for this uri: create and remember the device.
		OzDevice* pDevice = XN_NEW(OzDevice, it->Key(), getServices());
		it->Value() = pDevice;
		return pDevice;
	}
	getServices().errorLoggerAppend("Looking for '%s'", uri);
	return NULL;
}
// Open the DepthSense device matching `uri`. Re-opening a uri returns
// the existing instance; an unknown or malformed uri logs an error and
// returns NULL.
virtual oni::driver::DeviceBase* deviceOpen(const char* uri, const char* /*mode*/)
{
	for ( auto iter = m_devices.Begin(); iter != m_devices.End(); ++iter)
	{
		if (xnOSStrCmp(iter->Key()->uri, uri) != 0)
			continue;

		// Already opened: hand back the existing instance.
		if (iter->Value() != NULL)
			return iter->Value();

		// The device index is encoded after the last '/' of the uri.
		// BUGFIX: guard against a uri without '/' — the old code
		// dereferenced strrchr()'s NULL return (+1) in that case.
		const char* indexPart = strrchr(uri, '/');
		if (indexPart == NULL)
		{
			getServices().errorLoggerAppend("Looking for '%s'", uri);
			return NULL;
		}
		int deviceIndex = atoi(indexPart + 1);

		// Create the device instance.
		DepthSenseDevice* pDevice = XN_NEW(DepthSenseDevice, iter->Key(), getServices(), m_context, m_depthSenseDevices[deviceIndex] );
		iter->Value() = pDevice;
		return pDevice;
	}
	getServices().errorLoggerAppend("Looking for '%s'", uri);
	return NULL;
}
/**
 * @brief Get the service object corresponding to the uuid.
 * @param [in] uuid The UUID of the service being sought.
 * @return A pointer to the remote service, or nullptr when the device does
 * not expose a service with that UUID. (This implementation returns
 * nullptr rather than throwing.)
 */
BLERemoteService* BLEClient::getService(BLEUUID uuid) {
	ESP_LOGD(LOG_TAG, ">> getService: uuid: %s", uuid.toString().c_str());
	// Design
	// ------
	// We wish to retrieve the service given its UUID.  It is possible that we have not yet asked the
	// device what services it has in which case we have nothing to match against.  If we have not
	// asked the device about its services, then we do that now.  Once we get the results we can then
	// examine the services map to see if it has the service we are looking for.
	if (!m_haveServices) {
		// Lazily enumerate the remote services on first lookup.
		getServices();
	}
	// m_servicesMap is keyed by the string form of the UUID.
	std::string uuidStr = uuid.toString();
	for (auto &myPair : m_servicesMap) {
		if (myPair.first == uuidStr) {
			ESP_LOGD(LOG_TAG, "<< getService: found the service with uuid: %s", uuid.toString().c_str());
			return myPair.second;
		}
	} // End of each of the services.
	ESP_LOGD(LOG_TAG, "<< getService: not found");
	return nullptr;
} // getService
// Pure forwarder: instance-specific service lookup is delegated to the
// service registry.
IService* getServiceForInstance( IInterface* serviceType, IService* clientInstance ) { return getServices()->getServiceForInstance( serviceType, clientInstance ); }
// Construct the libfreenect-backed OpenNI2 driver: configure libfreenect
// logging/subdevice selection and stash the driver services pointer.
Driver(OniDriverServices* pDriverServices) : DriverBase(pDriverServices)
{
	// Verbose libfreenect logging for diagnostics.
	freenect_set_log_level(m_ctx, FREENECT_LOG_DEBUG);
	freenect_select_subdevices(m_ctx, FREENECT_DEVICE_CAMERA); // OpenNI2 doesn't use MOTOR or AUDIO
	// Keep a pointer to the driver services for later use.
	DriverServices = &getServices();
}
// Synthetic color stream loop: animates a bouncing color-gradient disc
// at ~30fps until m_running is cleared.
void Mainloop()
{
	int frameId = 1;
	// Disc velocity in pixels per frame along each axis.
	int xdir = -3;
	int ydir = 1;
	// Disc centre, starting near the middle of the frame.
	struct {int x, y;} center = {160,120};
	while (m_running)
	{
		// Pace the loop to ~30fps.
		xnOSSleep(33);
		// printf("Tick");
		OniFrame* pFrame = getServices().acquireFrame();
		if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;}

		// Fill frame
		xnOSMemSet(pFrame->data, 0, pFrame->dataSize);
		OniRGB888Pixel* pImage = (OniRGB888Pixel*)pFrame->data;
		// Draw a filled disc (squared radius < 70) around the centre,
		// clipped to the frame; r/g/b each shade with screen position.
		for (int y = XN_MAX(center.y-10, 0); y < XN_MIN(center.y+10, OZ_RESOLUTION_Y); ++y)
			for (int x = XN_MAX(center.x-10, 0); x < XN_MIN(center.x+10, OZ_RESOLUTION_X); ++x)
				if ((x-center.x)*(x-center.x)+(y-center.y)*(y-center.y) < 70)
				{
					pImage[singleRes(x, y)].r = (char)(255*(x/(double)OZ_RESOLUTION_X));
					pImage[singleRes(x, y)].g = (char)(255*(y/(double)OZ_RESOLUTION_Y));
					pImage[singleRes(x, y)].b = (char)(255*((OZ_RESOLUTION_X-x)/(double)OZ_RESOLUTION_X));
				}
//		pImage[singleRes(center.x, center.y)].r = 255;

		// Advance the disc and bounce it off the frame edges.
		center.x += xdir; center.y += ydir;
		if (center.x < abs(xdir) || center.x > OZ_RESOLUTION_X-1-abs(xdir)) xdir*=-1;
		if (center.y < abs(ydir) || center.y > OZ_RESOLUTION_Y-1-abs(ydir)) ydir*=-1;

		// Mark the first pixel blue.
		pImage[0].b = (unsigned char)255;

//		for (int y = 0; y < OZ_RESOLUTION_Y; ++y)
//		{
//			pDepth[y*OZ_RESOLUTION_X+(OZ_RESOLUTION_Y-y)] = pDepth[y*OZ_RESOLUTION_X+(y)] = 500+y;
//		}

		// Fill metadata
		pFrame->frameIndex = frameId;
		pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_RGB888;
		pFrame->videoMode.resolutionX = OZ_RESOLUTION_X;
		pFrame->videoMode.resolutionY = OZ_RESOLUTION_Y;
		pFrame->videoMode.fps = 30;
		pFrame->width = OZ_RESOLUTION_X;
		pFrame->height = OZ_RESOLUTION_Y;
		// Full-frame output, no cropping.
		pFrame->cropOriginX = pFrame->cropOriginY = 0;
		pFrame->croppingEnabled = FALSE;
		pFrame->sensorType = ONI_SENSOR_COLOR;
		// RGB888: 3 bytes per pixel per row.
		pFrame->stride = OZ_RESOLUTION_X*3;
		// Synthetic 30fps clock: 33000us per frame.
		pFrame->timestamp = frameId*33000;

		raiseNewFrame(pFrame);
		getServices().releaseFrame(pFrame);

		frameId++;
	}
}