void XBeeResponse::getZBRxResponse(XBeeResponse &rxResponse) {
	//TODO verify response api id matches this api for this response
	ZBRxResponse* zb = static_cast<ZBRxResponse*>(&rxResponse);

	// Hand the shared frame-data buffer to the subclass view, then copy
	// the fields common to all responses.
	zb->setFrameData(getFrameData());
	setCommon(rxResponse);

	// Frame bytes 0-3 form the MSB word and bytes 4-7 the LSB word of the
	// 64-bit remote address, most-significant byte first.
	uint32_t msb = 0;
	uint32_t lsb = 0;
	for (int i = 0; i < 4; i++) {
		msb = (msb << 8) + getFrameData()[i];
		lsb = (lsb << 8) + getFrameData()[i + 4];
	}
	zb->getRemoteAddress64().setMsb(msb);
	zb->getRemoteAddress64().setLsb(lsb);
}
// Command handler: receive one transmitted image for a frame data object.
// Deserializes the image metadata and payload from the command stream and
// adds the image to the matching local FrameData instance.
bool Node::_cmdFrameDataTransmit( co::ICommand& cmd )
{
    co::ObjectICommand command( cmd );

    // NOTE: the read order below must match the sender's write order exactly.
    const co::ObjectVersion& frameDataVersion =
        command.read< co::ObjectVersion >();
    const PixelViewport& pvp = command.read< PixelViewport >();
    const Zoom& zoom = command.read< Zoom >();
    const uint32_t buffers = command.read< uint32_t >();
    const uint32_t frameNumber = command.read< uint32_t >();
    const bool useAlpha = command.read< bool >();
    // The raw image payload is whatever remains in the command buffer.
    const uint8_t* data = reinterpret_cast< const uint8_t* >(
        command.getRemainingBuffer( command.getRemainingBufferSize( )));

    LBLOG( LOG_ASSEMBLY )
        << "received image data for " << frameDataVersion << ", buffers "
        << buffers << " pvp " << pvp << std::endl;

    LBASSERT( pvp.isValid( ));

    FrameDataPtr frameData = getFrameData( frameDataVersion );
    LBASSERT( !frameData->isReady() );

    // Statistics event scoped to the addImage call below
    // (tagged NODE_FRAME_DECOMPRESS).
    NodeStatistics event( Statistic::NODE_FRAME_DECOMPRESS, this,
                          frameNumber );

    // Note on the const_cast: since the PixelData structure stores non-const
    // pointers, we have to go non-const at some point, even though we do not
    // modify the data.
    LBCHECK( frameData->addImage( frameDataVersion, pvp, zoom, buffers,
                                  useAlpha, const_cast< uint8_t* >( data )));
    return true;
}
// Main loop void MultiCursorAppCpp::run() { /* 1. Get frame data and prepear data needed */ bool isGetFrameData = getFrameData(); if (isGetFrameData) { /* 2. Labeling users' area */ CvBlobs blobs = labelingUserArea(userAreaMat); /* 3. Detect users' head postiions */ detectHeadPosition(blobs); /* 4. Detect users' hand positions */ detectHandPosition(blobs); /* 5. Draw cursors position */ setCursor(blobs); /* 6. Detect hand gesture */ detectHandGesture(blobs); /* Show images */ isShowDebugWindows ? showDebugWindows() : destroyAllWindows(); } }
void XBeeResponse::getZBTxStatusResponse(XBeeResponse &zbXBeeResponse) {
	// TODO(review): like the sibling getters, no API-id verification is done.
	// View the caller-supplied response as a TX status response, then share
	// this response's frame buffer and common header fields with it.
	ZBTxStatusResponse* statusResponse =
			static_cast<ZBTxStatusResponse*>(&zbXBeeResponse);
	statusResponse->setFrameData(getFrameData());
	setCommon(zbXBeeResponse);
}
void frameViewFinish() { _channel->applyBuffer(); _channel->applyViewport(); FrameSettingsPtr frameSettingsPtr = getFrameData()->getFrameSettings(); if( frameSettingsPtr->getStatistics( )) { _channel->drawStatistics(); drawCacheStatistics(); } }
void applyCamera() { ConstCameraSettingsPtr cameraSettings = getFrameData()->getCameraSettings( ); const Matrix4f& cameraRotation = cameraSettings->getCameraRotation( ); const Matrix4f& modelRotation = cameraSettings->getModelRotation( ); const Vector3f& cameraPosition = cameraSettings->getCameraPosition( ); EQ_GL_CALL( glMultMatrixf( cameraRotation.array ) ); EQ_GL_CALL( glTranslatef( cameraPosition[ 0 ], cameraPosition[ 1 ], cameraPosition[ 2 ] ) ); EQ_GL_CALL( glMultMatrixf( modelRotation.array ) ); }
// Command handler (packet-based API): mark a frame data object as ready.
bool Node::_cmdFrameDataReady( co::Command& command )
{
    const NodeFrameDataReadyPacket* packet =
        command.get<NodeFrameDataReadyPacket>();
    LBLOG( LOG_ASSEMBLY ) << "received ready for " << packet->frameData
                          << std::endl;

    // Look up the local FrameData for the version named in the packet and
    // flag it ready so waiting consumers can proceed.
    FrameDataPtr frameData = getFrameData( packet->frameData );
    LBASSERT( frameData );
    LBASSERT( !frameData->isReady() );

    frameData->setReady( packet );
    LBASSERT( frameData->isReady() );
    return true;
}
// Command handler (ICommand-based API): mark a frame data object as ready.
bool Node::_cmdFrameDataReady( co::ICommand& cmd )
{
    co::ObjectICommand command( cmd );

    // NOTE: the read order below must match the sender's write order exactly.
    const co::ObjectVersion& frameDataVersion =
        command.read< co::ObjectVersion >();
    const fabric::FrameData& data = command.read< fabric::FrameData >();

    LBLOG( LOG_ASSEMBLY ) << "received ready for " << frameDataVersion
                          << std::endl;

    // Flag the matching local FrameData ready so waiting consumers proceed.
    FrameDataPtr frameData = getFrameData( frameDataVersion );
    LBASSERT( frameData );
    LBASSERT( !frameData->isReady() );

    frameData->setReady( frameDataVersion, data );
    LBASSERT( frameData->isReady() );
    return true;
}
// Command handler (packet-based API): receive one transmitted image and
// add it to the matching local FrameData.
bool Node::_cmdFrameDataTransmit( co::Command& command )
{
    const NodeFrameDataTransmitPacket* packet =
        command.get<NodeFrameDataTransmitPacket>();

    LBLOG( LOG_ASSEMBLY )
        << "received image data for " << packet->frameData << ", buffers "
        << packet->buffers << " pvp " << packet->pvp << std::endl;

    LBASSERT( packet->pvp.isValid( ));

    FrameDataPtr frameData = getFrameData( packet->frameData );
    LBASSERT( !frameData->isReady() );

    // Statistics event scoped to the addImage call below
    // (tagged NODE_FRAME_DECOMPRESS).
    NodeStatistics event( Statistic::NODE_FRAME_DECOMPRESS, this,
                          packet->frameNumber );
    LBCHECK( frameData->addImage( packet ));
    return true;
}
// Render one frame: set up camera/frustum, generate the rendering set for
// the current frustum, refresh render parameters and transfer function,
// then draw the resulting bricks through the render view.
void frameDraw( const eq::uint128_t& )
{
    applyCamera();
    initializeLivreFrustum();
    requestData();

    // Propagate the channel viewport to the render view.
    const eq::fabric::Viewport& vp = _channel->getViewport( );
    const Viewportf viewport( Vector2f( vp.x, vp.y ),
                              Vector2f( vp.w, vp.h ));
    _renderViewPtr->setViewport( viewport );

    // NOTE(review): pixelViewport is passed default-constructed —
    // presumably filled in/ignored by the widget; confirm.
    Viewporti pixelViewport;
    _glWidgetPtr->setViewport( _renderViewPtr.get( ), pixelViewport );

    // Build the set of render nodes visible in the current frustum.
    livre::Node* node = static_cast< livre::Node* >( _channel->getNode( ));
    AvailableSetGenerator generateSet( node->getDashTree( ));

    FrameInfo frameInfo( _currentFrustum );
    generateSet.generateRenderingSet( _currentFrustum, frameInfo);

    EqRenderViewPtr renderViewPtr =
        boost::static_pointer_cast< EqRenderView >( _renderViewPtr );
    renderViewPtr->setParameters( getFrameData()->getVRParameters( ));

    // Refresh the transfer function from the pipe's render settings.
    RayCastRendererPtr renderer =
        boost::static_pointer_cast< RayCastRenderer >(
            renderViewPtr->getRenderer( ));
    const livre::Pipe* pipe =
        static_cast< const livre::Pipe* >( _channel->getPipe());
    renderer->initTransferFunction(
        pipe->getFrameData()->getRenderSettings()->getTransferFunction( ));

    RenderBricks renderBricks;
    generateRenderBricks( frameInfo.renderNodeList, renderBricks );

    renderViewPtr->render( frameInfo, renderBricks, *_glWidgetPtr );
}
uint8_t ZBExplicitRxResponse::getDstEndpoint() {
	// Destination endpoint byte at offset 11 of the frame data.
	const uint8_t dstEndpoint = getFrameData()[11];
	return dstEndpoint;
}
uint16_t ZBExplicitRxResponse::getClusterId() {
	// Big-endian 16-bit cluster id: offset 12 is the MSB, 13 the LSB.
	const uint16_t msb = getFrameData()[12];
	const uint16_t lsb = getFrameData()[13];
	return (uint16_t)((msb << 8) | lsb);
}
uint8_t ZBTxStatusResponse::getDiscoveryStatus() {
	// Discovery status byte at offset 5 of the frame data.
	const uint8_t discoveryStatus = getFrameData()[5];
	return discoveryStatus;
}
uint8_t RxResponse::getRssi() {
	// The RSSI byte's position depends on the concrete response type, so
	// the subclass supplies it via getRssiOffset().
	const auto rssiIndex = getRssiOffset();
	return getFrameData()[rssiIndex];
}
uint16_t ZBRxResponse::getRemoteAddress16() {
	// Big-endian 16-bit network address: offset 8 is the MSB, 9 the LSB.
	const uint16_t msb = getFrameData()[8];
	const uint16_t lsb = getFrameData()[9];
	return (uint16_t)((msb << 8) + lsb);
}
uint8_t ZBRxIoSampleResponse::getAnalogMask() {
	// Keep only the 0x8f bits of the analog mask byte at offset 14.
	const uint8_t maskByte = getFrameData()[14];
	return maskByte & 0x8f;
}
uint16_t ZBExplicitRxResponse::getProfileId() {
	// Big-endian 16-bit profile id: offset 14 is the MSB, 15 the LSB.
	const uint16_t msb = getFrameData()[14];
	const uint16_t lsb = getFrameData()[15];
	return (uint16_t)((msb << 8) | lsb);
}
uint8_t ZBExplicitRxResponse::getOption() {
	// Option byte at offset 16 of the frame data.
	const uint8_t option = getFrameData()[16];
	return option;
}
// Unregister the frame grabber from the channel, but only when frame
// grabbing was enabled in the frame settings.
void removeImageListener()
{
    const bool grabEnabled =
        getFrameData()->getFrameSettings()->getGrabFrame();
    if( grabEnabled )
        _channel->removeResultImageListener( &_frameGrabber );
}
uint8_t AntVersion::getVersionByte(uint8_t pos) {
	// Raw version byte at the caller-supplied position; note that no
	// bounds checking is performed here.
	const uint8_t versionByte = getFrameData()[pos];
	return versionByte;
}
uint8_t RxResponse::getOption() {
	// Option byte immediately follows the RSSI byte.
	const auto optionIndex = getRssiOffset() + 1;
	return getFrameData()[optionIndex];
}
// 64 + 16 addresses, sample size, option = 12 (index 11), so this starts at 12 uint8_t ZBRxIoSampleResponse::getDigitalMaskMsb() { return getFrameData()[12] & 0x1c; }
uint8_t ZBRxIoSampleResponse::getDigitalMaskLsb() {
	// LSB of the digital channel mask, at offset 13.
	const uint8_t maskLsb = getFrameData()[13];
	return maskLsb;
}
uint8_t ZBExplicitRxResponse::getSrcEndpoint() {
	// Source endpoint byte at offset 10 of the frame data.
	const uint8_t srcEndpoint = getFrameData()[10];
	return srcEndpoint;
}
uint8_t ZBRxResponse::getOption() {
	// Option byte at offset 10 of the frame data.
	const uint8_t option = getFrameData()[10];
	return option;
}
uint16_t ZBTxStatusResponse::getRemoteAddress() {
	// Big-endian 16-bit address: offset 1 is the MSB, 2 the LSB.
	const uint16_t msb = getFrameData()[1];
	const uint16_t lsb = getFrameData()[2];
	return (uint16_t)((msb << 8) + lsb);
}
uint16_t Rx16Response::getRemoteAddress16() {
	// Big-endian 16-bit source address: offset 0 is the MSB, 1 the LSB.
	const uint16_t msb = getFrameData()[0];
	const uint16_t lsb = getFrameData()[1];
	return (uint16_t)((msb << 8) + lsb);
}
uint8_t ZBTxStatusResponse::getTxRetryCount() {
	// Transmit retry count byte at offset 3 of the frame data.
	const uint8_t retryCount = getFrameData()[3];
	return retryCount;
}
uint8_t ZBTxStatusResponse::getDeliveryStatus() {
	// Delivery status byte at offset 4 of the frame data.
	const uint8_t deliveryStatus = getFrameData()[4];
	return deliveryStatus;
}
/** This thread controls acquisition, reads image files to get the image data, * and does the callbacks to send it to higher layers */ void hdf5Driver::hdf5Task (void) { const char *functionName = "hdf5Task"; int status = asynSuccess; epicsTimeStamp startTime, endTime; int imageMode, currentFrame, colorMode; double acquirePeriod, elapsedTime, delay; this->lock(); for(;;) { int acquire; getIntegerParam(ADAcquire, &acquire); if (!acquire) { this->unlock(); // Wait for semaphore unlocked asynPrint(this->pasynUserSelf, ASYN_TRACE_FLOW, "%s:%s: waiting for acquire to start\n", driverName, functionName); status = epicsEventWait(this->mStartEventId); this->lock(); acquire = 1; setStringParam(ADStatusMessage, "Acquiring data"); setIntegerParam(ADNumImagesCounter, 0); } // Are there datasets loaded? if(!mDatasetsCount) { setStringParam(ADStatusMessage, "No datasets loaded"); goto error; } // Get acquisition parameters epicsTimeGetCurrent(&startTime); getIntegerParam(ADImageMode, &imageMode); getDoubleParam(ADAcquirePeriod, &acquirePeriod); getIntegerParam(HDF5CurrentFrame, ¤tFrame); setIntegerParam(ADStatus, ADStatusAcquire); callParamCallbacks(); // Get information to allocate NDArray size_t dims[2]; NDDataType_t dataType; if(getFrameInfo(currentFrame, dims, &dataType)) { setStringParam(ADStatusMessage, "Failed to get frame info"); goto error; } // Allocate NDArray NDArray *pImage; if(!(pImage = pNDArrayPool->alloc(2, dims, dataType, 0, NULL))) { setStringParam(ADStatusMessage, "Failed to allocate frame"); goto error; } // Copy data into NDArray if(getFrameData(currentFrame, pImage->pData)) { setStringParam(ADStatusMessage, "Failed to read frame data"); goto error; } // Set ColorMode colorMode = NDColorModeMono; pImage->pAttributeList->add("ColorMode", "Color mode", NDAttrInt32, &colorMode); // Call plugins callbacks int arrayCallbacks; getIntegerParam(NDArrayCallbacks, &arrayCallbacks); if (arrayCallbacks) { this->unlock(); asynPrint(this->pasynUserSelf, ASYN_TRACE_FLOW, 
"%s:%s: calling imageData callback\n", driverName, functionName); doCallbacksGenericPointer(pImage, NDArrayData, 0); this->lock(); } pImage->release(); // Get the current parameters int lastFrame, imageCounter, numImages, numImagesCounter; getIntegerParam(HDF5LastFrame, &lastFrame); getIntegerParam(NDArrayCounter, &imageCounter); getIntegerParam(ADNumImages, &numImages); getIntegerParam(ADNumImagesCounter, &numImagesCounter); setIntegerParam(NDArrayCounter, ++imageCounter); setIntegerParam(ADNumImagesCounter, ++numImagesCounter); setIntegerParam(HDF5CurrentFrame, ++currentFrame); // Put the frame number and time stamp into the buffer pImage->uniqueId = imageCounter; pImage->timeStamp = startTime.secPastEpoch + startTime.nsec / 1.e9; updateTimeStamp(&pImage->epicsTS); // Prepare loop if necessary int loop; getIntegerParam(HDF5Loop, &loop); if (loop && currentFrame > lastFrame) { getIntegerParam(HDF5FirstFrame, ¤tFrame); setIntegerParam(HDF5CurrentFrame, currentFrame); } // See if acquisition is done if (imageMode == ADImageSingle || currentFrame > lastFrame || (imageMode == ADImageMultiple && numImagesCounter >= numImages)) { // First do callback on ADStatus setStringParam(ADStatusMessage, "Waiting for acquisition"); setIntegerParam(ADStatus, ADStatusIdle); acquire = 0; setIntegerParam(ADAcquire, acquire); asynPrint(this->pasynUserSelf, ASYN_TRACE_FLOW, "%s:%s: acquisition completed\n", driverName, functionName); } callParamCallbacks(); // Delay next acquisition and check if received STOP signal if(acquire) { epicsTimeGetCurrent(&endTime); elapsedTime = epicsTimeDiffInSeconds(&endTime, &startTime); delay = acquirePeriod - elapsedTime; asynPrint(this->pasynUserSelf, ASYN_TRACE_FLOW, "%s:%s: delay=%f\n", driverName, functionName, delay); if(delay > 0.0) { // Set the status to waiting to indicate we are in the delay setIntegerParam(ADStatus, ADStatusWaiting); callParamCallbacks(); this->unlock(); status = epicsEventWaitWithTimeout(mStopEventId, delay); this->lock(); if 
(status == epicsEventWaitOK) { acquire = 0; if (imageMode == ADImageContinuous) setIntegerParam(ADStatus, ADStatusIdle); else setIntegerParam(ADStatus, ADStatusAborted); callParamCallbacks(); } } } continue; error: setIntegerParam(ADAcquire, 0); setIntegerParam(ADStatus, ADStatusError); callParamCallbacks(); continue; } }