void XnFrameStreamProcessor::OnEndOfFrame(const XnSensorProtocolResponseHeader* pHeader) { // write dump XnBuffer* pCurWriteBuffer = m_pTripleBuffer->GetWriteBuffer(); xnDumpWriteBuffer(m_InternalDump, pCurWriteBuffer->GetData(), pCurWriteBuffer->GetSize()); xnDumpClose(&m_InternalDump); xnDumpClose(&m_InDump); if (!m_bFrameCorrupted) { // mark the buffer as stable XnUInt64 nTimestamp = GetTimeStamp(pHeader->nTimeStamp); XnUInt32 nFrameID; m_pTripleBuffer->MarkWriteBufferAsStable(nTimestamp, &nFrameID); // let inheriting classes do their stuff OnFrameReady(nFrameID, nTimestamp); } else { // restart m_pTripleBuffer->GetWriteBuffer()->Reset(); } // log bandwidth XnUInt64 nSysTime; xnOSGetTimeStamp(&nSysTime); xnDumpWriteString(m_pDevicePrivateData->BandwidthDump, "%llu,%s,%d,%d\n", nSysTime, m_csName, GetCurrentFrameID(), m_nBytesReceived); // re-init dumps xnDumpInit(&m_InDump, m_csInDumpMask, NULL, "%s_%d.raw", m_csInDumpMask, GetCurrentFrameID()); xnDumpInit(&m_InternalDump, m_csInternalDumpMask, NULL, "%s_%d.raw", m_csInternalDumpMask, GetCurrentFrameID()); m_nBytesReceived = 0; }
//--------------------------------------------------------------------------- // Code //--------------------------------------------------------------------------- XnFrameStreamProcessor::XnFrameStreamProcessor(XnFrameStream* pStream, XnSensorStreamHelper* pHelper, XnUInt16 nTypeSOF, XnUInt16 nTypeEOF) : XnStreamProcessor(pStream, pHelper), m_nTypeSOF(nTypeSOF), m_nTypeEOF(nTypeEOF), m_pTripleBuffer(pStream->GetTripleBuffer()), m_InDump(XN_DUMP_CLOSED), m_InternalDump(XN_DUMP_CLOSED), m_bFrameCorrupted(FALSE), m_bAllowDoubleSOF(FALSE), m_nLastSOFPacketID(0) { sprintf(m_csInDumpMask, "%sIn", pStream->GetType()); sprintf(m_csInternalDumpMask, "Internal%s", pStream->GetType()); xnDumpInit(&m_InDump, m_csInDumpMask, NULL, "%s_0.raw", m_csInDumpMask); xnDumpInit(&m_InternalDump, m_csInternalDumpMask, NULL, "%s_0.raw", m_csInternalDumpMask); }
// Constructs the audio packet processor for the given audio stream.
// Audio arrives as whole packets of nInputPacketSize bytes.
XnAudioProcessor::XnAudioProcessor(XnSensorAudioStream* pStream, XnSensorStreamHelper* pHelper, XnUInt32 nInputPacketSize) :
	XnWholePacketProcessor(pHelper->GetPrivateData(), pStream->GetType(), nInputPacketSize),
	m_pStream(pStream),
	m_pHelper(pHelper),
	m_AudioInDump(XN_DUMP_CLOSED)
{
	// dump of raw incoming audio; active only when the audio-in dump mask is on
	xnDumpInit(&m_AudioInDump, XN_DUMP_AUDIO_IN, NULL, "AudioIn.pcm");
}
//--------------------------------------------------------------------------- // Code //--------------------------------------------------------------------------- XnSensorFPS::XnSensorFPS() : m_FramesDump(XN_DUMP_CLOSED) { xnFPSInit(&m_InputDepth, XN_SENSOR_FPS_FRAME_COUNT); xnFPSInit(&m_InputImage, XN_SENSOR_FPS_FRAME_COUNT); xnFPSInit(&m_ReadCalls, XN_SENSOR_FPS_FRAME_COUNT); xnFPSInit(&m_OutputDepth, XN_SENSOR_FPS_FRAME_COUNT); xnFPSInit(&m_OutputImage, XN_SENSOR_FPS_FRAME_COUNT); xnDumpInit(&m_FramesDump, XN_MASK_SENSOR_FPS, "TS,Type,FrameID,FrameTS\n", "FramesTimes.csv"); }
// Prepares the sensor for data reading: opens USB data endpoints, reads the
// device clock frequency, initializes the diagnostic dumps, and starts the
// input threads.
// Returns XN_STATUS_OK on success, or the failing step's status code.
XnStatus XnSensor::InitReading()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// open data endpoints
	nRetVal = m_SensorIO.OpenDataEndPoints((XnSensorUsbInterface)m_Interface.GetValue());
	XN_IS_STATUS_OK(nRetVal);

	// take frequency information (device clock frequency is needed to convert
	// device timestamps into host time)
	XnFrequencyInformation FrequencyInformation;
	nRetVal = XnHostProtocolAlgorithmParams(&m_DevicePrivateData, XN_HOST_PROTOCOL_ALGORITHM_FREQUENCY, &FrequencyInformation, sizeof(XnFrequencyInformation), (XnResolutions)0, 0);
	XN_IS_STATUS_OK(nRetVal); // use the common status macro - previously an open-coded 'if != OK return', inconsistent with the rest of this function

	m_DevicePrivateData.fDeviceFrequency = XN_PREPARE_VAR_FLOAT_IN_BUFFER(FrequencyInformation.fDeviceFrequency);

	// Init Dumps (each is a no-op unless its dump mask is enabled)
	m_DevicePrivateData.BandwidthDump = XN_DUMP_CLOSED;
	xnDumpInit(&m_DevicePrivateData.BandwidthDump, XN_DUMP_BANDWIDTH, "Timestamp,Frame Type,Frame ID,Size\n", "Bandwidth.csv");
	m_DevicePrivateData.TimestampsDump = XN_DUMP_CLOSED;
	xnDumpInit(&m_DevicePrivateData.TimestampsDump, XN_DUMP_TIMESTAMPS, "Host Time (us),Stream,Device TS,Time (ms),Comments\n", "Timestamps.csv");
	m_DevicePrivateData.MiniPacketsDump = XN_DUMP_CLOSED;
	xnDumpInit(&m_DevicePrivateData.MiniPacketsDump, XN_DUMP_MINI_PACKETS, "HostTS,Type,ID,Size,Timestamp\n", "MiniPackets.csv");

	m_DevicePrivateData.nGlobalReferenceTS = 0;
	nRetVal = xnOSCreateCriticalSection(&m_DevicePrivateData.hEndPointsCS);
	XN_IS_STATUS_OK(nRetVal);

	// NOTE: when we go up, some streams might be open, and so we'll receive lots of garbage.
	// wait till streams are turned off, and then start reading.
	// pDevicePrivateData->bIgnoreDataPackets = TRUE;

	// open input threads
	nRetVal = XnDeviceSensorOpenInputThreads(GetDevicePrivateData(), (XnBool)m_ReadFromEP1.GetValue(), (XnBool)m_ReadFromEP2.GetValue(), (XnBool)m_ReadFromEP3.GetValue());
	XN_IS_STATUS_OK(nRetVal);

	return XN_STATUS_OK;
}
// Initializes the buffer pool: opens the debug dump, creates the pool lock,
// and allocates buffers of nBufferSize bytes.
// Returns XN_STATUS_OK on success, or the failing step's status code.
XnStatus XnBufferPool::Init(XnUInt32 nBufferSize)
{
	XnStatus nRetVal = XN_STATUS_OK;

	// Use %p for the 'this' pointer. Passing a pointer where %x expects an
	// unsigned int is undefined behavior and truncates on 64-bit platforms.
	xnDumpInit(&m_dump, "BufferPool", "", "bufferpool_%p.txt", this);

	nRetVal = xnOSCreateCriticalSection(&m_hLock);
	XN_IS_STATUS_OK(nRetVal);

	// allocate buffers
	nRetVal = ChangeBufferSize(nBufferSize);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
void audioInit() { g_AudioData.hWaveOut = NULL; g_AudioData.bFlush = false; g_AudioData.nFirstToCheck = -1; g_AudioData.SyncDump = XN_DUMP_CLOSED; // check if device audio is enabled const AudioMetaData* pAudioMD = getAudioMetaData(); if (pAudioMD == NULL) return; // start audio out device WAVEFORMATEX wf; wf.wFormatTag = 0x0001; // PCM wf.nChannels = pAudioMD->NumberOfChannels(); wf.nSamplesPerSec = pAudioMD->SampleRate(); wf.wBitsPerSample = pAudioMD->BitsPerSample(); wf.nBlockAlign = wf.wBitsPerSample * wf.nChannels / 8; wf.nAvgBytesPerSec = wf.nBlockAlign * wf.nSamplesPerSec; MMRESULT mmRes = waveOutOpen(&g_AudioData.hWaveOut, WAVE_MAPPER, &wf, (DWORD_PTR)audioCallback, NULL, CALLBACK_FUNCTION); if (mmRes != MMSYSERR_NOERROR) { printf("Warning: Failed opening wave out device. Audio will not be played!\n"); g_AudioData.hWaveOut = NULL; return; } // create some wave headers for playing g_AudioData.pAudioBuffers = new WAVEHDR[NUMBER_OF_AUDIO_BUFFERS]; g_AudioData.pAudioTimestamps = new XnUInt64[NUMBER_OF_AUDIO_BUFFERS]; xnOSMemSet(g_AudioData.pAudioBuffers, 0, sizeof(WAVEHDR)*NUMBER_OF_AUDIO_BUFFERS); // allocate max buffer for one second g_AudioData.nBufferSize = wf.nAvgBytesPerSec; for (int i = 0; i < NUMBER_OF_AUDIO_BUFFERS; ++i) { g_AudioData.pAudioBuffers[i].lpData = new XnChar[g_AudioData.nBufferSize]; g_AudioData.pAudioBuffers[i].dwUser = i; g_AudioData.pAudioBuffers[i].dwFlags = WHDR_DONE; // mark this buffer as empty (already played) } g_AudioData.nAudioNextBuffer = 0; xnDumpInit(&g_AudioData.SyncDump, AUDIO_SYNC_DUMP_MASK, "", "%s.txt", AUDIO_SYNC_DUMP_MASK); }
// Device-level initialization: registers frame-sync change callbacks, opens
// the frame-sync dump, runs base-class init, and finally opens the sensor.
// On sensor-open failure the device is destroyed and the error is returned.
XnStatus XnSensor::InitImpl(const XnDeviceConfig *pDeviceConfig)
{
	XnStatus nRetVal = XN_STATUS_OK;

	xnLogVerbose(XN_MASK_DEVICE_SENSOR, "Initializing device sensor...");

	// Frame Sync: re-evaluate frame sync whenever the property itself or
	// either stream mode changes. Handles are intentionally not kept.
	XnCallbackHandle hIgnored;
	nRetVal = m_FrameSync.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, &hIgnored);
	XN_IS_STATUS_OK(nRetVal);
	nRetVal = GetFirmware()->GetParams()->m_Stream0Mode.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, &hIgnored);
	XN_IS_STATUS_OK(nRetVal);
	nRetVal = GetFirmware()->GetParams()->m_Stream1Mode.OnChangeEvent().Register(FrameSyncPropertyChangedCallback, this, &hIgnored);
	XN_IS_STATUS_OK(nRetVal);

	// other stuff
	xnDumpInit(&m_FrameSyncDump, XN_DUMP_FRAME_SYNC, "HostTime(us),DepthNewData,DepthTimestamp(ms),ImageNewData,ImageTimestamp(ms),Diff(ms),Action\n", "FrameSync.csv");

	nRetVal = XnDeviceBase::InitImpl(pDeviceConfig);
	XN_IS_STATUS_OK(nRetVal);

	// now that everything is configured, open the sensor
	nRetVal = InitSensor(pDeviceConfig);
	if (nRetVal != XN_STATUS_OK)
	{
		// roll back the partial init before bubbling the error up
		Destroy();
		return (nRetVal);
	}

	xnLogInfo(XN_MASK_DEVICE_SENSOR, "Device sensor initialized");

	return (XN_STATUS_OK);
}