void ScrollString(char *ptr_msgPointer)
{
    unsigned char i, len;

    len = NumberOfSegments + MsgLength;

    /* Terminate the buffer and append a trailing blank */
    ptr_msgPointer[len + 1] = 0;
    ptr_msgPointer[len] = ' ';

    /* Shift the message right by NumberOfSegments characters... */
    for (i = len - 1; i > (NumberOfSegments - 1); i--)
    {
        ptr_msgPointer[i] = ptr_msgPointer[i - NumberOfSegments];
    }

    /* ...and fill the freed leading positions with blanks so the text scrolls in from the right */
    for (i = 0; i < NumberOfSegments; i++)
    {
        ptr_msgPointer[i] = ' ';
    }

    /* Loop to display the complete string; each pass displays NumberOfSegments
       characters, then the pointer is advanced to the next character */
    for (i = 0; (ptr_msgPointer[i]) && (NewMsgReceived == 0); i++)
    {
        BuildFrame(ptr_msgPointer + i);
        TWI_ScrollFrame();
        /*
        if (NewMsgReceived == 1)
        {
            BuildFrame(ptr_msgPointer + i + 1);
            TWI_HorizontalScrollFrame(0);
        }
        */
    }
}
// RequestHandler receives requests from the Client. The Server should
// build and return "request response" packets in this same thread.
//
// pPacketIn  - data packet from the Client
// pPacketOut - empty packet, to be filled in and returned to the
//              Client as the "request response".
//
int __cdecl RequestHandler(sPacket* pPacketIn, sPacket* pPacketOut, void* pUserData)
{
    int iHandled = 1;   // handled

    switch (pPacketIn->iMessage)
    {
    case NAT_PING:
        printf("[SampleServer] received ping from Client.\n");
        printf("[SampleServer] Client App Name : %s\n", pPacketIn->Data.Sender.szName);
        printf("[SampleServer] Client App Version : %d.%d.%d.%d\n",
               pPacketIn->Data.Sender.Version[0], pPacketIn->Data.Sender.Version[1],
               pPacketIn->Data.Sender.Version[2], pPacketIn->Data.Sender.Version[3]);
        printf("[SampleServer] Client App NatNet Version : %d.%d.%d.%d\n",
               pPacketIn->Data.Sender.NatNetVersion[0], pPacketIn->Data.Sender.NatNetVersion[1],
               pPacketIn->Data.Sender.NatNetVersion[2], pPacketIn->Data.Sender.NatNetVersion[3]);

        // build server info packet
        strcpy(pPacketOut->Data.Sender.szName, "SimpleServer");
        pPacketOut->Data.Sender.Version[0] = 2;
        pPacketOut->Data.Sender.Version[1] = 1;
        pPacketOut->iMessage = NAT_PINGRESPONSE;
        pPacketOut->nDataBytes = sizeof(sSender);
        iHandled = 1;
        break;

    case NAT_REQUEST_MODELDEF:
        printf("[SimpleServer] Received request for data descriptions.\n");
        theServer->PacketizeDataDescriptions(&descriptions, pPacketOut);
        break;

    case NAT_REQUEST_FRAMEOFDATA:
        {
            // note: the Client does not typically poll for data, but we accommodate it here anyway
            // note: the response must be returned on the same thread as the caller
            printf("[SimpleServer] Received request for frame of data.\n");
            sFrameOfMocapData frame;
            BuildFrame(g_lCurrentFrame, &descriptions, &frame);
            theServer->PacketizeFrameOfMocapData(&frame, pPacketOut);
            FreeFrame(&frame);
        }
        break;

    case NAT_REQUEST:
        printf("[SampleServer] Received request from Client: %s\n", pPacketIn->Data.szData);
        pPacketOut->iMessage = NAT_UNRECOGNIZED_REQUEST;
        if (stricmp(pPacketIn->Data.szData, "TestRequest") == 0)
        {
            pPacketOut->iMessage = NAT_RESPONSE;
            strcpy(pPacketOut->Data.szData, "TestResponse");
            pPacketOut->nDataBytes = ((int)strlen(pPacketOut->Data.szData)) + 1;
        }
        break;

    default:
        pPacketOut->iMessage = NAT_UNRECOGNIZED_REQUEST;
        pPacketOut->nDataBytes = 0;
        iHandled = 0;
        break;
    }

    return iHandled;   // 0 = not handled, 1 = handled
}
void DisplayString(char *ptr_msgPointer)
{
    int i, j;
    uint8_t activeStatus = 1;

    NumberOfSegments = 1;
    BuildFrame(ptr_msgPointer);

    /* Probe each display segment on the TWI bus; stop when a segment does not ACK */
    for (i = 0; activeStatus; i++)
    {
        if (i == 0)
        {
            /* The first segment is driven from the local pattern buffer */
            for (j = 0; j < 8; j++)
                pattern[j] = FrameBuffer[j];
        }
        else
        {
            TWI_start();                                    // Send start condition
            activeStatus = TWI_write_address(i * 2 + 0x30); // Write address and data-direction bit (write) on SDA
            if (activeStatus)
            {
                NumberOfSegments++;
                for (j = 0; j < 8; j++)
                {
                    TWI_write_data(FrameBuffer[i * 8 + j]); // Write one column of data to the slave
                    // UART_TxChar(' ');
                    // UART_TxHexNumber(i, 2);
                }
            }
            TWI_stop();                                     // Send stop condition
        }
    }
}
void Mainloop()
{
    m_running = true;
    while (m_running)
    {
        OniFrame* pFrame = getServices().acquireFrame();
        BuildFrame(pFrame);
        raiseNewFrame(pFrame);
    }
}
// PlayingThread_Func streams frames continuously, paced by HiResSleep()
DWORD WINAPI PlayingThread_Func(void * empty)
{
    while (1)
    {
        sFrameOfMocapData frame;
        BuildFrame(g_lCurrentFrame, &descriptions, &frame);
        SendFrame(&frame);
        FreeFrame(&frame);
        printf("Sent Frame %d\n", g_lCurrentFrame);
        g_lCurrentFrame++;
        HiResSleep(10);
    }
    return ErrorCode_OK;
}
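StartPlayingThread() and StopPlayingThread() are called from _tmain() further below but are not included in these listings. A minimal sketch of what they might look like, assuming PlayingThread_Func() is launched as a plain Win32 thread; the handle name g_hPlayThread and the 1-second join timeout are illustrative assumptions, not part of the original sample:

#include <windows.h>

static HANDLE g_hPlayThread = NULL;   // hypothetical handle; not in the original sample

// Hypothetical sketch of StartPlayingThread(): spawn PlayingThread_Func() on its own thread.
void StartPlayingThread()
{
    g_bPlaying    = true;
    g_hPlayThread = CreateThread(NULL, 0, PlayingThread_Func, NULL, 0, NULL);
}

// Hypothetical sketch of StopPlayingThread(): in this sample PlayingThread_Func()
// loops forever, so a real version would also need the loop to check g_bPlaying.
void StopPlayingThread()
{
    g_bPlaying = false;
    if (g_hPlayThread != NULL)
    {
        WaitForSingleObject(g_hPlayThread, 1000);   // wait up to 1 s for the thread to exit
        CloseHandle(g_hPlayThread);
        g_hPlayThread = NULL;
    }
}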
/** @brief Set Size (resets the page as well) */
void WindowBox::SetSize(Rect NewSize)
{
    FixAspectRatio(NewSize);

    // align size to block size
    NewSize.W = max(NewSize.W, 2 * GRID);
    NewSize.H = max(NewSize.H, 2 * GRID);
    const lint newW = NewSize.W - (NewSize.W % GRID);
    const lint newH = NewSize.H - (NewSize.H % GRID);
    NewSize.X += (NewSize.W - newW) / 2;
    NewSize.Y += (NewSize.H - newH) / 2;
    NewSize.W = newW;
    NewSize.H = newH;

    if (Size != NewSize)
    {
        Size = NewSize;
        BuildFrame();
        Reset();
    }
}
int _tmain(int argc, _TCHAR* argv[])
{
    // Create a NatNet server
    int iConnectionType = ConnectionType_Multicast;
    //int iConnectionType = ConnectionType_Unicast;
    int iResult = CreateServer(iConnectionType);
    if (iResult != ErrorCode_OK)
    {
        printf("Error initializing server. See log for details. Exiting.\n");
        return 1;
    }

    // Create a MarkerSet description
    BuildDescription(&descriptions);

    // OK! Ready to stream data. Listen for requests from clients (RequestHandler())
    printf("\n\nCommands:\nn\tnext frame\ns\tstream frames\nr\treset server\nq\tquit\nm\tmulticast\nu\tunicast\n\n");

    bool bExit = false;
    while (int c = _getch())
    {
        switch (c)
        {
        case 'n':   // next frame
            {
                sFrameOfMocapData frame;
                BuildFrame(g_lCurrentFrame++, &descriptions, &frame);
                SendFrame(&frame);
                FreeFrame(&frame);
            }
            break;
        case 'q':   // quit
            bExit = true;
            break;
        case 's':   // play continuously
            g_lCurrentFrame = 0;
            if (g_bPlaying)
                StopPlayingThread();
            else
                StartPlayingThread();
            break;
        case 'r':   // reset server
            resetServer();
            break;
        case 'm':   // change to multicast
            iResult = CreateServer(ConnectionType_Multicast);
            if (iResult == ErrorCode_OK)
                printf("Server connection type changed to Multicast.\n\n");
            else
                printf("Error changing server connection type to Multicast.\n\n");
            break;
        case 'u':   // change to unicast
            iResult = CreateServer(ConnectionType_Unicast);
            if (iResult == ErrorCode_OK)
                printf("Server connection type changed to Unicast.\n\n");
            else
                printf("Error changing server connection type to Unicast.\n\n");
            break;
        default:
            break;
        }

        if (bExit)
        {
            theServer->Uninitialize();
            FreeDescription(&descriptions);
            break;
        }
    }

    return ErrorCode_OK;
}
bool BaseInFileStream::SendCodecs()
{
    //1. Read the first frame
    MediaFrame frame1;
    if (!_pSeekFile->SeekTo(_framesBaseOffset + 0 * sizeof (MediaFrame)))
    {
        FATAL("Unable to seek inside seek file");
        return false;
    }
    if (!_pSeekFile->ReadBuffer((uint8_t *) & frame1, sizeof (MediaFrame)))
    {
        FATAL("Unable to read frame from seeking file");
        return false;
    }

    //2. Read the second frame
    MediaFrame frame2;
    if (!_pSeekFile->SeekTo(_framesBaseOffset + 1 * sizeof (MediaFrame)))
    {
        FATAL("Unable to seek inside seek file");
        return false;
    }
    if (!_pSeekFile->ReadBuffer((uint8_t *) & frame2, sizeof (MediaFrame)))
    {
        FATAL("Unable to read frame from seeking file");
        return false;
    }

    //3. Read the current frame to pick up its timestamp
    MediaFrame currentFrame;
    if (!_pSeekFile->SeekTo(_framesBaseOffset + _currentFrameIndex * sizeof (MediaFrame)))
    {
        FATAL("Unable to seek inside seek file");
        return false;
    }
    if (!_pSeekFile->ReadBuffer((uint8_t *) & currentFrame, sizeof (MediaFrame)))
    {
        FATAL("Unable to read frame from seeking file");
        return false;
    }

    //4. Is the first frame a codec setup?
    //If not, the second is not a codec setup for sure
    if (!frame1.isBinaryHeader)
    {
        _audioVideoCodecsSent = true;
        return true;
    }

    //5. Build the buffer for the first frame
    IOBuffer buffer;
    if (!BuildFrame(_pFile, frame1, buffer))
    {
        FATAL("Unable to build the frame");
        return false;
    }

    //6. Do the feeding with the first frame
    if (!_pOutStreams->info->FeedData(
            GETIBPOINTER(buffer),                   //pData
            GETAVAILABLEBYTESCOUNT(buffer),         //dataLength
            0,                                      //processedLength
            GETAVAILABLEBYTESCOUNT(buffer),         //totalLength
            currentFrame.absoluteTime,              //absoluteTimestamp
            frame1.type == MEDIAFRAME_TYPE_AUDIO    //isAudio
            ))
    {
        FATAL("Unable to feed data");
        return false;
    }

    //7. Is the second frame a codec setup?
    if (!frame2.isBinaryHeader)
    {
        _audioVideoCodecsSent = true;
        return true;
    }

    //8. Build the buffer for the second frame
    buffer.IgnoreAll();
    if (!BuildFrame(_pFile, frame2, buffer))
    {
        FATAL("Unable to build the frame");
        return false;
    }

    //9. Do the feeding with the second frame
    if (!_pOutStreams->info->FeedData(
            GETIBPOINTER(buffer),                   //pData
            GETAVAILABLEBYTESCOUNT(buffer),         //dataLength
            0,                                      //processedLength
            GETAVAILABLEBYTESCOUNT(buffer),         //totalLength
            currentFrame.absoluteTime,              //absoluteTimestamp
            frame2.type == MEDIAFRAME_TYPE_AUDIO    //isAudio
            ))
    {
        FATAL("Unable to feed data");
        return false;
    }

    //10. Done
    _audioVideoCodecsSent = true;
    return true;
}
bool BaseInFileStream::Feed()
{
    //1. Are we in paused state?
    if (_paused)
        return true;

    //2. First, send the audio and video codecs
    if (!_audioVideoCodecsSent)
    {
        if (!SendCodecs())
        {
            FATAL("Unable to send audio/video codecs");
            return false;
        }
    }

    //3. Determine if the client has enough data in its buffer and continue,
    //or stay put
    uint32_t elapsedTime = (uint32_t) (time(NULL) - _startFeedingTime);
    if ((int32_t) _totalSentTime - (int32_t) elapsedTime >= _clientSideBufferLength)
    {
        return true;
    }

    //4. Test to see if we have sent the last frame
    if (_currentFrameIndex >= _totalFrames)
    {
        FINEST("Done streaming file");
        _pOutStreams->info->SignalStreamCompleted();
        _paused = true;
        return true;
    }

    //FINEST("_totalSentTime: %.2f; _playLimit: %.2f", (double) _totalSentTime, _playLimit);
    if (_playLimit >= 0)
    {
        if (_playLimit < (double) _totalSentTime)
        {
            FINEST("Done streaming file");
            _pOutStreams->info->SignalStreamCompleted();
            _paused = true;
            return true;
        }
    }

    //5. Read the current frame from the seeking file
    if (!_pSeekFile->SeekTo(_framesBaseOffset + _currentFrameIndex * sizeof (MediaFrame)))
    {
        FATAL("Unable to seek inside seek file");
        return false;
    }
    if (!_pSeekFile->ReadBuffer((uint8_t *) & _currentFrame, sizeof (_currentFrame)))
    {
        FATAL("Unable to read frame from seeking file");
        return false;
    }

    //6. Take care of metadata
    if (_currentFrame.type == MEDIAFRAME_TYPE_DATA)
    {
        _currentFrameIndex++;
        if (!FeedMetaData(_pFile, _currentFrame))
        {
            FATAL("Unable to feed metadata");
            return false;
        }
        return Feed();
    }

    //7. Get our hands on the correct buffer, depending on the frame type: audio or video
    IOBuffer &buffer = _currentFrame.type == MEDIAFRAME_TYPE_AUDIO ? _audioBuffer : _videoBuffer;

    //8. Build the frame
    if (!BuildFrame(_pFile, _currentFrame, buffer))
    {
        FATAL("Unable to build the frame");
        return false;
    }

    //9. Compute the timestamp
    _totalSentTime = (uint32_t) (_currentFrame.absoluteTime / 1000) - _totalSentTimeBase;

    //10. Do the feeding
    if (!_pOutStreams->info->FeedData(
            GETIBPOINTER(buffer),                          //pData
            GETAVAILABLEBYTESCOUNT(buffer),                //dataLength
            0,                                             //processedLength
            GETAVAILABLEBYTESCOUNT(buffer),                //totalLength
            (uint32_t) _currentFrame.absoluteTime,         //absoluteTimestamp
            _currentFrame.type == MEDIAFRAME_TYPE_AUDIO    //isAudio
            ))
    {
        FATAL("Unable to feed data");
        return false;
    }

    //11. Discard the data
    buffer.IgnoreAll();

    //12. Increment the frame index
    _currentFrameIndex++;

    //13. Done. We either feed again if the frame length was 0,
    //or just return true
    if (_currentFrame.length == 0)
    {
        return Feed();
    }
    else
    {
        return true;
    }
}
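The BuildFrame() used by SendCodecs() and Feed() above is not shown in these listings. Judging from the call sites (a file handle, a MediaFrame descriptor, and a destination IOBuffer), it is assumed to copy one frame's payload bytes from the media file into the buffer. A minimal, self-contained sketch of that idea using standard C++ I/O; FrameInfo, its start/length fields, and BuildFrameSketch() are hypothetical stand-ins for the project's MediaFrame/IOBuffer types, not the actual implementation:

#include <cstdint>
#include <fstream>
#include <vector>

// Hypothetical stand-in for the MediaFrame fields that matter here.
struct FrameInfo {
    uint64_t start;    // byte offset of the frame payload inside the media file
    uint64_t length;   // payload size in bytes
};

// Sketch of the assumed job of BuildFrame(): seek to the frame's offset and
// append its payload bytes to the output buffer.
bool BuildFrameSketch(std::ifstream &mediaFile, const FrameInfo &frame,
                      std::vector<uint8_t> &buffer)
{
    mediaFile.seekg((std::streamoff) frame.start, std::ios::beg);
    if (!mediaFile)
        return false;

    const size_t oldSize = buffer.size();
    buffer.resize(oldSize + (size_t) frame.length);
    mediaFile.read((char *) (buffer.data() + oldSize), (std::streamsize) frame.length);
    return (uint64_t) mediaFile.gcount() == frame.length;
}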