void KitSocket::writeSignon(unsigned ver, unsigned short tlv, const QString &_name) { QCString name(_name.local8Bit()); sflap_frame frame; frame.asterisk = '*'; frame.type = SFLAP_SIGNON; frame.sequence = htons(++out_sequence & 0xFFFF); frame.data_length = htons( 8 + name.length() ); // version *(unsigned int *)(frame.data) = htonl( ver ); // For Solaris... //*(frame.data) = (char)0; //*(frame.data+1) = (char)0; //*(frame.data+2) = (char)0; //*(frame.data+3) = (char)1; // TLV flag (whatever that is) *(unsigned short *)(frame.data + 4) = htons( tlv ); // normalized user name length *(unsigned short *)(frame.data + 6) = htons( name.length() ); // normalized user name strncpy(frame.data + 8, name.data(), name.length()); writeFrame(frame); }
void VideoExporter::saveFrame(const QImage &image, int count) { Q_ASSERT(count>0); Q_ASSERT(!image.isNull()); QImage frameImage = image; if(!isVariableSize() && image.size() != _targetsize) { QImage newframe = QImage(_targetsize, QImage::Format_ARGB32); newframe.fill(Qt::black); QSize newsize = image.size().scaled(_targetsize, Qt::KeepAspectRatio); QRect rect( QPoint( _targetsize.width()/2 - newsize.width()/2, _targetsize.height()/2 - newsize.height()/2 ), newsize ); QPainter painter(&newframe); painter.drawImage(rect, image, QRect(QPoint(), image.size())); painter.end(); frameImage = newframe; } if(count>0) { writeFrame(frameImage, count); _frame += count; } }
void VideoWriterThread::encodeFrame(BitmapPtr pBmp)
{
    // Encode one frame: convert the incoming bitmap to the encoder's
    // pixel format, then pass the converted frame to writeFrame().
    // The whole conversion+write is timed under ProfilingZoneEncodeFrame,
    // and the thread profiler is reset so each frame is measured
    // independently.
    ScopeTimer timer(ProfilingZoneEncodeFrame);
    convertRGBImage(pBmp);
    writeFrame(m_pConvertedFrame);
    ThreadProfiler::get()->reset();
}
// Transmits an entire frame of imagery to the client int sendFullFrame(SOCKET &clientSocket) { int status = 0; FIBITMAP *fiImage; FIMEMORY *fiBuffer; // Signal that a new frame is required and wait for frame InterlockedExchange( &g_lRequestFlag, TRUE ); g_pRequestEvent->waitFor(); // Enter critical section for frame buffer from UT2004 // and copy new raw image to local buffer EnterCriticalSection( &g_CriticalSection ); { fiImage = FreeImage_ConvertTo24Bits(g_fiImage); } LeaveCriticalSection( &g_CriticalSection ); // Create memory reference fiBuffer = FreeImage_OpenMemory(); // Convert a raw frame to a useful image status = writeFrame( fiBuffer, fiImage, g_iImageType ); if (status != 1) status = 0; // TODO: handle error here // Transmit frame over socket status = transmitFrame( fiBuffer, clientSocket, g_iImageType ); // Delete memory references FreeImage_Unload( fiImage ); FreeImage_CloseMemory( fiBuffer ); return status; }
void Profiler::writeData() const
{
    // Dump all buffered frame samples to `fileName` as delimited text.
    // The sample store is a ring of MAX_FRAME_SAMPLES frames; once it has
    // wrapped, the oldest frame lives just after the current write slot.
    // NOTE(review): this is a const method that writes to outStream, so
    // outStream is presumably declared mutable — confirm in the header.
    outStream.open(fileName, std::ios::trunc);

    //Write category headers
    for(uint i=0;i< numUsedCategories; i++)
    {
        outStream << categories[i].name;
        outStream << getDelimiter(i);
    }

    //Account for last frame if they added entries
    uint endIndex;
    uint startIndex;
    if(wrapped())
    {
        // Ring has wrapped: valid frames run from the slot after the
        // current write position all the way around to the write
        // position itself.
        endIndex = frameIndex % MAX_FRAME_SAMPLES;
        startIndex = (endIndex + 1) % MAX_FRAME_SAMPLES;
        while(startIndex != endIndex)
        {
            writeFrame(startIndex);
            startIndex = (startIndex + 1) % MAX_FRAME_SAMPLES;
        }
        // The slot currently being filled is only emitted once its frame
        // is complete (startIndex == endIndex here).
        if(currentFrameComplete())
        {
            writeFrame(startIndex);
        }
    }
    else
    {
        // No wrap yet: frames 0..frameIndex-1 are valid, plus the
        // in-progress frame if it has been completed.
        uint numActualFrames = frameIndex;
        if(currentFrameComplete())
        {
            numActualFrames++;
        }
        startIndex = 0;
        endIndex = numActualFrames;
        while(startIndex < endIndex)
        {
            writeFrame(startIndex++);
        }
    }
    outStream.close();
}
/*!
    \internal
    Writes every frame in \a frames to the socket and returns the total
    number of bytes written.
 */
qint64 WebSocket::writeFrames(const QList<QByteArray> &frames)
{
    qint64 totalBytes = 0;
    const int frameCount = frames.size();
    for (int idx = 0; idx < frameCount; ++idx)
    {
        totalBytes += writeFrame(frames.at(idx));
    }
    return totalBytes;
}
void RGBDFrameRecorder :: saveCurrentFrames(const std::vector<RGBDImage>& images) { for (int image_i = 0; image_i < images.size(); ++image_i) { std::string frame_dir = getNextFrameDirectory(images[image_i]); writeFrame(images[image_i], frame_dir); } ++m_frame_index; }
int VideoIO::main(void)
{
    ///TODO
    // Main read/write pump: reads `range` frames, interleaving writes
    // whenever another component has requested them (writingRequested)
    // or signalled a full flush (writeAllFrame). After all reads, any
    // frames still queued in pOutputFrameHeap are drained.
    // NOTE(review): writingRequested/writeAllFrame look like flags set
    // from another thread; there is no visible synchronization here —
    // confirm how they are published.
    int range = pContext->getConvertingRange();
    int i = 0;
    while(i < range)
    {
        if(writingRequested != 0 || writeAllFrame == true)
        {
            //write frame
            if(writingRequested != 0)
            {
                // NOTE(review): this inner `i` shadows the outer frame
                // counter; intentional here (outer i must not advance on
                // a write pass), but easy to misread.
                for(int i = 0 ; i < writingRequested ; i++)
                {
                    writeFrame();
                }
                writingRequested = 0;
            }
            else
            {
                // Full flush requested: drain the whole output heap.
                while(pOutputFrameHeap->isEmpty() == false)
                    writeFrame();
            }
        }
        else
        {
            //read frame
            readFrame();
            i++;
        }
    }

    //waiting to textFram2PPM exit
    // while(writeAllFrame == false)
    // {
    //     ; //waiting
    // }

    //write remain frame
    while(pOutputFrameHeap->isEmpty() == false)
        writeFrame();

    fprintf(stderr, "[VideoIO] Exit <<<<<<<<<<<<<<<<<<\n");
    return 0;
}
/*!
    \internal
    Writes each frame in \a frames to the socket (when one is set) and
    returns the total number of bytes written.
 */
qint64 QWebSocketPrivate::writeFrames(const QList<QByteArray> &frames)
{
    qint64 written = 0;
    if (Q_LIKELY(m_pSocket)) {
        QList<QByteArray>::const_iterator it;
        // Compare iterators with != rather than < : inequality is the
        // idiomatic (and universally valid) iterator loop condition.
        for (it = frames.cbegin(); it != frames.cend(); ++it)
            written += writeFrame(*it);
    }
    return written;
}
/*!
    \internal
    Sends a PING control frame carrying at most 125 bytes of \a payload
    and restarts the ping timer (used to compute round-trip time when the
    matching pong arrives).
 */
void QWebSocketPrivate::ping(const QByteArray &payload)
{
    // Control-frame payloads are capped at 125 bytes (RFC 6455 §5.5).
    QByteArray payloadTruncated = payload.left(125);
    m_pingTimer.restart();
    QByteArray pingFrame = getFrameHeader(QWebSocketProtocol::OpCodePing,
                                          payloadTruncated.size(),
                                          0 /*do not mask*/,
                                          true);
    pingFrame.append(payloadTruncated);
    qint64 ret = writeFrame(pingFrame);
    Q_UNUSED(ret);
}
void KitSocket::writeKeepAlive(void)
{
    // Send an empty FLAP KEEP_ALIVE frame to hold the connection open.
    sflap_frame frame;
    frame.asterisk = '*';
    frame.type = SFLAP_KEEP_ALIVE;
    frame.sequence = htons(++out_sequence & 0xFFFF);
    frame.data_length = 0;  // no payload; zero needs no byte-swapping
    writeFrame(frame);
}
void ImageExportSerializer::writeIndex(pugi::xml_node _parent, DataPtr _data)
{
    // Serialize one <Index> element: mandatory "name" attribute, optional
    // "rate" attribute, then one frame node per child.
    pugi::xml_node indexNode = _parent.append_child("Index");
    indexNode.append_attribute("name").set_value(_data->getPropertyValue("Name").c_str());

    const std::string rate = _data->getPropertyValue("Rate");
    if (!rate.empty())
        indexNode.append_attribute("rate").set_value(rate.c_str());

    const Data::VectorData& childs = _data->getChilds();
    for (Data::VectorData::const_iterator child = childs.begin(); child != childs.end(); ++child)
        writeFrame(indexNode, (*child));
}
void QTextOdfWriter::writeFrame(QXmlStreamWriter &writer, const QTextFrame *frame)
{
    // Recursively serialize a QTextFrame to ODF. A frame that is a
    // QTextTable becomes a <table:table>; any other non-root frame becomes
    // a <text:section>; the root frame emits no wrapper element. Blocks
    // inside a table are grouped into <table:table-row>/<table:table-cell>
    // elements as the row index changes.
    Q_ASSERT(frame);
    const QTextTable *table = qobject_cast<const QTextTable*> (frame);

    if (table) { // Start a table.
        writer.writeStartElement(tableNS, QString::fromLatin1("table"));
        writer.writeEmptyElement(tableNS, QString::fromLatin1("table-column"));
        writer.writeAttribute(tableNS, QString::fromLatin1("number-columns-repeated"), QString::number(table->columns()));
    } else if (frame->document() && frame->document()->rootFrame() != frame) {
        // start a section
        writer.writeStartElement(textNS, QString::fromLatin1("section"));
    }

    QTextFrame::iterator iterator = frame->begin();
    QTextFrame *child = 0;      // last child frame seen, to avoid recursing twice
    int tableRow = -1;          // -1 means "no table-row element open yet"
    while (! iterator.atEnd()) {
        if (iterator.currentFrame() && child != iterator.currentFrame())
            writeFrame(writer, iterator.currentFrame());
        else { // no frame, its a block
            QTextBlock block = iterator.currentBlock();
            if (table) {
                QTextTableCell cell = table->cellAt(block.position());
                if (tableRow < cell.row()) {
                    // Entered a new row: close the previous one (if any)
                    // and open the next.
                    if (tableRow >= 0)
                        writer.writeEndElement(); // close table row
                    tableRow = cell.row();
                    writer.writeStartElement(tableNS, QString::fromLatin1("table-row"));
                }
                writer.writeStartElement(tableNS, QString::fromLatin1("table-cell"));
                if (cell.columnSpan() > 1)
                    writer.writeAttribute(tableNS, QString::fromLatin1("number-columns-spanned"), QString::number(cell.columnSpan()));
                if (cell.rowSpan() > 1)
                    writer.writeAttribute(tableNS, QString::fromLatin1("number-rows-spanned"), QString::number(cell.rowSpan()));
                if (cell.format().isTableCellFormat()) {
                    writer.writeAttribute(tableNS, QString::fromLatin1("style-name"), QString::fromLatin1("T%1").arg(cell.tableCellFormatIndex()));
                }
            }
            writeBlock(writer, block);
            if (table)
                writer.writeEndElement(); // table-cell
        }
        child = iterator.currentFrame();
        ++iterator;
    }
    if (tableRow >= 0)
        writer.writeEndElement(); // close table-row
    if (table || (frame->document() && frame->document()->rootFrame() != frame))
        writer.writeEndElement(); // close table or section element
}
void AudioRecorder::updateRecord() { ALint samples; alcGetIntegerv(m_recordDevice, ALC_CAPTURE_SAMPLES, (ALCsizei)sizeof(ALint), &samples); alcCaptureSamples(m_recordDevice, (ALCvoid *)m_recordBuffer, samples); m_sampleArray.append((char*)&m_recordBuffer[0], samples * 2); if (m_sampleArray.size() >= frame_size * 2) { writeFrame((uint8_t*)m_sampleArray.data(), frame_size * 2); m_sampleArray.remove(0, frame_size * 2); } }
void SPHSystem::step()
{
    // Advance the simulation by one frame (params.nsteps integration
    // sub-steps), appending the frame to the log when logging is enabled.
    ++curframe;
    cout << "frame " << curframe << " out of " << params.nframes << endl;

    if (curframe >= params.nframes)
        return; // all requested frames done — nothing left to integrate

    for (int substep = 0; substep < params.nsteps; ++substep) {
        computeAcceleration();
        leapFrog();
        checkState();
    }

    if (params.writeLog)
        writeFrame(&pressure[0]);
}
void KitSocket::writeData(const QString &data) { emit (debugWriteData(data)); sflap_frame frame; frame.asterisk = '*'; frame.type = SFLAP_DATA; out_sequence++; frame.sequence = htons(out_sequence & 0xFFFF); frame.data_length = htons( data.length() + 1 ); strncpy(frame.data, data.local8Bit().data(), data.local8Bit().length() + 1); writeFrame(frame); }
/*!
    \internal
    Sends a PING control frame carrying at most 125 bytes of \a payload,
    masking it when this end is a client (m_mustMask), and restarts the
    ping timer used to compute round-trip time on the matching pong.
 */
void QWebSocketPrivate::ping(const QByteArray &payload)
{
    // Control-frame payloads are capped at 125 bytes (RFC 6455 §5.5).
    QByteArray payloadTruncated = payload.left(125);
    m_pingTimer.restart();
    quint32 maskingKey = 0;
    if (m_mustMask)
        maskingKey = generateMaskingKey();
    // The header must advertise the same masking key that is applied to
    // the payload below.
    QByteArray pingFrame = getFrameHeader(QWebSocketProtocol::OpCodePing,
                                          payloadTruncated.size(),
                                          maskingKey,
                                          true);
    if (m_mustMask)
        QWebSocketProtocol::mask(&payloadTruncated, maskingKey);
    pingFrame.append(payloadTruncated);
    qint64 ret = writeFrame(pingFrame);
    Q_UNUSED(ret);
}
void SPHSystem::init()
{
    // Reset the frame counter, open the binary trace log (when enabled)
    // and record the initial state, then prime the leap-frog integrator
    // with a half-step start.
    curframe = 0;

    if( params.writeLog )
    {
        // Move-assigning a fresh ofstream (rather than .open()) also
        // discards any previous stream state if init() runs again.
        logstream = ofstream(params.logfile, ios::binary);
        // write initial state
        writeHeader();
        writeFrame(&pressure[0]);
    }

    // step once
    computeAcceleration();
    leapFrogStart();
    checkState();
}
bool CompartmentReportCommon::writeFrame(const GIDSet& gids,
                                         const float* values,
                                         const size_ts& sizes,
                                         const double timestamp)
{
    // Fan one multi-cell frame out into per-cell writeFrame() calls.
    // `values` is the concatenation of each cell's block: sizes[i] values
    // belong to the i-th gid in iteration order. Stops at the first
    // failing cell and reports it.
    size_t valueOffset = 0;
    size_t cell = 0;
    for (const auto gid : gids)
    {
        const size_t cellSize = sizes[cell];
        if (!writeFrame(gid, values + valueOffset, cellSize, timestamp))
            return false;
        valueOffset += cellSize;
        ++cell;
    }
    return true;
}
// ajouter une gestion plus fine du mode par defaut // setMono/setColor appelee par ImageTypeSP // setPixelDepth si Mono16 bool SER_Recorder::writeFrameMono(unsigned char *frame) { if (isStreamingActive == false && (offsetX > 0 || offsetY > 0 || serh.ImageWidth != rawWidth || serh.ImageHeight != rawHeight)) { int offset = ((rawWidth * offsetY) + offsetX); uint8_t *srcBuffer = frame + offset; uint8_t *destBuffer = frame; int imageWidth = serh.ImageWidth; int imageHeight = serh.ImageHeight; for (int i = 0; i < imageHeight; i++) memcpy(destBuffer + i * imageWidth, srcBuffer + rawWidth * i, imageWidth); } return writeFrame(frame); }
const QString& AudioRecorder::stopRecord()
{
    // Stop capturing, flush any samples still pending (smaller than one
    // full frame — the periodic update only writes whole frames), then
    // tear the recorder down. Returns the path of the recorded file.
    m_updateRecordTimer.stop();
    alcCaptureStop(m_recordDevice);
    alcCaptureCloseDevice(m_recordDevice);

    // The old code computed `data` and then re-derived the pointer in the
    // writeFrame call; use the local it introduced.
    uint8_t* data = (uint8_t*)m_sampleArray.data();
    size_t size = m_sampleArray.size();
    if (size > 0)
        writeFrame(data, size);

    m_sampleArray.clear();
    cleanupRecorder();
    return m_filePath;
}
const unsigned char *V4LCamera::getFrame()
{
    // Return a pointer to the next captured frame. Uses V4L1 mmap capture
    // when available (sync the pending buffer, hand it out, queue the
    // next capture); on VIDIOCMCAPTURE failure it permanently falls back
    // to read()-based capture. A test pattern is returned when the device
    // isn't usable, and every frame is optionally recorded via
    // writeFrame() when recfd_ is open.
    unsigned char *ret;
    if (!isOK())
        return testpattern();
    if (use_mmap_) {
        if (0)
            printf("VIDIOCSYNC frame %u\n", mmap_nextframe_);
        // Wait for the in-flight capture into mmap_nextframe_ to finish.
        if (ioctl(fd_, VIDIOCSYNC, &mmap_nextframe_) == -1) {
            failed_ = true;
            perror("VIDIOSYNC failed");
        }
        ret = frameptrs_[mmap_nextframe_];
        mmap_nextframe_ = (mmap_nextframe_ + 1) % mmap_frames_;
        // Queue the capture of the following buffer so it is ready (or in
        // progress) by the next call.
        struct video_mmap vidmmap;
        vidmmap.frame = mmap_nextframe_;
        vidmmap.height = imageHeight();
        vidmmap.width = imageWidth();
        vidmmap.format = VIDEO_PALETTE_YUV420P;
        if (0)
            printf("vidmmap.frame=%u height=%d width=%d format=%u\n", vidmmap.frame, vidmmap.height, vidmmap.width, vidmmap.format);
        if (ioctl(fd_, VIDIOCMCAPTURE, &vidmmap) == -1) {
            // mmap capture is broken on this driver: switch to read() and
            // allocate the buffer that path needs.
            perror("VIDIOCMCAPTURE failed - reverting to read");
            use_mmap_ = false;
            buf_ = new unsigned char[imageSize()];
        }
    } else {
        int r = read(fd_, buf_, imageSize());
        if (r != imageSize())
            failed_ = true;
        ret = buf_;
    }
    if (recfd_ != -1)
        writeFrame(ret);
    return ret;
}
int8_t PN532_SPI::writeCommand(const uint8_t *header, uint8_t hlen, const uint8_t *body, uint8_t blen) { command = header[0]; writeFrame(header, hlen, body, blen); uint8_t timeout = PN532_ACK_WAIT_TIME; while (!isReady()) { delay(1); timeout--; if (0 == timeout) { DMSG_STR("Time out when waiting for ACK"); return -2; } } if (readAckFrame()) { DMSG_STR("Invalid ACK"); return PN532_INVALID_ACK; } return 0; }
/*!
 * \brief Pings the server to indicate that the connection is still alive.
 *
 * Sends a PING control frame with an empty payload and restarts the
 * timer used to measure the round-trip time when the matching pong
 * arrives.
 *
 * \sa pong()
 */
void WebSocket::ping()
{
    m_pingTimer.restart();
    QByteArray pingFrame = getFrameHeader(WebSocketProtocol::OC_PING, 0, 0, true);
    writeFrame(pingFrame);
}
bool QTextOdfWriter::writeAll()
{
    // Serialize the whole document to ODF content: set up the output
    // strategy (zip archive or plain XML), declare the ODF namespaces,
    // collect every format index used by fragments/blocks/objects so
    // their styles can be emitted first, then write the body starting
    // from the root frame. Returns false only if the device cannot be
    // opened for writing.
    if (m_createArchive)
        m_strategy = new QZipStreamStrategy(m_device);
    else
        m_strategy = new QXmlStreamStrategy(m_device);

    if (!m_device->isWritable() && ! m_device->open(QIODevice::WriteOnly)) {
        qWarning() << "QTextOdfWriter::writeAll: the device can not be opened for writing";
        return false;
    }
    QXmlStreamWriter writer(m_strategy->contentStream);
#ifndef QT_NO_TEXTCODEC
    if (m_codec)
        writer.setCodec(m_codec);
#endif
    // prettyfy
    writer.setAutoFormatting(true);
    writer.setAutoFormattingIndent(2);

    writer.writeNamespace(officeNS, QString::fromLatin1("office"));
    writer.writeNamespace(textNS, QString::fromLatin1("text"));
    writer.writeNamespace(styleNS, QString::fromLatin1("style"));
    writer.writeNamespace(foNS, QString::fromLatin1("fo"));
    writer.writeNamespace(tableNS, QString::fromLatin1("table"));
    writer.writeNamespace(drawNS, QString::fromLatin1("draw"));
    writer.writeNamespace(xlinkNS, QString::fromLatin1("xlink"));
    writer.writeNamespace(svgNS, QString::fromLatin1("svg"));
    writer.writeStartDocument();
    writer.writeStartElement(officeNS, QString::fromLatin1("document-content"));
    writer.writeAttribute(officeNS, QString::fromLatin1("version"), QString::fromLatin1("1.2"));

    // add fragments. (for character formats)
    QTextDocumentPrivate::FragmentIterator fragIt = m_document->docHandle()->begin();
    QSet<int> formats;
    while (fragIt != m_document->docHandle()->end()) {
        const QTextFragmentData * const frag = fragIt.value();
        formats << frag->format;
        ++fragIt;
    }

    // add blocks (for blockFormats)
    QTextDocumentPrivate::BlockMap &blocks = m_document->docHandle()->blockMap();
    QTextDocumentPrivate::BlockMap::Iterator blockIt = blocks.begin();
    while (blockIt != blocks.end()) {
        const QTextBlockData * const block = blockIt.value();
        formats << block->format;
        ++blockIt;
    }

    // add objects for lists, frames and tables
    QVector<QTextFormat> allFormats = m_document->allFormats();
    QList<int> copy = formats.toList();
    for (QList<int>::Iterator iter = copy.begin(); iter != copy.end(); ++iter) {
        QTextObject *object = m_document->objectForFormat(allFormats[*iter]);
        if (object)
            formats << object->formatIndex();
    }
    writeFormats(writer, formats);

    writer.writeStartElement(officeNS, QString::fromLatin1("body"));
    writer.writeStartElement(officeNS, QString::fromLatin1("text"));
    QTextFrame *rootFrame = m_document->rootFrame();
    writeFrame(writer, rootFrame);
    writer.writeEndElement(); // text
    writer.writeEndElement(); // body
    writer.writeEndElement(); // document-content
    writer.writeEndDocument();
    delete m_strategy;
    m_strategy = 0;
    return true;
}
bool DumpGui::run()
{
    // Headless main loop: advance the movie clock by _interval each
    // iteration, dump audio samples and (rate-limited) video frames once
    // dumping has started, and optionally defer the start until an
    // ActionScript "_ready" trigger becomes true. Returns true after the
    // timeout or a terminate request.
    if ( _fileOutputFPS ) {
        _fileOutputAdvance = static_cast<int>(1000/_fileOutputFPS);
    }
    else {
        // No FPS given: dump a frame every heart-beat and derive the FPS
        // from that interval.
        _fileOutputAdvance = _interval;
        _fileOutputFPS = static_cast<int>(1000/_fileOutputAdvance);
    }

    log_debug("DumpGui entering main loop with interval of %d ms", _interval);

    // heart-beat interval, in milliseconds
    // TODO: extract this value from the swf's FPS
    //       by default and allow overriding it
    //
    unsigned int clockAdvance = _interval;

    const bool doDisplay = _fileStream.is_open();

    terminate_request = false;

    _startTime = _clock.elapsed();

    while (!terminate_request) {
        _clock.advance(clockAdvance);
        // advance movie now
        advanceMovie(doDisplay);

        if (_started) {
            writeSamples();

            // Dump a video frame if it's time for it or no frame
            // was dumped yet
            size_t elapsed = _clock.elapsed();
            if (!_framecount || (elapsed - _lastVideoFrameDump) >= _fileOutputAdvance) {
                writeFrame();
            }

            // check if we've reached a timeout
            if (_timeout && _clock.elapsed() >= _timeout) {
                break;
            }
        }

        if (_sleepUS)
            gnashSleep(_sleepUS);

        if (!_started && !_startTrigger.empty()) {
            // Check whether to start: resolve the trigger path and begin
            // dumping once its "_ready" member evaluates to true.
            std::string path;
            std::string var;
            if (parsePath(_startTrigger, path, var)) {
                movie_root& mr = *getStage();
                const as_environment& env = mr.getRootMovie().get_environment();
                as_object* o = findObject(env, path);
                if (o) {
                    as_value val;
                    o->get_member(getURI(mr.getVM(), "_ready"), &val);
                    if (val.equals(true, 8)) {
                        log_debug("Starting dump");
                        _started = true;
                        _startTime = _clock.elapsed();
                        _lastVideoFrameDump = _startTime;
                    }
                }
            }
        }
    }

    const boost::uint32_t total_time = _clock.elapsed() - _startTime;

    std::cout << "TIME=" << total_time << std::endl;
    std::cout << "FPS_ACTUAL=" << _fileOutputFPS << std::endl;

    // In this Gui, quit() does not exit, but it is necessary to catch the
    // last frame for screenshots.
    quit();
    return true;
}
void ReadLightW(Semaphore_Handle Sema, unsigned int *results){ char RMS_str[5]; char RMS_cnt_str[5]; //Set pins as output and set light sensor high. GPIOPinTypeGPIOOutput(GPIO_PORTE_BASE, GPIO_PIN_1); GPIOPinWrite(GPIO_PORTE_BASE, GPIO_PIN_1, GPIO_PIN_1); Timer_start(timer1); //Turn on timer1 for 12 microseconds. //Pend on LightSema Semaphore_pend(Sema, BIOS_WAIT_FOREVER); switch (adState) { case 0: // no line dtected if (*results && (black_count >= 20)){ //make sure it is a full line. Clock_start(DataClock); wasWhite = 0; //record state. adState = 1; //next state. } break; case 1: //first rising edge detected //falling edge of first line if (!(*results) && !wasWhite && (black_count >= 20)) { wasWhite = 1; adState = 2; if ((black_count <= 40) && (black_count >= 20)) { //single line detected if (drive_state == FOLLOW1 || prev_follow_state == FOLLOW1){ //First black line found, progress to follow2 // and start acquiring data. drive_state = FOLLOW2; prev_follow_state = FOLLOW2; RMS_cnt = 0; RMS = 0; Clock_start(DataClock); } else if (drive_state == FOLLOW2 || prev_follow_state == FOLLOW2){ //Second black line found, progress to follow3 // and stop acquiring data. prev_follow_state = FOLLOW3; drive_state = FOLLOW3; Clock_stop(DataClock); //RMS /= RMS_cnt; //finalize RMS //RMS = sqrt(RMS); intToStr(RMS_cnt, RMS_cnt_str, 4); strcpy(FullBufferPtr, "\0"); strcpy(FullBufferPtr, "R=\0"); //Clear the fullBuffer. intToStr(RMS, RMS_str, 4); strcat(FullBufferPtr, RMS_str); strcat(FullBufferPtr, "\nC=\0"); strcat(FullBufferPtr, RMS_cnt_str); strcat(FullBufferPtr, "\n\0"); writeFrame("",FullBufferPtr); //Semaphore_post(TxDataSema); //Let writeFrame continue. 
//Clock_stop(DataClock); } }else if (black_count > 40){ //double line detected StopDrivingFn(); Clock_stop(DataClock); } black_count = 0; } break; case 2: //first falling edge detected if (*results && wasWhite && (black_count >= 20)){ wasWhite = 0; adState = 3; } break; case 3: //second rising edge detected //falling edge of second line if (!(*results) && !wasWhite && (black_count >= 20)) { wasWhite = 1; adState = 4; if ((black_count <= 40) && (black_count >= 20)) { //single line detected if (drive_state == FOLLOW1 || prev_follow_state == FOLLOW1){ //First black line found, progress to follow2 // and start acquiring data. drive_state = FOLLOW2; prev_follow_state = FOLLOW2; RMS_cnt = 0; RMS = 0; Clock_start(DataClock); } else if (drive_state == FOLLOW2 || prev_follow_state == FOLLOW2){ //First black line found, progress to follow3 // and stop acquiring data. prev_follow_state = FOLLOW3; drive_state = FOLLOW3; //Clock_stop(DataClock); Clock_stop(DataClock); //RMS /= RMS_cnt; //finalize RMS //RMS = sqrt(RMS); intToStr(RMS_cnt, RMS_cnt_str, 4); strcpy(FullBufferPtr, "\0"); strcpy(FullBufferPtr, "R=\0"); //Clear the fullBuffer. intToStr(RMS, RMS_str, 4); strcat(FullBufferPtr, RMS_str); strcat(FullBufferPtr, "\nC=\0"); strcat(FullBufferPtr, RMS_cnt_str); strcat(FullBufferPtr, "\n\0"); writeFrame("",FullBufferPtr); //Semaphore_post(TxDataSema); //Let writeFrame continue. } }else if (black_count > 40){//double line detected //StopDrivingFn(); Clock_stop(DataClock); } black_count = 0; } break; case 4: //second falling edge detected. Done collecting data. //Clock_stop(DataClock); //StopAcquireData(); Clock_stop(DataClock); //RMS /= RMS_cnt; //finalize RMS //RMS = sqrt(RMS); intToStr(RMS_cnt, RMS_cnt_str, 4); strcpy(FullBufferPtr, "\0"); strcpy(FullBufferPtr, "R=\0"); //Clear the fullBuffer. 
intToStr(RMS, RMS_str, 4); strcat(FullBufferPtr, RMS_str); strcat(FullBufferPtr, "\nC=\0"); strcat(FullBufferPtr, RMS_cnt_str); strcat(FullBufferPtr, "\n\0"); writeFrame("",FullBufferPtr); //Semaphore_post(TxDataSema); //Let writeFrame continue. //push whats remaining in the buffer out to transfer it. strcpy(FullBufferPtr, "\0"); strcat(pingptr1, "\n\0"); strcpy(FullBufferPtr, pingptr1); Semaphore_post(TxDataSema); strcpy(pingptr1, "\0"); numChars = 0; wasWhite = 0; adState = 0; break; default: break; } }
bool CANSocketRaw::write(const struct CANFrameInfo &message)
{
    // Thin forwarding wrapper: send the raw CAN frame carried inside
    // `message`, returning writeFrame()'s success flag.
    LOG_TRACE("");
    return writeFrame(message.frame);
}
void RGBDFrameRecorder :: saveCurrentFrame(const RGBDImage& image)
{
    // Write this image into a freshly allocated frame directory, then
    // advance the recorder's frame counter.
    writeFrame(image, getNextFrameDirectory(image));
    ++m_frame_index;
}
int Traj_GmxTrX::parallelWriteFrame(int set, Frame const& frameOut)
{
    // Frames have a fixed on-disk size, so frame `set` starts at
    // frameSize_ * set; seek there and reuse the serial writer. This is
    // what makes out-of-order (parallel) frame writes safe.
    // Seek to given frame.
    file_.Seek( frameSize_ * set );
    return ( writeFrame(set, frameOut) );
}