void V4LFrameSource::run() { RecursiveMutexLock runningLock(m_runningMutex); int64_t frameTS = -1; int64_t lastFrameTS = -1; while (m_isRunning) { runningLock.unlock(); try { FramePtr frame(new Frame()); readFrame(frame.get()); frameTS = frame->getTimestamp(); { RecursiveMutexLock framePtrLock(m_framePtrMutex); m_lastFrame = frame; } emitFrame(frame); lastFrameTS = frameTS; } catch (EOFException e) { break; } catch (IOException e) { std::cout << "ERROR: " << e.what() << std::endl; } runningLock.lock(); } }
void IdleCallback() { XnStatus nRetVal = XN_STATUS_OK; if (g_bPause != TRUE) { // read a frame readFrame(); // capture if needed nRetVal = captureFrame(); if (nRetVal != XN_STATUS_OK) { displayMessage("Error capturing frame: '%s'", xnGetStatusString(nRetVal)); } // add to statistics //statisticsAddFrame(); } if (g_bStep == TRUE) { g_bStep = FALSE; g_bPause = TRUE; } glutPostRedisplay(); }
// Slow-path read: called when the in-buffer fast path cannot supply `len`
// bytes.  Hands over whatever contiguous data is available without risking
// a block on the underlying transport.  Returns the number of bytes copied
// into `buf` (0 only on EOF).
uint32_t TFramedTransport::readSlow(uint8_t* buf, uint32_t len) {
    uint32_t want = len;
    uint32_t have = rBound_ - rBase_;

    // We should only take the slow path if we can't satisfy the read
    // with the data already in the buffer.
    assert(have < want);

    // If we have some data in the buffer, copy it out and return it.
    // We have to return it without attempting to read more, since we aren't
    // guaranteed that the underlying transport actually has more data, so
    // attempting to read from it could block.
    if (have > 0) {
        memcpy(buf, rBase_, have);
        setReadBuffer(rBuf_.get(), 0);  // mark the internal buffer drained
        return have;
    }

    // Read another frame.
    if (!readFrame()) {
        // EOF.  No frame available.
        return 0;
    }

    // TODO(dreiss): Should we warn when reads cross frames?

    // Hand over whatever we have.
    uint32_t give = std::min(want, static_cast<uint32_t>(rBound_ - rBase_));
    memcpy(buf, rBase_, give);
    rBase_ += give;
    want -= give;

    return (len - want);
}
// Opens the depth, colour and IR streams on `device` according to `config`,
// initialises constants and pulls an initial frame.
// Returns 0 on success, or the first failing openStream() result.
// NOTE(review): playback control is taken from the global g_device rather
// than the `device` parameter — confirm callers always pass g_device here.
int openCommon(openni::Device& device, DeviceConfig config) {
    g_pPlaybackControl = g_device.getPlaybackControl();

    int rc = openStream(device, "depth", openni::SENSOR_DEPTH, config.openDepth,
                        g_depthStream, &g_depthSensorInfo, &g_bIsDepthOn);
    if (rc != 0)
        return rc;

    rc = openStream(device, "color", openni::SENSOR_COLOR, config.openColor,
                    g_colorStream, &g_colorSensorInfo, &g_bIsColorOn);
    if (rc != 0)
        return rc;

    rc = openStream(device, "IR", openni::SENSOR_IR, config.openIR,
                    g_irStream, &g_irSensorInfo, &g_bIsIROn);
    if (rc != 0)
        return rc;

    initConstants();
    readFrame();
    return 0;
}
void AdminConnection::processLogin(){ InputFrame::Ptr recvframe( new InputFrame(version, paddingfilter) ); if (readFrame(recvframe)) { try { if(recvframe->getType() == ft02_Login){ std::string username, password; if ( getAuth( recvframe, username, password ) ) { bool authenticated = false; try{ if(username == Settings::getSettings()->get("admin_user") && password == Settings::getSettings()->get("admin_pass")) authenticated = true; }catch(std::exception e){ } if(authenticated){ sendOK( recvframe, "Welcome" ); INFO("Admin login ok by %s", username.c_str()); logextid = Logger::getLogger()->addLog( LogSink::Ptr( new AdminLogger( boost::dynamic_pointer_cast<AdminConnection>(shared_from_this()) ) ) ); status = READY; } else { throw FrameException( fec_FrameError, "Admin Login Error - bad username or password"); // TODO - should be a const or enum, Login error } } }else{ throw FrameException( fec_FrameError, "Wrong type of frame in this state, wanted login"); } } catch ( FrameException& exception ) { // This might be overkill later, but now let's log it DEBUG( "AdminConnection caught FrameException : %s", exception.what() ); sendFail( recvframe, exception.getErrorCode(), exception.getErrorMessage() ); } } }
void AdminConnection::processNormalFrame() { InputFrame::Ptr frame( new InputFrame(version,paddingfilter) ); if (readFrame(frame)) { try { switch (frame->getType()) { case ftad_CommandDesc_Get: processDescribeCommand(frame); break; case ftad_CommandTypes_Get: processGetCommandTypes(frame); break; case ftad_Command: processCommand(frame); break; default: WARNING("AdminConnection: Discarded frame, not processed, was type %d", frame->getType()); throw FrameException( fec_ProtocolError, "Did not understand that frame type."); break; } } catch ( FrameException& exception ) { // This might be overkill later, but now let's log it DEBUG( "AdminConnection caught FrameException : %s", exception.what() ); sendFail( frame, exception.getErrorCode(), exception.getErrorMessage() ); } } else { DEBUG("noFrame :("); // client closed } }
// Positions the visitor on `startFrame`: begins at the VM's current top call
// frame and unwinds until `startFrame` is reached (or the stack is exhausted).
StackVisitor::StackVisitor(CallFrame* startFrame, VM* vm)
{
    m_frame.m_index = 0;
    m_frame.m_isWasmFrame = false;

    CallFrame* topFrame;
    if (startFrame) {
        ASSERT(vm);
        ASSERT(!vm->topCallFrame || reinterpret_cast<void*>(vm->topCallFrame) != vm->topEntryFrame);

        m_frame.m_entryFrame = vm->topEntryFrame;
        topFrame = vm->topCallFrame;

        if (topFrame && topFrame->isStackOverflowFrame()) {
            // A stack-overflow sentinel frame is not a real frame: skip to its
            // caller, which may live in an earlier entry frame.
            topFrame = topFrame->callerFrame(m_frame.m_entryFrame);
            m_topEntryFrameIsEmpty = (m_frame.m_entryFrame != vm->topEntryFrame);
            // If the caller asked to start at the sentinel, start at its caller.
            if (startFrame == vm->topCallFrame)
                startFrame = topFrame;
        }
    } else {
        // No start frame: produce an empty (at-end) visitor.
        m_frame.m_entryFrame = 0;
        topFrame = 0;
    }
    m_frame.m_callerIsEntryFrame = false;
    readFrame(topFrame);

    // Find the frame the caller wants to start unwinding from.
    while (m_frame.callFrame() && m_frame.callFrame() != startFrame)
        gotoNextFrame();
}
/* Closes the link from the transmitter side: sends DISC, waits (with the
 * alarm-driven retry budget) for the peer's DISC, answers with UA, then
 * restores the saved terminal settings and closes the port.
 * Returns 1 on success, -1 if tcsetattr fails; exits the process when the
 * retry budget is exhausted. */
int llclose_transmitter(int fd) {
    info->tentativas = tentativas;
    while (info->tentativas > 0) {
        buildFrame(info->flag, "disc");
        transmitirFrame(info->frameSend, info->frameSendLength);
        start_alarm();
        info->frameTempLength = readFrame(info->frameTemp);
        /* FIX: the previous version did `char *type = malloc(5);` and then
         * immediately overwrote the pointer with verifyFrameType()'s result,
         * leaking 5 bytes per retry and never using `type`.  The call is kept
         * in case verifyFrameType() has side effects. */
        verifyFrameType(info->frameTemp);
        if (verifyFrame(info->frameTemp, info->frameTempLength, "disc")) {
            buildFrame(info->flag, "ua");
            if (transmitirFrame(info->frameSend, info->frameSendLength))
                break;
        }
    }
    if (info->tentativas == 0) {
        printf("Número de tentativas chegou ao fim. \n");
        exit(-1);
    }
    sleep(1);
    /* Restore the terminal configuration saved at open time. */
    if (tcsetattr(info->fd, TCSANOW, &info->oldtio) == -1) {
        perror("tcsetattr");
        return -1;
    }
    close(fd);
    printf("fechou transmissor\n");
    return 1;
}
// Worker-thread entry point: keeps pulling frames for as long as the
// thread is alive.
void ofxColorStream::threadedFunction()
{
    for (; isThreadRunning(); ) {
        readFrame();
    }
}
// Decodes the frame for `timeWanted` (-1 means "next frame") and renders it
// into `pBmps`.  Returns FA_NEW_FRAME when a fresh frame was produced and
// FA_USE_LAST_FRAME when the caller should keep showing the previous one.
FrameAvailableCode SyncVideoDecoder::getRenderedBmps(vector<BitmapPtr>& pBmps, float timeWanted)
{
    AVG_ASSERT(getState() == DECODING);
    ScopeTimer timer(RenderToBmpProfilingZone);

    FrameAvailableCode frameAvailable;
    if (timeWanted == -1) {
        readFrame(m_pFrame);
        frameAvailable = FA_NEW_FRAME;
    } else {
        frameAvailable = readFrameForTime(m_pFrame, timeWanted);
    }

    if (frameAvailable == FA_USE_LAST_FRAME || isEOF()) {
        return FA_USE_LAST_FRAME;
    }

    allocFrameBmps(pBmps);
    if (!pixelFormatIsPlanar(getPixelFormat())) {
        // Packed pixel format: a single conversion fills the one bitmap.
        m_pFrameDecoder->convertFrameToBmp(m_pFrame, pBmps[0]);
        return FA_NEW_FRAME;
    }

    {
        // Planar format: copy each plane into its own bitmap.
        ScopeTimer copyTimer(CopyImageProfilingZone);
        for (unsigned plane = 0; plane < pBmps.size(); ++plane) {
            m_pFrameDecoder->copyPlaneToBmp(pBmps[plane], m_pFrame->data[plane],
                    m_pFrame->linesize[plane]);
        }
    }
    return FA_NEW_FRAME;
}
uint32_t TFramedTransport::readSlow(uint8_t* buf, uint32_t len) { uint32_t want = len; uint32_t have = rBound_ - rBase_; // We should only take the slow path if we can't satisfy the read // with the data already in the buffer. assert(have < want); // Copy out whatever we have. if (have > 0) { memcpy(buf, rBase_, have); want -= have; buf += have; } // Read another frame. if (!readFrame()) { // EOF. No frame available. return 0; } // TODO(dreiss): Should we warn when reads cross frames? // Hand over whatever we have. uint32_t give = std::min(want, static_cast<uint32_t>(rBound_ - rBase_)); memcpy(buf, rBase_, give); rBase_ += give; want -= give; return (len - want); }
// Reads one SFLAP frame and copies its payload into `data`.
// Returns readFrame()'s result; on -1 (failure) `data` is left untouched.
// NOTE(review): `len` comes from the caller, not from the received frame —
// if `len` exceeds the bytes actually present in frame.data this memcpy
// over-reads the frame buffer; confirm callers pass the frame payload size.
int KitSocket::read(char *data, int len) {
    sflap_frame frame;
    int i = readFrame(frame);
    if(i != -1)
        memcpy( (void *)data, (void *)frame.data, len );
    return i;
}
// ###################################################################### void SerialPort::connect() { // Check to see if we have a hardcoded device name. If so, then let's just // go ahead and enable that port. If printf("INFO: Looking Device Name [%s]\n", itsDevName.c_str()); if (itsDevName != "") { printf("INFO: Opening %s\n", itsDevName.c_str()); enablePort(itsDevName); } else if (itsDevName == "search") { printf("INFO: Searching for devices\n"); itsCmdDevName = ""; DIR *directory_p; struct dirent *entry_p; // Open the device directory to search for devices whose names match the // search prefix directory_p = ::opendir("/dev"); if (directory_p == NULL) printf("FATAL ERROR: Could Not Open /dev Directory!\n"); // Iterate through the directory entries while ((entry_p = ::readdir(directory_p))) { std::string entryName(entry_p->d_name); if (entryName.find(itsSearchPrefix.c_str()) != std::string::npos) { // If the directory entry name matches our // search prefix, then let's try configuring a // serial // port on that device, sending it an identity request command (0x00), // and comparing the result // with our required device description enablePort("/dev/" + entryName); unsigned char cmd[1] = {0}; write(cmd, 1); std::vector<unsigned char> deviceStringVec = readFrame(cmd[0], 255); std::string deviceString(deviceStringVec.begin(), deviceStringVec.end()); printf("INFO: %s : %s", entryName.c_str(), deviceString.c_str()); if (deviceString == itsDeviceDescription) { itsCmdDevName = "/dev/" + entryName; break; } } } (void)::closedir(directory_p); if (itsCmdDevName == "") { printf( "FATAL ERROR: Could Not Find Serial Device Matching Descriptor " "(%s)\n", itsDeviceDescription.c_str()); } } else { printf("INFO: Opening from cmd line %s\n", itsCmdDevName.c_str()); enablePort(itsCmdDevName); } }
// Opens the capture device and polls it for a new frame every 100 ms.
void FFmpegVideo::init(const QString &dev)
{
    openCamera(dev);

    // Wire the poll timer to readFrame() before starting it.
    connect(m_timer, SIGNAL(timeout()), this, SLOT(readFrame()));

    const int pollIntervalMs = 100;
    m_timer->start(pollIntervalMs);
}
// Builds a point cloud from the frame stored at sequence index `idx`.
clams::Cloud::Ptr clams::StreamSequenceBase::getCloud(size_t idx) const
{
    Frame frame;
    readFrame(idx, &frame);

    Cloud::Ptr cloud(new Cloud);
    proj_.frameToCloud(frame, cloud.get());
    return cloud;
}
// Builds a point cloud from the frame nearest `timestamp`; the time delta
// between the request and the chosen frame is written to `dt`.
clams::Cloud::Ptr clams::StreamSequenceBase::getCloud(double timestamp, double* dt) const
{
    Frame frame;
    readFrame(timestamp, dt, &frame);

    Cloud::Ptr cloud(new Cloud);
    proj_.frameToCloud(frame, cloud.get());
    return cloud;
}
// Resets the frame buffer to an empty state and primes it with one frame.
void Editor::resetBuffer()
{
    m_frameIdx = 0;
    m_headFrame = 0;
    m_fullBuffer = false;

    readFrame(true);
    ++m_headFrame;
}
// Capture widget constructor: builds the UI, resets all state flags, sets up
// the UDP socket (port 63636), the serial port (/dev/ttyUSB0), the frame and
// serial poll timers, and wires every signal/slot connection.
Capture::Capture(QWidget *parent) : QWidget(parent), ui(new Ui::Capture)
{
    ui->setupUi(this);

    //initialize
    ///111
    take_photo_flag = 0;
    ///111
    num = 0;
    send_frame_flag = 0;
    //capture = NULL;
    //timer = new QTimer(this);
    photos = new Photos();
    this->first_open_cam_flag = 0;
    this->open_cam_flag=0;
    timer=new QTimer(this);
    // NOTE(review): `timer` is connected to both update() here and
    // readFrame() below, so each tick triggers both slots — confirm intended.
    connect(timer,SIGNAL(timeout()),this,SLOT(update()));

    //initialize socket, bind port 63636
    send_socket = new QUdpSocket(this);
    send_socket->bind(63636, QUdpSocket::ShareAddress);

    // Buttons start disabled until the camera is opened.
    ui->close_pushButton->setDisabled(true);
    //ui->image_close_pushButton->setDisabled(false);
    ui->take_photo_pushButton->setDisabled(true);

    //this->setWindowFlags(Qt::Tool | Qt::X11BypassWindowManagerHint);
    this->setWindowFlags(Qt::FramelessWindowHint);
    ui->video_label->setStyleSheet ("background-color: #000000;color: #ffffff;");
    //C_translateLanguage();//set the content of button
    this->picNum = 0;

    // Serial port: 4800 baud, 8N1, no flow control, 10 ms timeout; polled
    // every 10 ms via readTimer.  The port itself is opened elsewhere.
    this->serial_open_flag = 0;
    struct PortSettings myComSetting = {BAUD4800,DATA_8,PAR_NONE,STOP_1,FLOW_OFF,10};
    myCom = new Posix_QextSerialPort("/dev/ttyUSB0",myComSetting,QextSerialBase::Polling);
    //myCom->open(QIODevice::ReadWrite);
    readTimer = new QTimer(this);
    readTimer->start(10);
    connect(readTimer,SIGNAL(timeout()),this,SLOT(readMyCom()));

    // Remaining signal/slot wiring: network, camera, photo browser.
    connect(send_socket, SIGNAL(readyRead()), this, SLOT(readData()));
    connect(ui->open_pushButton,SIGNAL(clicked()),this,SLOT(openCapture()));
    connect(ui->close_pushButton,SIGNAL(clicked()),this,SLOT(closeCapture()));
    connect(timer,SIGNAL(timeout()),this,SLOT(readFrame()));
    connect(ui->image_open_pushButton, SIGNAL(clicked()), this, SLOT(openPhotos()));
    connect(ui->image_close_pushButton, SIGNAL(clicked()), this, SLOT(closePhotos()));
    connect(photos, SIGNAL(closeImage()), this, SLOT(closePhotos()));
    connect(ui->take_photo_pushButton, SIGNAL(clicked()), this, SLOT(takePhotos()));
}
// Builds a visitor positioned on `startFrame`; a null start frame yields an
// empty (at-end) visitor.
StackVisitor::StackVisitor(CallFrame* startFrame)
{
    m_frame.m_index = 0;
    m_frame.m_VMEntryFrame = startFrame ? startFrame->vm().topVMEntryFrame : 0;
    m_frame.m_callerIsVMEntryFrame = false;
    readFrame(startFrame);
}
// Dialog constructor: builds the UI, wires the frame-poll timer (started
// elsewhere) to readFrame(), and hides the controls unused by this dialog.
Dialog1::Dialog1(QWidget *parent) : QDialog(parent), ui(new Ui::Dialog1)
{
    ui->setupUi(this);
    timer = new QTimer(this);
    connect(timer, SIGNAL(timeout()), this, SLOT(readFrame()));
    //ui->nextLoop->setVisible(false);
    ui->openCamera_2->setVisible(false);
    ui->camFrameView->setVisible(false);
}
// Seeks the stream to `destTime`.  If nothing has been decoded yet, one
// frame is read first so the decoder is initialised before the demuxer seek.
void SyncVideoDecoder::seek(float destTime)
{
    AVG_ASSERT(getState() == DECODING);
    if (m_bFirstPacket) {
        readFrame(m_pFrame);
    }
    m_pDemuxer->seek(destTime);
    m_bVideoSeekDone = true;  // next decode knows a seek just happened
    m_pFrameDecoder->handleSeek();
}
// Advances the visitor to the caller of the current frame, unwinding DFG
// inlined frames before crossing a machine-frame boundary.
void StackVisitor::gotoNextFrame()
{
#if ENABLE(DFG_JIT)
    if (m_frame.isInlinedFrame()) {
        InlineCallFrame* inlineCallFrame = m_frame.inlineCallFrame();
        CodeOrigin* callerCodeOrigin = inlineCallFrame->getCallerSkippingDeadFrames();
        if (!callerCodeOrigin) {
            // No live inlined caller: pop the remaining inline stack, then
            // step out to the machine caller frame.
            while (inlineCallFrame) {
                readInlinedFrame(m_frame.callFrame(), &inlineCallFrame->directCaller);
                inlineCallFrame = m_frame.inlineCallFrame();
            }
            m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
            readFrame(m_frame.callerFrame());
        } else
            // Live inlined caller: stay within the same machine frame.
            readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
        return;
    }
#endif // ENABLE(DFG_JIT)
    m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
    readFrame(m_frame.callerFrame());
}
// Advances the iterator to the caller of the current frame.  With the DFG
// enabled, an inlined frame steps to its inlining caller instead of the
// machine caller.
void StackIterator::gotoNextFrame()
{
#if ENABLE(DFG_JIT)
    if (m_frame.isInlinedFrame()) {
        InlineCallFrame* inlineCallFrame = m_frame.inlineCallFrame();
        CodeOrigin* callerCodeOrigin = &inlineCallFrame->caller;
        readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
    } else
#endif // ENABLE(DFG_JIT)
        // Note: when DFG_JIT is enabled this is the else-branch of the test
        // above; otherwise it is the unconditional body.
        readFrame(m_frame.callerFrame());
}
// Advances the visitor to the caller of the current frame; a DFG inlined
// frame steps to its inlining caller without leaving the machine frame.
void StackVisitor::gotoNextFrame()
{
#if ENABLE(DFG_JIT)
    if (m_frame.isInlinedFrame()) {
        InlineCallFrame* inlineCallFrame = m_frame.inlineCallFrame();
        CodeOrigin* callerCodeOrigin = &inlineCallFrame->caller;
        readInlinedFrame(m_frame.callFrame(), callerCodeOrigin);
        return;
    }
#endif // ENABLE(DFG_JIT)
    m_frame.m_VMEntryFrame = m_frame.m_CallerVMEntryFrame;
    readFrame(m_frame.callerFrame());
}
void openCommon() { XnStatus nRetVal = XN_STATUS_OK; g_bIsDepthOn = false; g_bIsImageOn = false; g_bIsIROn = false; g_bIsAudioOn = false; g_bIsPlayerOn = false; NodeInfoList list; nRetVal = g_Context.EnumerateExistingNodes(list); if (nRetVal == XN_STATUS_OK) { for (NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it) { switch ((*it).GetDescription().Type) { case XN_NODE_TYPE_DEVICE: (*it).GetInstance(g_Device); break; case XN_NODE_TYPE_DEPTH: g_bIsDepthOn = true; (*it).GetInstance(g_Depth); break; case XN_NODE_TYPE_IMAGE: g_bIsImageOn = true; (*it).GetInstance(g_Image); break; case XN_NODE_TYPE_IR: g_bIsIROn = true; (*it).GetInstance(g_IR); break; case XN_NODE_TYPE_AUDIO: g_bIsAudioOn = true; (*it).GetInstance(g_Audio); break; case XN_NODE_TYPE_PLAYER: g_bIsPlayerOn = true; (*it).GetInstance(g_Player); } } } XnCallbackHandle hDummy; g_Context.RegisterToErrorStateChange(onErrorStateChanged, NULL, hDummy); initConstants(); readFrame(); }
// Main control loop: consume any complete radio frame, sample the sensors,
// then run the PID controllers and drive the ESCs.
void loop() {
    unsigned char data[8];

    // Short-circuit keeps readFrame() from running when no bytes are pending.
    if (mySerial.available() > 0 && readFrame(&mySerial, data) == FRAME_COMPLETE) {
        controller.updateFromDataArray(data);
    }

    acc.read();
    gyro.read();
    alt.start();

    quad.computePIDs();
    quad.setESCs();
}
// Worker tick: grabs a frame, updates coordinates, and reports the grab
// rate roughly once per second.
void SpecificWorker::compute()
{
    static int fps = 0;
    static QTime reloj = QTime::currentTime();

    readFrame();
    computeCoordinates();

    const bool secondElapsed = reloj.elapsed() > 1000;
    if (secondElapsed) {
        qDebug()<<"Grabbing at:"<<fps<<"fps";
        reloj.restart();
        fps = 0;
    }
    // Count this frame after the report, matching the reporting window.
    fps++;
}
// Blocking receive: fills `buffer` with exactly `length` bytes, pulling
// additional WebSocket frames into the internal buffer as needed.
// Returns false if not connected or a frame read fails.
bool WebSocket::recv(char *buffer, int length)
{
    if (!connected())
        return false;

    while (length != 0) {
        // Refill the internal buffer until some data is available.
        while (!buf.length()) {
            if (!readFrame(buf))
                return false;
        }

        const int chunk = qMin(buf.length(), length);
        memcpy(buffer, buf.data(), chunk);
        buf.remove(0, chunk);
        buffer += chunk;
        length -= chunk;
    }
    return true;
}
/* llwrite: builds and sends one stuffed information frame, then waits, with
 * the alarm-driven retry budget, for the matching receiver-ready (RR)
 * acknowledgement.  A reject (REJ) triggers immediate retransmission.
 * Returns 1 on success; exits the process when the retry budget runs out.
 * NOTE(review): info->tentativas is only read here — presumably the alarm
 * handler decrements it and retransmits on timeout; confirm. */
int llwrite(int fd, char * buffer, int length)
{
    comporTramaI(TRANSMITTER, buffer, length);
    stuffing(info->frameSend, &info->frameSendLength);
    //printf("partes: %x, %x, %x, %x, %x, %x, %x, %x, %x \n", tramaI[0],tramaI[1],tramaI[2],tramaI[3],tramaI[4],tramaI[5],tramaI[6],tramaI[7],tramaI[8]);
    transmitirFrame(info->frameSend, info->frameSendLength);
    //printf("enviar frame I com sequenceNumber = %d \n", info->sequenceNumber);
    info->tentativas = tentativas;
    while(info->tentativas > 0)
    {
        start_alarm();
        info->frameTempLength = readFrame(info->frameTemp);
        if (info->sequenceNumber == 1)
        {
            /* Sent frame 1: expect RR0 (ack) or REJ0 (negative ack). */
            if (verifyFrame(info->frameTemp, info->frameTempLength, "rr0"))
            {
                //printf("recebeu rr corretamente \n");
                stop_alarm();
                info->tentativas = tentativas;  /* reset budget for next call */
                break;
            }
            else if (verifyFrame(info->frameTemp, info->frameTempLength, "rej0"))
            {
                //printf("recebeu rej0\n");
                transmitirFrame(info->frameSend, info->frameSendLength);
                continue;
            }
        }
        else if (info->sequenceNumber == 0)
        {
            /* Sent frame 0: expect RR1 or REJ1. */
            if (verifyFrame(info->frameTemp, info->frameTempLength, "rr1"))
            {
                //printf("recebeu rr corretamente \n");
                stop_alarm();
                info->tentativas = tentativas;
                break;
            }
            else if (verifyFrame(info->frameTemp, info->frameTempLength, "rej1"))
            {
                //printf("recebeu rej1\n");
                transmitirFrame(info->frameSend, info->frameSendLength);
                continue;
            }
        }
    }
    if (info->tentativas == 0)
    {
        printf("Número de tentativas chegou ao fim. \n");
        exit(-1);
    }
    /* Alternate the stop-and-wait sequence number for the next frame. */
    info->sequenceNumber = !info->sequenceNumber;
    //printf("retornar llwrite\n");
    return 1;
}
// Decode-timer tick: keeps the packet queues topped up, and decodes a video
// frame once enough packets are buffered.
void QVideoDecoder::timerEvent(QTimerEvent * te)
{
    if (te->timerId() != m_decode_timer)
        return;

    const bool queuesLow = g_packet_queue.audio_packets.count() < 100
                        || g_packet_queue.video_packets.count() < 100;
    if (queuesLow) {
        readFrame();
    } else if (g_packet_queue.video_packets.count() > 1) {
        decodeVideoFrame();
    }
}