void MultiFramedRTPSink ::doSpecialFrameHandling(unsigned /*fragmentationOffset*/, unsigned char* /*frameStart*/, unsigned /*numBytesInFrame*/, struct timeval framePresentationTime, unsigned /*numRemainingBytes*/) { //qDebug()<<"void MultiFramedRTPSink ::doSpecialFrameHandling 61 "; // default implementation: If this is the first frame in the packet, // use its presentationTime for the RTP timestamp: if (isFirstFrameInPacket()) { setTimestamp(framePresentationTime); } }
Event::Event(EventManager::EventType type, QVariantMap &map)
    : _type(type)
    , _valid(true)
{
    // A serialized event must carry both mandatory keys; otherwise mark the
    // event invalid and bail out.
    const bool hasFlags = map.contains("flags");
    const bool hasStamp = map.contains("timestamp");
    if (!hasFlags || !hasStamp) {
        qWarning() << "Received invalid serialized event:" << map;
        setValid(false);
        return;
    }

    // take() deliberately removes the consumed keys from the caller's map.
    setFlags(static_cast<EventManager::EventFlags>(map.take("flags").toInt())); // TODO sanity check?
    setTimestamp(QDateTime::fromTime_t(map.take("timestamp").toUInt()));
}
void DVVideoRTPSink::doSpecialFrameHandling(unsigned fragmentationOffset, unsigned char* /*frameStart*/, unsigned /*numBytesInFrame*/, struct timeval framePresentationTime, unsigned numRemainingBytes) { if (numRemainingBytes == 0) { // This packet contains the last (or only) fragment of the frame. // Set the RTP 'M' ('marker') bit: setMarkerBit(); } // Also set the RTP timestamp: setTimestamp(framePresentationTime); }
/** Default constructor.
 *
 *  Builds a tree-widget row for one log message.
 *  @param type      severity of the message (drives the row color)
 *  @param message   the log message text
 *  @param timestamp time at which the message was logged
 *
 *  Fix: the parameter was garbled to `×tamp` (an HTML `&times;` entity
 *  swallowed the `&` of `&timestamp`), which does not compile; restored the
 *  intended `const QDateTime &timestamp`.
 */
LogTreeItem::LogTreeItem(tc::Severity type, const QString &message,
                         const QDateTime &timestamp)
  : QTreeWidgetItem()
{
  /* Monotonically increasing sequence number shared by all items.
   * NOTE(review): not thread-safe — assumed to run on the GUI thread only. */
  static quint32 seqnum = 0;

  /* Set this message's sequence number */
  _seqnum = seqnum++;

  /* Set the item's log time */
  setTimestamp(timestamp);

  /* Set the item's severity and appropriate color. */
  setSeverity(type);

  /* Set the item's message text. */
  setMessage(message);
}
/** * @brief タッチセンサがオンを検出した時に回転して回避する運動 * @param dir 方向(右がtrue、左がfalse) */ void ControlEducatorVehicle::rotate_move(bool dir) { m_target_velocity_out.data.vx = 0; m_target_velocity_out.data.vy = 0; if (dir)m_target_velocity_out.data.va = m_rotate_speed; else m_target_velocity_out.data.va = -m_rotate_speed; setTimestamp(m_target_velocity_out); m_target_velocity_outOut.write(); double sec, usec; usec = modf(m_rotate_time, &sec); coil::TimeValue ts((int)sec, (int)(usec*1000000.0)); coil::sleep(ts); stop_robot(); }
/** * @brief タッチセンサがオンを検出した時に後退して離れる運動 */ void ControlEducatorVehicle::back_move() { m_target_velocity_out.data.vx = -m_back_speed; m_target_velocity_out.data.vy = 0; m_target_velocity_out.data.va = 0; setTimestamp(m_target_velocity_out); m_target_velocity_outOut.write(); double sec, usec; usec = modf(m_back_time, &sec); coil::TimeValue ts((int)sec, (int)(usec*1000000.0)); coil::sleep(ts); stop_robot(); }
// invoke a method int oDateTime::invokeMethod(qlong pMethodId, EXTCompInfo* pECI) { switch (pMethodId) { case 1: { // add seconds EXTParamInfo* param1 = ECOfindParamNum( pECI, 1 ); EXTfldval tmpData((qfldval) param1->mData); int tmpSeconds = tmpData.getLong(); setTimestamp(mTimestamp + tmpSeconds); return 1L; }; break; default: { return oBaseNVComponent::invokeMethod(pMethodId, pECI); }; break; } };
void VLCVideoTextureObjChunk::checkForSync(void) { if ((libvlc == NULL) || (vlcmediaplayer == NULL)) return; if (libvlc_media_player_is_playing(vlcmediaplayer)!=1) return; // only the master sets the timestamp if (getIsMaster()==true) { OSG::TimeStamp now = OSG::getTimeStamp(); if (OSG::getTimeStampMsecs(now-lastSync) > getUpdatefreq()) { lastSync=now; setTimestamp(libvlc_media_player_get_time(vlcmediaplayer)+NETWORK_LATENCY); commitChanges(); } } }
/*
 * When a transaction begins, we update its core and epoch.
 */
virtual void OnBeginTransaction() {
    if (m_newTx) {
        m_newTx = false;

        /*
         * Very first activation of this transaction: stamp it with the
         * current wall-clock second and start it out as read-only.
         */
        if (getTimestamp() == NULL) {
            setReadOnly(true);

            struct timeval tv;
            gettimeofday(&tv, NULL);
            setTimestamp(tv.tv_sec);
        }
    }

    // Always refresh the epoch from the scheduler for this core.
    m_epoch = stm::scheduler::BiModalScheduler::instance()->getCurrentEpoch(m_iCore);
}
/*
 * COMMAND CENTRE
 *
 * Executes a one-letter command (the first character of `string`); the rest
 * of the string carries an optional time/numeric argument parsed with
 * stringToTime(). Returns the controller status string, or an empty string
 * for empty input / unknown commands.
 *
 * Fix: `String s = NULL;` constructed an Arduino String from a NULL
 * const char* (strlen(NULL) — undefined behavior); use an empty string as
 * the "no result" sentinel instead. The argument is now also parsed only
 * after the emptiness check, instead of unconditionally up front.
 */
String Controller::doCommand(String string) {
  //Log.Debug("Command: %s", string);
  String s = "";
  if (string.length() == 0) {
    return s; // nothing to do
  }
  char command = string.charAt(0);
  // Numeric argument for the movement/configuration commands.
  int i = (int) stringToTime(string);
  switch (command) {
    case 'A': // status request only
      break;
    case 'L': // move left
      moveLeft(i);
      break;
    case 'R': // move right
      moveRight(i);
      break;
    case 'I': // update interval
      setInterval(i);
      break;
    case 'J': // update step size
      setStepSize(i);
      break;
    case 'S': // update start/end/rewind times
      setTimes(string);
      break;
    case 'T': // update date and time (full precision, hence no int cast)
      setTimestamp(stringToTime(string));
      break;
    default: // unknown command: return the empty sentinel
      return s;
  }
  s = getStatus();
  return s;
}
void Timestamp::setMonth(int month) { if (month > 12) throw invalid_argument(StringUtil::substitute("month is over 12: {0}", month)); //달력 int year = getYear(); int newDate = getDate(); array<int, 12> &monthArray = getLastDateArray(year); //해당 월의 마지막 일을 초과할 때, 조정한다 //EX-> 4월인데 31일이면 4월 30일로 조정 if (newDate > monthArray[month - 1]) newDate = monthArray[month - 1]; setTimestamp(year, month, newDate, getHour(), getMinute(), getSecond()); };
// Streams the next chunk of the WAV file to the output port, paced by wall
// clock: each invocation reads as many frames as have "elapsed" since the
// previous one (elapsed seconds * sample rate).
//
// Fix: the original ignored the return value of sf_readf_short(), so at end
// of file it published `bufferlen` frames of uninitialized memory. The output
// length is now derived from the frame count libsndfile actually delivered.
RTC::ReturnCode_t WavPlayer::onExecute(RTC::UniqueId ec_id)
{
  RTC_DEBUG(("onExecute start"));
  coil::TimeValue now = coil::gettimeofday();

  // Frames elapsed since the previous invocation.
  long bufferlen = long((now - m_timer) * m_samplerate);
  if ( bufferlen <= 0 ) return RTC::RTC_OK;
  m_timer = now;

  short *buffer = new short[bufferlen];
  // Frames actually read; may be short (or zero) near end of file.
  long framesRead = (long)sf_readf_short(sfr, buffer, bufferlen);
  if (framesRead > 0) {
    m_out_data.data.length(framesRead * 2); //!< set outport data length (bytes; 16-bit mono samples)
    memcpy((void *)&(m_out_data.data[0]), (void *)&(buffer[0]), framesRead * 2);
    setTimestamp(m_out_data);
    m_out_dataOut.write();
  }
  delete [] buffer;

  RTC_DEBUG(("onExecute finish"));
  return RTC::RTC_OK;
}
RTC::ReturnCode_t ControlEducatorVehicle::onActivated(RTC::UniqueId ec_id)
{
	// Reset cached velocity and sensor state.
	vx = 0;
	vy = 0;
	va = 0;
	touch_r = touch_l = false;
	range = 0;

	// Publish a zero angle so downstream components start from a known pose.
	m_angle.data = 0;
	setTimestamp(m_angle);
	m_angleOut.write();

	stop_flag = false;
	return RTC::RTC_OK;
}
// Pulls one chunk of captured audio from the PulseAudio stream and publishes
// it on the RTC output port.
//
// Locking protocol (order matters):
//  1. m_mutex serializes this callback with other component callbacks that
//     touch m_simple (presumably teardown — confirm against onDeactivated).
//  2. pa_threaded_mainloop_lock() is held around pa_stream_peek() /
//     pa_stream_drop(); while no readable data is available yet,
//     pa_threaded_mainloop_wait() blocks inside the loop.
// The peeked buffer is copied into m_out_data, timestamped, written to the
// port, and then released with pa_stream_drop().
// NOTE(review): pa_stream_peek() can also fail (r < 0) leaving read_data
// NULL; this loop treats that the same as "no data yet" — confirm intended.
RTC::ReturnCode_t PulseAudioInput::onExecute(RTC::UniqueId ec_id) { RTC_DEBUG(("onExecute start")); m_mutex.lock(); RTC_DEBUG(("onExecute:mutex lock")); if( m_simple ) { int r; simple_recast *psimple = (simple_recast *)m_simple; pa_threaded_mainloop_lock( psimple->mainloop ); RTC_DEBUG(("pa_threaded_mainloop_lock()")); while ( !psimple->read_data ) { r = pa_stream_peek( psimple->stream, &psimple->read_data, &psimple->read_length ); if ( !psimple->read_data ) { RTC_DEBUG(("pa_stream_peek():no readable data. wait start.")); pa_threaded_mainloop_wait(psimple->mainloop); } } m_out_data.data.length( psimple->read_length ); //!< set outport data length memcpy((void *)&(m_out_data.data[0]), (const uint8_t*) psimple->read_data, psimple->read_length); r = pa_stream_drop( psimple->stream ); if ( r < 0 ) { RTC_WARN(("pa_stream_drop():capture stream drop failed.")); } psimple->read_data = NULL; psimple->read_length = 0; psimple->read_index = 0; setTimestamp( m_out_data ); m_out_dataOut.write(); RTC_DEBUG(("AudioDataOut port:ON_BUFFER_WRITE")); pa_threaded_mainloop_unlock( psimple->mainloop ); RTC_DEBUG(("pa_threaded_mainloop_unlock()")); } m_mutex.unlock(); RTC_DEBUG(("onExecute:mutex unlock")); RTC_DEBUG(("onExecute finish")); return RTC::RTC_OK; }
// Copy constructor (from a shared pointer): duplicates every top-level
// connection property of `other` into this object, then delegates to
// ConnectionSettingsPrivate::initSettings(other) to copy the per-type
// setting groups. initSettings() is deliberately called last — it
// presumably depends on the properties copied above (notably the
// connection type); confirm before reordering.
NetworkManager::ConnectionSettings::ConnectionSettings(const NetworkManager::ConnectionSettings::Ptr &other) : d_ptr(new ConnectionSettingsPrivate(this)) { Q_D(ConnectionSettings); setId(other->id()); setUuid(other->uuid()); setInterfaceName(other->interfaceName()); setConnectionType(other->connectionType()); setPermissions(other->permissions()); setAutoconnect(other->autoconnect()); setAutoconnectPriority(other->autoconnectPriority()); setTimestamp(other->timestamp()); setReadOnly(other->readOnly()); setZone(other->zone()); setMaster(other->master()); setSlaveType(other->slaveType()); setGatewayPingTimeout(other->gatewayPingTimeout()); d->initSettings(other); }
void VP8VideoRTPSink ::doSpecialFrameHandling(unsigned fragmentationOffset, unsigned char* /*frameStart*/, unsigned /*numBytesInFrame*/, struct timeval framePresentationTime, unsigned numRemainingBytes) { // Set the "VP8 Payload Descriptor" (just the minimal required 1-byte version): u_int8_t vp8PayloadDescriptor = fragmentationOffset == 0 ? 0x10 : 0x00; // X = R = N = 0; PartID = 0; S = 1 iff this is the first (or only) fragment of the frame setSpecialHeaderBytes(&vp8PayloadDescriptor, 1); if (numRemainingBytes == 0) { // This packet contains the last (or only) fragment of the frame. // Set the RTP 'M' ('marker') bit: setMarkerBit(); } // Also set the RTP timestamp: setTimestamp(framePresentationTime); }
// One-time initialization of the "new POI" dialog:
//  - icons for the location-selector (only if a map location selector is
//    available) and the timestamp button,
//  - the country / country-code combo boxes: a "None" entry followed by one
//    entry per key of FCountryCodeMap (name box shows the mapped name, code
//    box shows the flag icon),
//  - the POI type combo box: a translated "none" entry, then each root type
//    followed by its "root:sub" children, with per-type icons and a custom
//    item delegate; the box is non-editable,
//  - finally the timestamp is cleared via setTimestamp(QDateTime()).
void NewPoi::init() { if (FMapLocationSelector) ui->selectLocation->setIcon(IconStorage::staticStorage(RSR_STORAGE_MAPICONS)->getIcon(MPI_NEWCENTER)); ui->pbTimestamp->setIcon(IconStorage::staticStorage(RSR_STORAGE_MENUICONS)->getIcon(MNI_CLIENTINFO_TIME)); fillCountryMap(); //--- Setup country names and codes --- ui->boxCountry->addItem(tr("None")); ui->boxCountryCode->addItem(tr("None")); QStringList keys=FCountryCodeMap.keys(); for(QStringList::const_iterator it=keys.constBegin(); it!=keys.constEnd(); it++) { ui->boxCountry->addItem(FCountryCodeMap[*it], *it); ui->boxCountryCode->addItem(FCountryIconStorage->getIcon(*it), *it); } ui->boxCountry->setCurrentIndex(0); //--- Setup POI types --- QHash<QString, QString> lstTranslated = FPoi->getTranslatedTypes(); QMap<QString, QString> lstAllType = FPoi->getTypeMap(); QStringList rootTypes(FPoi->getTypeMap().keys()); rootTypes.removeDuplicates(); ui->boxType->setItemDelegate(new TypeItemDelegate); ui->boxType->addItem(FPoi->getIcon(MNI_POI_NONE), lstTranslated.value("none"), "dir:none"); for(QStringList::const_iterator it=rootTypes.constBegin(); it!=rootTypes.constEnd(); it++) { QStringList lst = lstAllType.values(*it); ui->boxType->addItem(FPoi->getTypeIcon(*it), lstTranslated.value(*it), "dir:"+*it); for(QStringList::const_iterator pit=lst.constBegin(); pit!=lst.constEnd(); pit++) { QString id=*it+':'+*pit; ui->boxType->addItem(FPoi->getTypeIcon(id), lstTranslated.value(id), id); } } ui->boxType->setEditable(false); setTimestamp(QDateTime()); }
void MessageCenter::process(AudioSampleBuffer& buffer, MidiBuffer& eventBuffer)
{
    softTimestamp = Time::getHighResolutionTicks() - lastTime;

    setTimestamp(eventBuffer, getTimestamp());

    if (needsToSendTimestampMessage)
    {
        // Broadcast the software clock as "<ticks>@<ticksPerSecond>Hz".
        String eventString = "Software time: " + String(getTimestamp(true)) + "@"
                             + String(Time::getHighResolutionTicksPerSecond()) + "Hz";
        CharPointer_UTF8 data = eventString.toUTF8();

        addEvent(eventBuffer,
                 MESSAGE,
                 0,
                 0,
                 0,
                 data.length() + 1, //It doesn't hurt to send the end-string null and can help avoid issues
                 (uint8*) data.getAddress());

        needsToSendTimestampMessage = false;
    }

    if (newEventAvailable)
    {
        // Forward the user-entered message from the editor label.
        String eventString = messageCenterEditor->getLabelString();
        CharPointer_UTF8 data = eventString.toUTF8();

        addEvent(eventBuffer,
                 MESSAGE,
                 0,
                 0,
                 0,
                 data.length() + 1, //It doesn't hurt to send the end-string null and can help avoid issues
                 (uint8*) data.getAddress());

        newEventAvailable = false;
    }
}
void H264VideoRTPSink::doSpecialFrameHandling(unsigned /*fragmentationOffset*/, unsigned char* /*frameStart*/, unsigned /*numBytesInFrame*/, struct timeval frameTimestamp, unsigned /*numRemainingBytes*/) { // Set the RTP 'M' (marker) bit iff // 1/ The most recently delivered fragment was the end of (or the only fragment of) an NAL unit, and // 2/ This NAL unit was the last NAL unit of an 'access unit' (i.e. video frame). if (fOurFragmenter != NULL) { H264VideoStreamFramer* framerSource = (H264VideoStreamFramer*)(fOurFragmenter->inputSource()); // This relies on our fragmenter's source being a "H264VideoStreamFramer". if (fOurFragmenter->lastFragmentCompletedNALUnit() && framerSource != NULL && framerSource->pictureEndMarker()) { setMarkerBit(); framerSource->pictureEndMarker() = False; } } setTimestamp(frameTimestamp); }
//SET METHODS void Timestamp::setYear(int year) { int month = getMonth(); int newDate = getDate(); //윤달에 29일인데 addYear을 하려는 경우 if ( month == 2 && newDate == 29 //현재가 윤달 29일 && !( year == 4 && !((year % 100 == 0 && year % 400 != 0)) ) //바꿀 연도의 2월은 윤달이 아님 ) newDate = 28; setTimestamp(year, month, newDate, getHour(), getMinute(), getSecond()); };
// Parses a timestamp from "YYYY-MM-DD" or "YYYY-MM-DD HH:MM:SS".
//
// @param str the textual timestamp (surrounding whitespace is trimmed)
// @throws invalid_argument propagated from setTimestamp() on invalid fields;
//         std::out_of_range / std::invalid_argument from stoi()/at() on
//         malformed input (as before).
//
// Fixes: (1) the original bound non-const lvalue references to the
// temporaries returned by StringUtil::between()/split(), which is ill-formed
// standard C++ — the pieces are now taken by value; (2) the
// `catch (invalid_argument &e) { throw e; }` wrapper re-threw a *copy* of
// the exception (pointless and slicing-prone) — the exception now simply
// propagates.
Timestamp::Timestamp(const string &str)
{
	init();

	string val = StringUtil::trim(str);
	int year, month, date;
	int hour = 0, min = 0, sec = 0;
	string ymd;

	//시분초까지 있을 때 (a space means an HH:MM:SS part follows the date)
	if (val.find(" ") != string::npos)
	{
		ymd = StringUtil::between(val, "", " ");
		string hms = StringUtil::between(val, " ");
		vector<string> hmsVec = StringUtil::split(hms, ":");
		hour = stoi(hmsVec.at(0));
		min = stoi(hmsVec.at(1));
		sec = stoi(hmsVec.at(2));
	}
	else
		ymd = move(val);

	//년월일 설정 (year-month-day part)
	vector<string> ymdVec = StringUtil::split(ymd, "-");
	year = stoi(ymdVec.at(0));
	month = stoi(ymdVec.at(1));
	date = stoi(ymdVec.at(2));

	setTimestamp(year, month, date, hour, min, sec);
}
// One echo-cancellation pass. Once at least ECHOLEN samples are queued on
// both the microphone deque (m_indata) and the playback-reference deque
// (m_outdata), ECHOLEN samples are popped from each, run through
// speex_echo_cancellation(), and the cleaned samples are published on
// m_foutOut as little-endian 16-bit PCM (low byte first).
// If either deque has grown past BUFFER_MAX, both are cleared first.
// m_mutex is held only while draining the deques, not during the Speex call
// or the port write.
// NOTE(review): this chunk is truncated — the trailing `else` branch
// continues beyond the visible source; code left byte-identical.
RTC::ReturnCode_t EchoCanceler::onExecute(RTC::UniqueId ec_id) { RTC_DEBUG(("onExecute start")); if((m_indata.size() > BUFFER_MAX) || (m_outdata.size() > BUFFER_MAX)) { RTC_INFO(("One of buffers exceeded the maximum value. Start clear buffers.")); BufferClr(); } if (( m_indata.size() >= ECHOLEN) && (m_outdata.size() >= ECHOLEN)) { m_mutex.lock(); RTC_DEBUG(("onExecute:mutex lock")); int i; short *inbuffer = new short[ECHOLEN]; short *outbuffer = new short[ECHOLEN]; short *result = new short[ECHOLEN]; for ( i = 0; i < ECHOLEN; i++ ) { inbuffer[i] = m_indata.front(); m_indata.pop_front(); outbuffer[i] = m_outdata.front(); m_outdata.pop_front(); result[i] = 0; } m_mutex.unlock(); RTC_DEBUG(("onExecute:mutex unlock")); speex_echo_cancellation(mp_sest, inbuffer, outbuffer, result); delete[] inbuffer; delete[] outbuffer; m_fout.data.length(ECHOLEN * 2); for ( i = 0; i < ECHOLEN; i++ ) { short val = result[i]; m_fout.data[i*2] = (unsigned char)(val & 0x00ff); m_fout.data[i*2+1] = (unsigned char)((val & 0xff00) >> 8); } delete[] result; setTimestamp( m_fout ); m_foutOut.write(); RTC_DEBUG(("onExecute:writing %d samples", m_fout.data.length() / 2)); } else {
// Deserializes a ';'-separated wire message of the form
//   "4;<sender>;<receiver>;<timestamp>;<body>;<type...>"
// into this Message. Only messages whose first character is '4' are parsed.
// getSubstr() appears to advance `i` past each field (the following `i++`
// then skips the ';' delimiter) — confirm against getSubstr's definition.
// NOTE(review): the inner `int x;` shadows the `x` declared at the top of
// the function; the outer one is never used.
// NOTE(review): this chunk appears truncated — the function's closing brace
// is not visible; code left byte-identical.
void Message::toMessage(string msg) { /* local variables */ int i, len, x; string temp = ""; time_t t_temp; /* algorithm */ if (msg[0]=='4') { i = 2; len = msg.length(); //sender temp = getSubstr(msg, i, ';'); setSender(temp); temp = ""; //receiver i++; temp = getSubstr(msg, i, ';'); setReceiver(temp); temp = ""; //timestamp i++; temp = getSubstr(msg, i, ';'); istringstream buffer(temp); int x; buffer >> x; t_temp = x; setTimestamp(t_temp); temp = ""; //message body i++; temp = getSubstr(msg, i, ';'); text = temp; temp = ""; //type i++; temp = getSubstrInt(msg, i, len); type = temp; }
//----------------------------------------------------------------------------- // Function: StickyNote::parseValuesFrom() //----------------------------------------------------------------------------- void StickyNote::parseValuesFrom(QDomNode const &node) { int childCount = node.childNodes().count(); for (int i = 0; i < childCount; ++i) { QDomNode childNode = node.childNodes().at(i); if (childNode.nodeName() == positionExtension_->type()) { QPointF position = XmlUtils::parsePoint(childNode); positionExtension_->setPosition(position); setPos(position); } else if (childNode.nodeName() == contentExtension_->type()) { QString content = childNode.childNodes().at(0).nodeValue(); setText(content); } else if (childNode.nodeName() == associationExtensions_->type()) { int associationCount = childNode.childNodes().count(); for(int j = 0; j < associationCount; j++) { QDomNode assocationNode = childNode.childNodes().at(j); QPointF position = XmlUtils::parsePoint(assocationNode); QSharedPointer<Kactus2Position> associationEnd(new Kactus2Position(position)); associationExtensions_->addToGroup(associationEnd); } } else if (childNode.nodeName() == timestampExtension_->type()) { QString timestamp = childNode.childNodes().at(0).nodeValue(); setTimestamp(timestamp); } } }
// Loads this key configuration from its JSON file (path resolved by
// checkFileName(false)) via a boost property tree: document version, private
// and public IDs, secret key, timestamp, session counter, CRC, random field,
// use counter and description. For any version other than "0.0.1" the
// system-user field is also read (and applied only when non-empty). The
// changed flag is cleared at the end. Missing keys cause boost::property_tree
// to throw (ptree_bad_path), aborting the load.
void YubikoOtpKeyConfig::load() { BOOST_LOG_NAMED_SCOPE("YubikoOtpKeyConfig::load"); const string myInFile = checkFileName(false); ptree myTree; read_json(myInFile, myTree); const string myVer(myTree.get<string>(K_NM_DOC_VERS)); BOOST_LOG_TRIVIAL(info)<< K_NM_VERS << ":" << myVer; setPrivateId(myTree.get<string>(K_NM_DOC_PRIV_ID)); setPublicId(myTree.get<string>(K_NM_DOC_PUB_ID)); setSecretKey(myTree.get<string>(K_NM_DOC_SEC_KEY)); setTimestamp(UTimestamp(myTree.get<uint64_t>(K_NM_DOC_TIMESTAMP))); setCounter(myTree.get<uint8_t>(K_NM_DOC_SES_CNTR)); setCrc(myTree.get<uint16_t>(K_NM_DOC_CRC)); setRandom(myTree.get<uint16_t>(K_NM_DOC_RANDOM)); setUseCounter(myTree.get<uint8_t>(K_NM_DOC_USE_CNTR)); setDescription(myTree.get<string>(K_NM_DOC_DESC)); if (myVer != "0.0.1") { const string mySysUser { myTree.get<string>(K_NM_DOC_SYS_USER) }; if (!mySysUser.empty()) setSysUser(mySysUser); } itsChangedFlag = false; }
//----------------------------------------------------------------------------- // Function: StickyNote::StickyNote() //----------------------------------------------------------------------------- StickyNote::StickyNote(QGraphicsItem* parent): QGraphicsItemGroup(parent), Associable(), oldPos_(), extension_(), positionExtension_(), contentExtension_(), associationExtensions_(), timestampExtension_(), textArea_(0), timeLabel_(0), associationButton_(0) { initializeExtensions(); setItemOptions(); createGluedEdge(); createWritableArea(); createAssociationButton(); setTimestamp(getFormattedTimestamp()); connect(textArea_, SIGNAL(contentChanged()), this, SLOT(onTextEdited()), Qt::UniqueConnection); }
// Deserializes a MapBlock from `is` in the given format `version`.
// Versions <= 21 are delegated to deSerialize_pre22(). Otherwise the layout
// read here is: flags byte (underground / day-night-differs /
// lighting-expired / generated), bulk node data (content/params widths
// validated first), zlib-compressed node metadata (errors deliberately
// ignored with a warning), and — only when `disk` is true — node timers
// (version-dependent), static objects, the block timestamp
// (setTimestamp(readU32(is)), mirrored into m_disk_timestamp), and finally a
// NameIdMapping used to re-map node IDs to the current game definition.
// NOTE(review): this chunk appears truncated — the function's closing brace
// is not visible; code left byte-identical.
void MapBlock::deSerialize(std::istream &is, u8 version, bool disk) { if(!ser_ver_supported(version)) throw VersionMismatchException("ERROR: MapBlock format not supported"); TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos())<<std::endl); m_day_night_differs_expired = false; if(version <= 21) { deSerialize_pre22(is, version, disk); return; } u8 flags = readU8(is); is_underground = (flags & 0x01) ? true : false; m_day_night_differs = (flags & 0x02) ? true : false; m_lighting_expired = (flags & 0x04) ? true : false; m_generated = (flags & 0x08) ? false : true; /* Bulk node data */ TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": Bulk node data"<<std::endl); u32 nodecount = MAP_BLOCKSIZE*MAP_BLOCKSIZE*MAP_BLOCKSIZE; u8 content_width = readU8(is); u8 params_width = readU8(is); if(content_width != 1 && content_width != 2) throw SerializationError("MapBlock::deSerialize(): invalid content_width"); if(params_width != 2) throw SerializationError("MapBlock::deSerialize(): invalid params_width"); MapNode::deSerializeBulk(is, version, data, nodecount, content_width, params_width, true); /* NodeMetadata */ TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": Node metadata"<<std::endl); // Ignore errors try{ std::ostringstream oss(std::ios_base::binary); decompressZlib(is, oss); std::istringstream iss(oss.str(), std::ios_base::binary); if(version >= 23) m_node_metadata.deSerialize(iss, m_gamedef); else content_nodemeta_deserialize_legacy(iss, &m_node_metadata, &m_node_timers, m_gamedef); } catch(SerializationError &e) { errorstream<<"WARNING: MapBlock::deSerialize(): Ignoring an error" <<" while deserializing node metadata at (" <<PP(getPos())<<": "<<e.what()<<std::endl; } /* Data that is only on disk */ if(disk) { // Node timers if(version == 23){ // Read unused zero readU8(is); } else if(version >= 24){ TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": Node timers"<<std::endl); m_node_timers.deSerialize(is); } // Static objects 
TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": Static objects"<<std::endl); m_static_objects.deSerialize(is); // Timestamp TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": Timestamp"<<std::endl); setTimestamp(readU32(is)); m_disk_timestamp = m_timestamp; // Dynamically re-set ids based on node names TRACESTREAM(<<"MapBlock::deSerialize "<<PP(getPos()) <<": NameIdMapping"<<std::endl); NameIdMapping nimap; nimap.deSerialize(is); correctBlockNodeIds(&nimap, data, m_gamedef); }
// Fills the audio buffer with the next chunk of the input file, resampled by
// a crude ratio (getDefaultSampleRate() / 44100) — see the FIXME about
// non-integer ratios. Reading wraps: when the read position would pass
// stopSample, the remaining samples up to stopSample are read and the file
// position is rewound to startSample, so playback loops seamlessly. Each
// channel is then post-processed into the output buffer, the running
// timestamp is advanced by the number of samples produced, and the sample
// count is attached to the event buffer. The large commented-out section at
// the end is historical TTL event-injection test code, kept for reference.
void FileReader::process (AudioSampleBuffer& buffer, MidiBuffer& events) { setTimestamp (events, timestamp); const int samplesNeeded = int (float (buffer.getNumSamples()) * (getDefaultSampleRate() / 44100.0f)); // FIXME: needs to account for the fact that the ratio might not be an exact // integer value int samplesRead = 0; while (samplesRead < samplesNeeded) { int samplesToRead = samplesNeeded - samplesRead; if ( (currentSample + samplesToRead) > stopSample) { samplesToRead = stopSample - currentSample; if (samplesToRead > 0) input->readData (readBuffer + samplesRead, samplesToRead); input->seekTo (startSample); currentSample = startSample; } else { input->readData (readBuffer + samplesRead, samplesToRead); currentSample += samplesToRead; } samplesRead += samplesToRead; } for (int i = 0; i < currentNumChannels; ++i) { input->processChannelData (readBuffer, buffer.getWritePointer (i, 0), i, samplesNeeded); } timestamp += samplesNeeded; setNumSamples (events, samplesNeeded); // code for testing events: // // =========================================================================== // if (counter == 100) // { // //std::cout << "Adding on event for node id: " << nodeId << std::endl; // addEvent (events, // MidiBuffer // TTL, // eventType // 0, // sampleNum // 1, // eventID // 0); // eventChannel // ++counter; // } // else if (counter > 120) // { // //std::cout << "Adding off event!" << std::endl; // addEvent (events, // MidiBuffer // TTL, // eventType // 0, // sampleNum // 0, // eventID // 0); // eventChannel // counter = 0; // } // else // { // ++counter; // } // // =========================================================================== }
// Detects a laser pointer in the incoming camera image and converts it into
// presentation commands. Per cycle:
//  - a BgUpdateTrigger input schedules a background-image refresh;
//  - a new image is converted from Img::CF_RGB/CF_GRAY into a cv::Mat (RGB is
//    swapped to BGR for OpenCV); if a background update is pending or the
//    detector has no background yet, the frame becomes the new background and
//    the cycle ends;
//  - the camera intrinsic/extrinsic matrices are unpacked from the image
//    metadata, and the slide size is fetched over the service port (errors
//    return RTC_ERROR; non-positive sizes are skipped);
//  - lpd.detect() maps the detected point straight into screen coordinates.
//    On detection, "continuous" means the previous detection was less than
//    m_DetectUpdateTime seconds ago (compared via the outgoing data's
//    timestamp against time_old);
//  - three slide-change zones are laid out along the configured edge
//    (top/bottom/left/right). Pointing into the same zone for
//    m_SlideChangeCount consecutive continuous detections emits a relative
//    slide command (area_index - 1, i.e. -1/0/+1), resets the tracking state,
//    waits m_SlideChangeWait seconds, and skips drawing for that cycle;
//  - otherwise the draw-position output carries either the current point or
//    the previous+current pair (for continuous strokes), and the point/time
//    are remembered for the next cycle.
// The background is also re-seeded with the current frame at the end of every
// image cycle, and cv::waitKey(30) paces the loop.
RTC::ReturnCode_t PresentationLPInterface::onExecute(RTC::UniqueId ec_id) { int i,j,res; int channels; cv::Mat image; cv::Point ptImg; //laser point position in the image coordinates cv::Point ptSc; //laser point position in the screen coordinates double intrinsic[3][3]; //camera intrinsic parameter double extrinsic[3][4]; //camera extrinsic parameter bool continuous; bool is_change_area = false; cv::Rect change_area[3]; int slide_width, slide_height; //Set background update flag if new update trigger comes if(m_BgUpdateTriggerIn.isNew()){ m_BgUpdateTriggerIn.read(); bg_update = true; } //New image data if(m_ImageIn.isNew()){ m_ImageIn.read(); //--- Convert ImageData to cv::Mat --- if(m_Image.data.image.format == Img::CF_RGB){ channels = 3; image.create(m_Image.data.image.height, m_Image.data.image.width, CV_8UC3); }else if(m_Image.data.image.format == Img::CF_GRAY){ channels = 1; image.create(m_Image.data.image.height, m_Image.data.image.width, CV_8UC1); }else{ std::cerr << "Invalid data: Image" << std::endl; return RTC::RTC_OK; } for(i=0; i<m_Image.data.image.height; ++i){ std::memcpy(&image.data[i*image.step], &m_Image.data.image.raw_data[i*m_Image.data.image.width*channels], sizeof(unsigned char)*m_Image.data.image.width*channels); } if(channels==3){ cv::cvtColor(image, image, CV_RGB2BGR); } // --- End convert ImageData to cv::Mat --- // --- Initialize backgournd image if necessary if(bg_update || lpd.backgroundEmpty()){ lpd.setBackgroundImage(image); bg_update = false; return RTC::RTC_OK; } // --- End initialize backgournd image if necessary --- // --- Main processing --- // *** set parameters lpd.setThreshold(m_DetectLuminanceDiff, m_DetectBgUpdate); // *** end set parameters //detection // *** read reconstruction parameters //read intrinsic parameters intrinsic[0][0] = m_Image.data.intrinsic.matrix_element[0]; intrinsic[0][1] = m_Image.data.intrinsic.matrix_element[1]; intrinsic[0][2] = m_Image.data.intrinsic.matrix_element[3]; intrinsic[1][0] = 0; 
intrinsic[1][1] = m_Image.data.intrinsic.matrix_element[2]; intrinsic[1][2] = m_Image.data.intrinsic.matrix_element[4]; intrinsic[2][0] = 0; intrinsic[2][1] = 0; intrinsic[2][2] = 1; //read extrinsic parameters for(i=0;i<3;i++){ for(j=0;j<4;j++){ extrinsic[i][j] = m_Image.data.extrinsic[i][j]; } } // *** end read reconstruction parameters //get slide width and height try{ slide_width = m_SlideSizeInfo->getWidth(); slide_height = m_SlideSizeInfo->getHeight(); }catch(CORBA::SystemException &e){ std::cerr << "ServicePort not connected." << std::endl; return RTC::RTC_ERROR; } if(slide_width<=0 || slide_height<=0){ std::cerr << "Invalid data form ServicePort: width=" << slide_width << ", height=" << slide_height << std::endl; return RTC::RTC_OK; } //res = lpd.detect(image, ptImg); res = lpd.detect(image, ptSc, cv::Point(0,0), cv::Point(slide_width,slide_height), m_ScreenZ, intrinsic, extrinsic); if(res==1){ //detected //coordinate transformation //ptSc = lpd.img_to_worldZ(ptImg, m_ScreenZ, intrinsic, extrinsic); //std::cout << "Im:" << ptImg.x << ',' << ptImg.y << "-> Sc:" << ptSc.x << ',' << ptSc.y << std::endl; std::cout << "-> Sc:" << ptSc.x << ',' << ptSc.y << std::endl; // *** generate commands for presentation component //check detected time setTimestamp(m_DrawPositions); //get current time if((m_DrawPositions.tm.nsec-time_old.nsec)/1.0e9 + (m_DrawPositions.tm.sec-time_old.sec) < m_DetectUpdateTime){ continuous = true; }else{ continuous = false; } //set slide change region based on the slide width and height if(m_SlideChangeArea=="top"||m_SlideChangeArea=="bottom"){ for(i=0;i<3;i++){ change_area[i].x = slide_width*i/3; change_area[i].y = 0; change_area[i].width = slide_width/3; change_area[i].height = m_SlideChangeAreaSize; } if(m_SlideChangeArea=="bottom"){ for(i=0;i<3;i++){ change_area[i].y = slide_height-m_SlideChangeAreaSize; //bottom } } }else if(m_SlideChangeArea=="left"||m_SlideChangeArea=="right"){ for(i=0;i<3;i++){ change_area[i].x = 0; //left 
change_area[i].y = slide_height*i/3; change_area[i].width = m_SlideChangeAreaSize; change_area[i].height = slide_height/3; } if(m_SlideChangeArea=="right"){ for(i=0;i<3;i++){ change_area[i].x = slide_width-m_SlideChangeAreaSize; //right } } } //chack if the detected point is on the region for(i=0;i<3;i++){ if(change_area[i].contains(ptSc)){ is_change_area = true; if((area_index == i) && (continuous)){ area_count++; }else{ area_count = 1; area_index = i; } if(area_count >= m_SlideChangeCount){ //output slide change command m_SlideRelativeCommand.data = area_index -1; setTimestamp(m_SlideRelativeCommand); m_SlideRelativeCommandOut.write(); //reset counter and position ptSc_old.x = -1; ptSc_old.y = -1; time_old.sec = 0; time_old.nsec = 0; area_index = -1; area_count = 0; cv::waitKey(m_SlideChangeWait*1000); return RTC::RTC_OK; //no need to send draw positions since the slide will be changed } break; } } if(!is_change_area){ //if not detected in slide change area - reset counter area_index = -1; area_count = 0; } // *** end generate commands for presentation component // *** output //output drawing positions if(!continuous || ptSc_old.x < 0){ //we do not have old point - output current point m_DrawPositions.data.length(2); m_DrawPositions.data[0] = ptSc.x; m_DrawPositions.data[1] = ptSc.y; }else{ //we have old point - output both previous and current points m_DrawPositions.data.length(4); m_DrawPositions.data[0] = ptSc_old.x; m_DrawPositions.data[1] = ptSc_old.y; m_DrawPositions.data[2] = ptSc.x; m_DrawPositions.data[3] = ptSc.y; } setTimestamp(m_DrawPositions); m_DrawPositionsOut.write(); // *** end output //store current position as old data for next step time_old = m_DrawPositions.tm; ptSc_old = ptSc; }//end if(res==1) // --- End main processing --- } //end if(m_ImageIn.isNew()) lpd.setBackgroundImage(image); cv::waitKey(30); return RTC::RTC_OK; }
// Fresh registry entry: both the physical and the logical clock start at zero.
Round::Registry::Registry()
{
  setTimestamp(0);
  setLogicalTimestamp(0);
}