// This version removes an individual RepoId (value) from the RepoIdSet
// associated with the "key" RepoId in our map_.
int
OpenDDS::DCPS::RepoIdSetMap::remove(RepoId key, RepoId value)
{
  DBG_ENTRY_LVL("RepoIdSetMap", "remove", 6);
  RepoIdSet_rch id_set;
  int result = OpenDDS::DCPS::find(map_, key, id_set);

  if (result != 0) {
    // We couldn't find the id_set for the supplied key.
    GuidConverter converter(key);
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::remove: ")
                      ACE_TEXT("unable to locate RepoIdSet for key %C.\n"),
                      OPENDDS_STRING(converter).c_str()), -1);
  }

  // Now we can attempt to remove the value RepoId from the id_set.
  result = id_set->remove_id(value);

  if (result != 0) {
    // We couldn't find the supplied RepoId value as a member of the id_set.
    GuidConverter key_converter(key);
    GuidConverter value_converter(value);
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::remove: ")
                      ACE_TEXT("RepoIdSet for key %C does not contain ")
                      ACE_TEXT("value %C.\n"),
                      OPENDDS_STRING(key_converter).c_str(),
                      OPENDDS_STRING(value_converter).c_str()), -1);
  }

  return 0;
}
bool
ReceiveListenerSet::exist(const RepoId& local_id, bool& last)
{
  GuardType guard(this->lock_);

  last = true;

  TransportReceiveListener_rch listener;

  if (find(map_, local_id, listener) == -1) {
    GuidConverter converter(local_id);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ReceiveListenerSet::exist: ")
               ACE_TEXT("could not find local %C.\n"),
               OPENDDS_STRING(converter).c_str()));
    return false;
  }

  if (!listener) {
    GuidConverter converter(local_id);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ReceiveListenerSet::exist: ")
               ACE_TEXT("listener for local %C is nil.\n"),
               OPENDDS_STRING(converter).c_str()));
    return false;
  }

  last = map_.size() == 1;
  return true;
}
WriterInfo::WriterInfo(WriterInfoListener* reader,
                       const PublicationId& writer_id,
                       const ::DDS::DataWriterQos& writer_qos)
  : last_liveliness_activity_time_(ACE_OS::gettimeofday()),
    seen_data_(false),
    historic_samples_timer_(NO_TIMER),
    remove_association_timer_(NO_TIMER),
    removal_deadline_(ACE_Time_Value::zero),
    last_historic_seq_(SequenceNumber::SEQUENCENUMBER_UNKNOWN()),
    waiting_for_end_historic_samples_(false),
    scheduled_for_removal_(false),
    notify_lost_(false),
    state_(NOT_SET),
    reader_(reader),
    writer_id_(writer_id),
    writer_qos_(writer_qos),
    handle_(DDS::HANDLE_NIL)
{
#ifndef OPENDDS_NO_OBJECT_MODEL_PROFILE
  this->reset_coherent_info();
#endif

  if (DCPS_debug_level >= 5) {
    GuidConverter writer_converter(writer_id);
    GuidConverter reader_converter(reader->subscription_id_);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) WriterInfo::WriterInfo: ")
               ACE_TEXT("writer %C added to reader %C.\n"),
               OPENDDS_STRING(writer_converter).c_str(),
               OPENDDS_STRING(reader_converter).c_str()));
  }
}
/// This gets invoked when a TransportClient::remove_associations()
/// call has been made.  Because this DataLink can be shared amongst
/// different TransportClient objects, and different threads could
/// be "managing" the different TransportClient objects, we need
/// to make sure that this release_reservations() works in conjunction
/// with a simultaneous call (in another thread) to one of this
/// DataLink's make_reservation() methods.
void
DataLink::release_reservations(RepoId remote_id, RepoId local_id,
                               DataLinkSetMap& released_locals)
{
  DBG_ENTRY_LVL("DataLink", "release_reservations", 6);

  if (DCPS_debug_level > 9) {
    GuidConverter local(local_id);
    GuidConverter remote(remote_id);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) DataLink::release_reservations() - ")
               ACE_TEXT("releasing association local: %C ")
               ACE_TEXT("<--> with remote %C.\n"),
               OPENDDS_STRING(local).c_str(),
               OPENDDS_STRING(remote).c_str()));
  }

  // Let the specific class release its reservations.
  // Done this way to prevent a deadlock of holding pub_sub_maps_lock_ and
  // then obtaining a specific class lock in release_reservations_i, which
  // reverses the lock ordering of the active send logic of needing the
  // specific class lock before obtaining the overarching DataLink
  // pub_sub_maps_lock_.
  this->release_reservations_i(remote_id, local_id);

  GuardType guard(this->pub_sub_maps_lock_);

  ReceiveListenerSet_rch& rls = assoc_by_remote_[remote_id];

  if (rls->size() == 1) {
    assoc_by_remote_.erase(remote_id);
    release_remote_i(remote_id);
  } else {
    rls->remove(local_id);
  }

  RepoIdSet& ris = assoc_by_local_[local_id];

  if (ris.size() == 1) {
    DataLinkSet_rch& links = released_locals[local_id];

    if (links.is_nil())
      links = new DataLinkSet;

    links->insert_link(this);

    {
      GuardType guard(this->released_assoc_by_local_lock_);
      released_assoc_by_local_[local_id].insert(remote_id);
    }

    assoc_by_local_.erase(local_id);
  } else {
    ris.erase(remote_id);
  }

  if (assoc_by_local_.empty()) {
    VDBG_LVL((LM_DEBUG,
              ACE_TEXT("(%P|%t) DataLink::release_reservations: ")
              ACE_TEXT("release_datalink due to no remaining pubs or subs.\n")), 5);

    this->impl_->release_datalink(this);
  }
}
int
OpenDDS::DCPS::RepoIdSetMap::insert(RepoId key, RepoId value)
{
  DBG_ENTRY_LVL("RepoIdSetMap", "insert", 6);
  RepoIdSet_rch id_set = this->find_or_create(key);

  if (id_set.is_nil()) {
    // find_or_create failure
    GuidConverter converter(key);
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::insert: ")
                      ACE_TEXT("failed to find_or_create RepoIdSet ")
                      ACE_TEXT("for RepoId %C.\n"),
                      OPENDDS_STRING(converter).c_str()), -1);
  }

  int result = id_set->insert_id(value, key);

  if (result == -1) {
    GuidConverter value_converter(value);
    GuidConverter key_converter(key);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::insert: ")
               ACE_TEXT("failed to insert RepoId %C ")
               ACE_TEXT("into RepoIdSet for RepoId %C.\n"),
               OPENDDS_STRING(value_converter).c_str(),
               OPENDDS_STRING(key_converter).c_str()));
  } else {
    // It could already be bound, but we accept that since the subscriber
    // could send the acks for the same id multiple times.
    // Success.  Leave now.
    return 0;
  }

  // Deal with the possibility that the id_set just got created - just for us.
  // If so, we need to "undo" the creation.
  if (id_set->size() == 0) {
    if (unbind(map_, key) != 0) {
      GuidConverter converter(key);
      ACE_ERROR((LM_ERROR,
                 ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::insert: ")
                 ACE_TEXT("failed to unbind (undo create) an empty ")
                 ACE_TEXT("RepoIdSet for RepoId %C.\n"),
                 OPENDDS_STRING(converter).c_str()));
    }
  }

  return -1;
}
int
OpenDDS::DCPS::ReceiveListenerSetMap::insert(RepoId publisher_id,
                                             RepoId subscriber_id,
                                             TransportReceiveListener* receive_listener)
{
  DBG_ENTRY_LVL("ReceiveListenerSetMap", "insert", 6);
  ReceiveListenerSet_rch listener_set = this->find_or_create(publisher_id);

  if (listener_set.is_nil()) {
    // find_or_create failure
    GuidConverter converter(publisher_id);
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::insert: ")
                      ACE_TEXT("failed to find_or_create entry for ")
                      ACE_TEXT("publisher %C.\n"),
                      OPENDDS_STRING(converter).c_str()), -1);
  }

  int result = listener_set->insert(subscriber_id, receive_listener);

  if (result == 0 || result == 1) {
    return 0;
  }

  GuidConverter sub_converter(subscriber_id);
  GuidConverter pub_converter(publisher_id);
  ACE_ERROR((LM_ERROR,
             ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::insert: ")
             ACE_TEXT("failed to insert subscriber %C for ")
             ACE_TEXT("publisher %C.\n"),
             OPENDDS_STRING(sub_converter).c_str(),
             OPENDDS_STRING(pub_converter).c_str()));

  // Deal with the possibility that the listener_set just got
  // created - and just for us.  This is to make sure we don't leave any
  // empty ReceiveListenerSets in our map_.
  if (listener_set->size() == 0) {
    listener_set = this->remove_set(publisher_id);

    if (listener_set.is_nil()) {
      ACE_ERROR((LM_ERROR,
                 ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::insert: ")
                 ACE_TEXT("failed to remove (undo create) ReceiveListenerSet ")
                 ACE_TEXT("for publisher %C.\n"),
                 OPENDDS_STRING(pub_converter).c_str()));
    }
  }

  return -1;
}
void
RecorderImpl::remove_associations(const WriterIdSeq& writers,
                                  bool notify_lost)
{
  DBG_ENTRY_LVL("RecorderImpl", "remove_associations", 6);

  if (writers.length() == 0) {
    return;
  }

  if (DCPS_debug_level >= 1) {
    GuidConverter reader_converter(subscription_id_);
    GuidConverter writer_converter(writers[0]);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) RecorderImpl::remove_associations: ")
               ACE_TEXT("bit %d local %C remote %C num remotes %d\n"),
               is_bit_,
               OPENDDS_STRING(reader_converter).c_str(),
               OPENDDS_STRING(writer_converter).c_str(),
               writers.length()));
  }

  if (!this->entity_deleted_.value()) {
    // Stop pending associations for these writer ids.
    this->stop_associating(writers.get_buffer(), writers.length());

    // Writers which are considered non-active can be removed immediately.
    WriterIdSeq non_active_writers;
    {
      CORBA::ULong wr_len = writers.length();
      ACE_WRITE_GUARD(ACE_RW_Thread_Mutex, write_guard, this->writers_lock_);

      for (CORBA::ULong i = 0; i < wr_len; i++) {
        PublicationId writer_id = writers[i];

        WriterMapType::iterator it = this->writers_.find(writer_id);
        if (it != this->writers_.end() &&
            it->second->active(TheServiceParticipant->pending_timeout())) {
          remove_association_sweeper_->schedule_timer(it->second, notify_lost);
        } else {
          push_back(non_active_writers, writer_id);
        }
      }
    }

    remove_associations_i(non_active_writers, notify_lost);
  } else {
    remove_associations_i(writers, notify_lost);
  }
}
void
SingleSendBuffer::retain_all(RepoId pub_id)
{
  if (Transport_debug_level > 5) {
    GuidConverter converter(pub_id);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) SingleSendBuffer::retain_all() - ")
               ACE_TEXT("copying out blocks for publication: %C\n"),
               OPENDDS_STRING(converter).c_str()));
  }

  for (BufferMap::iterator it(this->buffers_.begin());
       it != this->buffers_.end();) {
    if (it->second.first && it->second.second) {
      if (retain_buffer(pub_id, it->second) == REMOVE_ERROR) {
        GuidConverter converter(pub_id);
        ACE_ERROR((LM_WARNING,
                   ACE_TEXT("(%P|%t) WARNING: ")
                   ACE_TEXT("SingleSendBuffer::retain_all: ")
                   ACE_TEXT("failed to retain data from publication: %C!\n"),
                   OPENDDS_STRING(converter).c_str()));
        release(it++);
      } else {
        ++it;
      }

    } else {
      const FragmentMap::iterator fm_it = fragments_.find(it->first);
      if (fm_it != fragments_.end()) {
        for (BufferMap::iterator bm_it = fm_it->second.begin();
             bm_it != fm_it->second.end();) {
          if (retain_buffer(pub_id, bm_it->second) == REMOVE_ERROR) {
            GuidConverter converter(pub_id);
            ACE_ERROR((LM_WARNING,
                       ACE_TEXT("(%P|%t) WARNING: ")
                       ACE_TEXT("SingleSendBuffer::retain_all: failed to ")
                       ACE_TEXT("retain fragment data from publication: %C!\n"),
                       OPENDDS_STRING(converter).c_str()));
            release(bm_it++);
          } else {
            ++bm_it;
          }
        }
      }
      ++it;
    }
  }
}
std::ostream&
Subscriber::rawData(std::ostream& str) const
{
  // Configure the raw data gathering and extract the raw latency data
  // container.
  OpenDDS::DCPS::DataReaderImpl* readerImpl =
    dynamic_cast<OpenDDS::DCPS::DataReaderImpl*>(this->reader_.in());

  if (readerImpl == 0) {
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: Subscriber::rawData() - ")
               ACE_TEXT("failed to derive reader implementation.\n")));
    throw BadReaderException();
  }

  int index = 0;

  for (OpenDDS::DCPS::DataReaderImpl::StatsMapType::const_iterator current =
         readerImpl->raw_latency_statistics().begin();
       current != readerImpl->raw_latency_statistics().end();
       ++current, ++index) {
    OpenDDS::DCPS::GuidConverter converter(current->first);
    str << std::endl << " Writer[ " << OPENDDS_STRING(converter) << "]" << std::endl;
#ifndef OPENDDS_SAFETY_PROFILE
    current->second.raw_data(str);
#endif // OPENDDS_SAFETY_PROFILE
  }

  return str;
}
DDS::ReturnCode_t
PublisherImpl::writer_enabled(const char* topic_name,
                              DataWriterImpl* writer)
{
  ACE_GUARD_RETURN(ACE_Recursive_Thread_Mutex,
                   guard,
                   this->pi_lock_,
                   DDS::RETCODE_ERROR);

  DDS::DataWriter_var writer_var = DDS::DataWriter::_duplicate(writer);
  writers_not_enabled_.erase(writer_var);

  datawriter_map_.insert(DataWriterMap::value_type(topic_name, writer));

  const RepoId publication_id = writer->get_publication_id();

  std::pair<PublicationMap::iterator, bool> pair =
    publication_map_.insert(PublicationMap::value_type(publication_id, writer));

  if (pair.second == false) {
    GuidConverter converter(publication_id);
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: ")
                      ACE_TEXT("PublisherImpl::writer_enabled: ")
                      ACE_TEXT("insert publication %C failed.\n"),
                      OPENDDS_STRING(converter).c_str()),
                     DDS::RETCODE_ERROR);
  }

  // Increase the ref count when the servant is added to the
  // datawriter/publication map.
  writer->_add_ref();

  return DDS::RETCODE_OK;
}
bool
RecorderImpl::lookup_instance_handles(const WriterIdSeq& ids,
                                      DDS::InstanceHandleSeq& hdls)
{
  if (DCPS_debug_level > 9) {
    CORBA::ULong const size = ids.length();
    OPENDDS_STRING separator = "";
    OPENDDS_STRING buffer;

    for (unsigned long i = 0; i < size; ++i) {
      buffer += separator + OPENDDS_STRING(GuidConverter(ids[i]));
      separator = ", ";
    }

    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) RecorderImpl::lookup_instance_handles: ")
               ACE_TEXT("searching for handles for writer Ids: %C.\n"),
               buffer.c_str()));
  }

  CORBA::ULong const num_wrts = ids.length();
  hdls.length(num_wrts);

  for (CORBA::ULong i = 0; i < num_wrts; ++i) {
    hdls[i] = this->participant_servant_->get_handle(ids[i]);
  }

  return true;
}
int
OpenDDS::DCPS::ReceiveListenerSetMap::remove(RepoId publisher_id,
                                             RepoId subscriber_id)
{
  DBG_ENTRY_LVL("ReceiveListenerSetMap", "remove", 6);
  ReceiveListenerSet_rch listener_set;

  if (OpenDDS::DCPS::find(map_, publisher_id, listener_set) != 0) {
    return 0;
  }

  int result = listener_set->remove(subscriber_id);

  // Ignore the result
  ACE_UNUSED_ARG(result);

  if (listener_set->size() == 0) {
    if (unbind(map_, publisher_id) != 0) {
      GuidConverter converter(publisher_id);
      ACE_ERROR_RETURN((LM_ERROR,
                        ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::remove: ")
                        ACE_TEXT("failed to remove empty ReceiveListenerSet for ")
                        ACE_TEXT("publisher %C.\n"),
                        OPENDDS_STRING(converter).c_str()), -1);
    }
  }

  return 0;
}
int
OpenDDS::DCPS::RepoIdSetMap::release_publisher(RepoId subscriber_id,
                                               RepoId publisher_id)
{
  DBG_ENTRY_LVL("RepoIdSetMap", "release_publisher", 6);
  RepoIdSet_rch id_set;

  if (OpenDDS::DCPS::find(map_, subscriber_id, id_set) != 0) {
    GuidConverter converter(subscriber_id);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::release_publisher: ")
               ACE_TEXT("subscriber_id %C not found in map.\n"),
               OPENDDS_STRING(converter).c_str()));

    // Return 1 to indicate that the subscriber_id is no longer associated
    // with any publishers at all.
    return 1;
  }

  int result = id_set->remove_id(publisher_id);

  // Ignore the result
  ACE_UNUSED_ARG(result);

  VDBG_LVL((LM_DEBUG, "(%P|%t) RepoId size: %d.\n", id_set->size()), 5);
  // Return 1 if the set is empty, 0 if not empty.
  //return (id_set->size() == 0) ? 1 : 0;

  if (id_set->size() == 0) {
    if (unbind(map_, subscriber_id) != 0) {
      GuidConverter converter(subscriber_id);
      ACE_ERROR((LM_ERROR,
                 ACE_TEXT("(%P|%t) ERROR: RepoIdSetMap::release_publisher: ")
                 ACE_TEXT("failed to remove an empty ")
                 ACE_TEXT("RepoIdSet for subscriber_id %C.\n"),
                 OPENDDS_STRING(converter).c_str()));
    }

    // We always return 1 if we know the subscriber_id is no longer
    // associated with any publishers.
    return 1;
  }

  // There are still publishers associated with the subscriber_id.
  // We return 0 in this case.
  return 0;
}
void
WriterInfo::set_group_info(const CoherentChangeControl& info)
{
  if (!(this->publisher_id_ == info.publisher_id_)
      || this->group_coherent_ != info.group_coherent_) {
    GuidConverter sub_id(this->reader_->subscription_id_);
    GuidConverter pub_id(this->writer_id_);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: WriterInfo::set_group_info()")
               ACE_TEXT(" reader %C writer %C incorrect coherent info!\n"),
               OPENDDS_STRING(sub_id).c_str(),
               OPENDDS_STRING(pub_id).c_str()));
  }

  this->writer_coherent_samples_ = info.coherent_samples_;
  this->group_coherent_samples_ = info.group_coherent_samples_;
}
OPENDDS_STRING to_dds_string(unsigned int to_convert, bool as_hex)
{
  const char* fmt;
  if (as_hex) {
    fmt = "%02x";
    // Two hex digits + null terminator; snprintf truncates values above 0xFF.
    const int buff_size = 3;
    char buf[buff_size];
    ACE_OS::snprintf(&buf[0], buff_size, fmt, to_convert);
    return OPENDDS_STRING(buf);
  } else {
    fmt = "%u";
    const int buff_size = 20 + 1; // note +1 for null terminator
    char buf[buff_size];
    ACE_OS::snprintf(&buf[0], buff_size, fmt, to_convert);
    return OPENDDS_STRING(buf);
  }
}
OPENDDS_STRING to_dds_string(::CORBA::UShort to_convert)
{
  const char* fmt = "%hu";
  const int buff_size = 5 + 1; // note +1 for null terminator
  char buf[buff_size];
  ACE_OS::snprintf(&buf[0], buff_size, fmt, to_convert);
  return OPENDDS_STRING(buf);
}
OPENDDS_STRING to_dds_string(long long to_convert)
{
  const char* fmt = "%lld";
  const int buff_size = 20 + 1; // note +1 for null terminator
  char buf[buff_size];
  ACE_OS::snprintf(&buf[0], buff_size, fmt, to_convert);
  return OPENDDS_STRING(buf);
}
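// --- Usage sketch (not part of the sources above) ---------------------------
// A minimal, hypothetical illustration of how the to_dds_string() helpers
// defined above might be used when building a log line.  It assumes the
// overloads are visible in the current scope (e.g. via the OpenDDS::DCPS
// namespace) and that OPENDDS_STRING behaves like std::string.
void to_dds_string_usage_example()
{
  const OPENDDS_STRING dec = to_dds_string(42u, false);   // "42"
  const OPENDDS_STRING hex = to_dds_string(0x2Au, true);  // "2a" (two hex digits)
  const OPENDDS_STRING big = to_dds_string(1234567890LL); // "1234567890"

  ACE_DEBUG((LM_DEBUG,
             ACE_TEXT("(%P|%t) example: dec=%C hex=%C big=%C\n"),
             dec.c_str(), hex.c_str(), big.c_str()));
}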
OPENDDS_STRING
TransportImpl::dump_to_str()
{
  if (this->config_.is_nil()) {
    return OPENDDS_STRING(" (not configured)\n");
  } else {
    return this->config_->dump_to_str();
  }
}
/// This method is called when the (remote) subscriber is being
/// released.  This method will return 0 if the subscriber_id is
/// successfully disassociated from the publisher_id *and* there
/// are still other subscribers associated with the publisher_id.
/// This method will return 1 if, after the disassociation, the
/// publisher_id is no longer associated with any subscribers (which
/// also means its element was removed from our map_).
int
OpenDDS::DCPS::ReceiveListenerSetMap::release_subscriber(RepoId publisher_id,
                                                         RepoId subscriber_id)
{
  DBG_ENTRY_LVL("ReceiveListenerSetMap", "release_subscriber", 6);
  ReceiveListenerSet_rch listener_set;

  if (OpenDDS::DCPS::find(map_, publisher_id, listener_set) != 0) {
    GuidConverter converter(publisher_id);
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::release_subscriber: ")
               ACE_TEXT("publisher %C not found in map_.\n"),
               OPENDDS_STRING(converter).c_str()));

    // Return 1 to indicate that the publisher_id is no longer associated
    // with any subscribers at all.
    return 1;
  }

  int result = listener_set->remove(subscriber_id);

  // Ignore the result
  ACE_UNUSED_ARG(result);

  if (listener_set->size() == 0) {
    if (unbind(map_, publisher_id) != 0) {
      GuidConverter converter(publisher_id);
      ACE_ERROR((LM_ERROR,
                 ACE_TEXT("(%P|%t) ERROR: ReceiveListenerSetMap::release_subscriber: ")
                 ACE_TEXT("failed to remove empty ReceiveListenerSet for ")
                 ACE_TEXT("publisher %C.\n"),
                 OPENDDS_STRING(converter).c_str()));
    }

    // We always return 1 if we know the publisher_id is no longer
    // associated with any ReceiveListeners.
    return 1;
  }

  // There are still ReceiveListeners associated with the publisher_id.
  // We return 0 in this case.
  return 0;
}
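// --- Caller sketch (not part of the sources above) --------------------------
// A hypothetical illustration of the return contract documented above: 1 means
// the publisher_id no longer has any associated subscribers (its entry was
// removed from the map), 0 means other subscribers remain.  The map and the
// two ids are assumed to be supplied by the caller.
void release_subscriber_example(OpenDDS::DCPS::ReceiveListenerSetMap& map,
                                const OpenDDS::DCPS::RepoId& publisher_id,
                                const OpenDDS::DCPS::RepoId& subscriber_id)
{
  if (map.release_subscriber(publisher_id, subscriber_id) == 1) {
    // Last subscriber for this publisher: the caller may clean up any
    // per-publisher resources it holds.
  } else {
    // Other subscribers are still associated with this publisher.
  }
}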
int
DataLink::make_reservation(const RepoId& remote_publication_id,
                           const RepoId& local_subscription_id,
                           TransportReceiveListener* receive_listener)
{
  DBG_ENTRY_LVL("DataLink", "make_reservation", 6);

  if (DCPS_debug_level > 9) {
    GuidConverter local(local_subscription_id), remote(remote_publication_id);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) DataLink::make_reservation() - ")
               ACE_TEXT("creating association local subscription %C ")
               ACE_TEXT("<--> with remote publication %C.\n"),
               OPENDDS_STRING(local).c_str(),
               OPENDDS_STRING(remote).c_str()));
  }

  {
    GuardType guard(strategy_lock_);

    if (!send_strategy_.is_nil()) {
      send_strategy_->link_released(false);
    }
  }

  {
    GuardType guard(pub_sub_maps_lock_);

    assoc_by_local_[local_subscription_id].insert(remote_publication_id);
    ReceiveListenerSet_rch& rls = assoc_by_remote_[remote_publication_id];

    if (rls.is_nil())
      rls = new ReceiveListenerSet;

    rls->insert(local_subscription_id, receive_listener);

    recv_listeners_[local_subscription_id] = receive_listener;
  }

  return 0;
}
std::ostream&
operator<<(std::ostream& str, const Test::Subscriber& value)
{
  ::OpenDDS::DCPS::LatencyStatisticsSeq statistics;
  value.reader_->get_latency_stats(statistics);

  str << " --- statistical summary ---" << std::endl;
  for (unsigned long index = 0; index < statistics.length(); ++index) {
    OpenDDS::DCPS::GuidConverter converter(statistics[index].publication);
    str << " Writer[ " << OPENDDS_STRING(converter) << "]" << std::endl;
    str << " samples:  " << statistics[index].n << std::endl;
    str << " mean:     " << statistics[index].mean << std::endl;
    str << " minimum:  " << statistics[index].minimum << std::endl;
    str << " maximum:  " << statistics[index].maximum << std::endl;
    str << " variance: " << statistics[index].variance << std::endl;
  }

  return str;
}
DDS::ReturnCode_t
PublisherImpl::delete_contained_entities()
{
  // Mark that the entity is being deleted.
  set_deleted(true);

  while (true) {
    PublicationId pub_id = GUID_UNKNOWN;
    DataWriterImpl* a_datawriter = 0;

    {
      ACE_GUARD_RETURN(ACE_Recursive_Thread_Mutex,
                       guard,
                       this->pi_lock_,
                       DDS::RETCODE_ERROR);

      if (datawriter_map_.empty()) {
        break;
      } else {
        a_datawriter = datawriter_map_.begin()->second;
        pub_id = a_datawriter->get_publication_id();
      }
    }

    const DDS::ReturnCode_t ret = delete_datawriter(a_datawriter);

    if (ret != DDS::RETCODE_OK) {
      GuidConverter converter(pub_id);
      ACE_ERROR_RETURN((LM_ERROR,
                        ACE_TEXT("(%P|%t) ERROR: ")
                        ACE_TEXT("PublisherImpl::")
                        ACE_TEXT("delete_contained_entities: ")
                        ACE_TEXT("failed to delete ")
                        ACE_TEXT("datawriter %C.\n"),
                        OPENDDS_STRING(converter).c_str()), ret);
    }
  }

  // The publisher can now start creating new publications.
  set_deleted(false);

  return DDS::RETCODE_OK;
}
// This version removes an entire RepoIdSet from the map_.
OpenDDS::DCPS::RepoIdSet*
OpenDDS::DCPS::RepoIdSetMap::remove_set(RepoId key)
{
  DBG_ENTRY_LVL("RepoIdSetMap", "remove_set", 6);
  RepoIdSet_rch value;

  if (unbind(map_, key, value) != 0) {
    if (DCPS_debug_level > 4) {
      GuidConverter converter(key);
      ACE_DEBUG((LM_DEBUG,
                 ACE_TEXT("(%P|%t) RepoIdSetMap::remove_set: ")
                 ACE_TEXT("RepoId %C not found in map.\n"),
                 OPENDDS_STRING(converter).c_str()));
    }

    return 0;
  }

  return value._retn();
}
DDS::ReturnCode_t
PublisherImpl::delete_datawriter(DDS::DataWriter_ptr a_datawriter)
{
  DataWriterImpl* dw_servant = dynamic_cast<DataWriterImpl*>(a_datawriter);
  if (dw_servant) {
    // Marks the entity as deleted and stops future associating.
    dw_servant->prepare_to_delete();
  }

  if (!dw_servant) {
    ACE_ERROR((LM_ERROR,
               "(%P|%t) PublisherImpl::delete_datawriter - dynamic cast to DataWriterImpl failed\n"));
    return DDS::RETCODE_ERROR;
  }

  {
    DDS::Publisher_var dw_publisher(dw_servant->get_publisher());

    if (dw_publisher.in() != this) {
      RepoId id = dw_servant->get_publication_id();
      GuidConverter converter(id);
      ACE_ERROR((LM_ERROR,
                 ACE_TEXT("(%P|%t) PublisherImpl::delete_datawriter: ")
                 ACE_TEXT("the data writer %C doesn't ")
                 ACE_TEXT("belong to this publisher.\n"),
                 OPENDDS_STRING(converter).c_str()));
      return DDS::RETCODE_PRECONDITION_NOT_MET;
    }
  }

#ifndef OPENDDS_NO_PERSISTENCE_PROFILE
  // Trigger data to be persisted, i.e. made durable, if so
  // configured.  This needs to be called before unregister_instances
  // because unregister_instances may cause instance dispose.
  if (!dw_servant->persist_data() && DCPS_debug_level >= 2) {
    ACE_ERROR((LM_ERROR,
               ACE_TEXT("(%P|%t) ERROR: ")
               ACE_TEXT("PublisherImpl::delete_datawriter, ")
               ACE_TEXT("failed to make data durable.\n")));
  }
#endif

  // Unregister all registered instances prior to deletion.
  DDS::Time_t source_timestamp = time_value_to_time(ACE_OS::gettimeofday());
  dw_servant->unregister_instances(source_timestamp);

  // Wait for any control messages to be transported during
  // unregistering of instances.
  dw_servant->wait_pending();
  dw_servant->wait_control_pending();

  RepoId publication_id = GUID_UNKNOWN;
  {
    ACE_GUARD_RETURN(ACE_Recursive_Thread_Mutex,
                     guard,
                     this->pi_lock_,
                     DDS::RETCODE_ERROR);

    publication_id = dw_servant->get_publication_id();

    PublicationMap::iterator it = publication_map_.find(publication_id);

    if (it == publication_map_.end()) {
      GuidConverter converter(publication_id);
      ACE_ERROR_RETURN((LM_ERROR,
                        ACE_TEXT("(%P|%t) ERROR: ")
                        ACE_TEXT("PublisherImpl::delete_datawriter, ")
                        ACE_TEXT("datawriter %C not found.\n"),
                        OPENDDS_STRING(converter).c_str()),
                       DDS::RETCODE_ERROR);
    }

    // We can not erase the datawriter from the datawriter map by the topic
    // name because the map might have multiple datawriters with the same
    // topic name.
    // Find the iterator to the datawriter in the datawriter map and erase
    // by the iterator.
    DataWriterMap::iterator writ;
    DataWriterMap::iterator the_writ = datawriter_map_.end();

    for (writ = datawriter_map_.begin();
         writ != datawriter_map_.end();
         ++writ) {
      if (writ->second == it->second) {
        the_writ = writ;
        break;
      }
    }

    if (the_writ != datawriter_map_.end()) {
      datawriter_map_.erase(the_writ);
    }

    publication_map_.erase(it);

    // Release pi_lock_ before making calls to the transport layer to avoid
    // deadlock situations where threads acquire locks (PublisherImpl
    // pi_lock_, TransportClient reservation_lock and TransportImpl lock_)
    // in reverse order.
    ACE_GUARD_RETURN(reverse_lock_type, reverse_monitor, this->reverse_pi_lock_,
                     DDS::RETCODE_ERROR);

    // Wait for pending samples to drain prior to removing associations
    // and unregistering the publication.
    dw_servant->wait_pending();

    // Call remove_all_associations before unregistering the datawriter
    // with the transport; otherwise some callbacks resulting from
    // remove_association may be lost.
    dw_servant->remove_all_associations();
    dw_servant->cleanup();
  }

  // Not just unregister, but also remove any pending writes/sends.
  dw_servant->unregister_all();

  Discovery_rch disco = TheServiceParticipant->get_discovery(this->domain_id_);
  if (!disco->remove_publication(this->domain_id_,
                                 this->participant_->get_id(),
                                 publication_id)) {
    ACE_ERROR_RETURN((LM_ERROR,
                      ACE_TEXT("(%P|%t) ERROR: ")
                      ACE_TEXT("PublisherImpl::delete_datawriter, ")
                      ACE_TEXT("publication not removed from discovery.\n")),
                     DDS::RETCODE_ERROR);
  }

  // Decrease the ref count after the servant is removed from the maps.
  dw_servant->_remove_ref();

  participant_->remove_adjust_liveliness_timers();

  return DDS::RETCODE_OK;
}
DDS::ReturnCode_t
PublisherImpl::set_qos(const DDS::PublisherQos& qos)
{
  OPENDDS_NO_OBJECT_MODEL_PROFILE_COMPATIBILITY_CHECK(qos, DDS::RETCODE_UNSUPPORTED);

  if (Qos_Helper::valid(qos) && Qos_Helper::consistent(qos)) {
    if (qos_ == qos)
      return DDS::RETCODE_OK;

    // Non-changeable QoS may still be modified before the publisher is enabled.
    if (!Qos_Helper::changeable(qos_, qos) && enabled_ == true) {
      return DDS::RETCODE_IMMUTABLE_POLICY;

    } else {
      qos_ = qos;

      DwIdToQosMap idToQosMap;
      {
        ACE_GUARD_RETURN(ACE_Recursive_Thread_Mutex,
                         guard,
                         this->pi_lock_,
                         DDS::RETCODE_ERROR);

        for (PublicationMap::iterator iter = publication_map_.begin();
             iter != publication_map_.end();
             ++iter) {
          DDS::DataWriterQos qos;
          iter->second->get_qos(qos);
          RepoId id = iter->second->get_publication_id();
          std::pair<DwIdToQosMap::iterator, bool> pair =
            idToQosMap.insert(DwIdToQosMap::value_type(id, qos));

          if (pair.second == false) {
            GuidConverter converter(id);
            ACE_ERROR_RETURN((LM_ERROR,
                              ACE_TEXT("(%P|%t) ")
                              ACE_TEXT("PublisherImpl::set_qos: ")
                              ACE_TEXT("insert id %C to DwIdToQosMap ")
                              ACE_TEXT("failed.\n"),
                              OPENDDS_STRING(converter).c_str()),
                             DDS::RETCODE_ERROR);
          }
        }
      }

      DwIdToQosMap::iterator iter = idToQosMap.begin();

      while (iter != idToQosMap.end()) {
        Discovery_rch disco =
          TheServiceParticipant->get_discovery(this->domain_id_);
        const bool status =
          disco->update_publication_qos(participant_->get_domain_id(),
                                        participant_->get_id(),
                                        iter->first,
                                        iter->second,
                                        this->qos_);

        if (!status) {
          ACE_ERROR_RETURN((LM_ERROR,
                            ACE_TEXT("(%P|%t) PublisherImpl::set_qos, ")
                            ACE_TEXT("failed.\n")),
                           DDS::RETCODE_ERROR);
        }

        ++iter;
      }
    }

    return DDS::RETCODE_OK;

  } else {
    return DDS::RETCODE_INCONSISTENT_POLICY;
  }
}
void
RecorderImpl::remove_associations_i(const WriterIdSeq& writers,
                                    bool notify_lost)
{
  DBG_ENTRY_LVL("RecorderImpl", "remove_associations_i", 6);

  if (writers.length() == 0) {
    return;
  }

  if (DCPS_debug_level >= 1) {
    GuidConverter reader_converter(subscription_id_);
    GuidConverter writer_converter(writers[0]);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) RecorderImpl::remove_associations_i: ")
               ACE_TEXT("bit %d local %C remote %C num remotes %d\n"),
               is_bit_,
               OPENDDS_STRING(reader_converter).c_str(),
               OPENDDS_STRING(writer_converter).c_str(),
               writers.length()));
  }

  DDS::InstanceHandleSeq handles;

  ACE_GUARD(ACE_Recursive_Thread_Mutex, guard, this->publication_handle_lock_);

  // This is used to hold the list of writers which were actually
  // removed, which is a proper subset of the writers which were
  // requested to be removed.
  WriterIdSeq updated_writers;

  CORBA::ULong wr_len;

  // Remove the writers from the writer list.  If a supplied writer
  // is not in the cached writers list then it has already been removed.
  // We just need to remove the writers in the list that have not been
  // removed.
  {
    ACE_WRITE_GUARD(ACE_RW_Thread_Mutex, write_guard, this->writers_lock_);

    wr_len = writers.length();

    for (CORBA::ULong i = 0; i < wr_len; i++) {
      PublicationId writer_id = writers[i];

      WriterMapType::iterator it = this->writers_.find(writer_id);
      if (it != this->writers_.end()) {
        it->second->removed();
        remove_association_sweeper_->cancel_timer(it->second);
      }

      if (this->writers_.erase(writer_id) == 0) {
        if (DCPS_debug_level >= 1) {
          GuidConverter converter(writer_id);
          ACE_DEBUG((LM_DEBUG,
                     ACE_TEXT("(%P|%t) RecorderImpl::remove_associations_i: ")
                     ACE_TEXT("the writer local %C was already removed.\n"),
                     OPENDDS_STRING(converter).c_str()));
        }

      } else {
        push_back(updated_writers, writer_id);
      }
    }
  }

  wr_len = updated_writers.length();

  // Return now if the supplied writers have already been removed.
  if (wr_len == 0) {
    return;
  }

  if (!is_bit_) {
    // The writer should be in the id_to_handle map at this time.
    // Note if it is not there.
    if (this->lookup_instance_handles(updated_writers, handles) == false) {
      if (DCPS_debug_level > 4) {
        ACE_DEBUG((LM_DEBUG,
                   ACE_TEXT("(%P|%t) RecorderImpl::remove_associations_i: ")
                   ACE_TEXT("lookup_instance_handles failed.\n")));
      }
    }

    for (CORBA::ULong i = 0; i < wr_len; ++i) {
      id_to_handle_map_.erase(updated_writers[i]);
    }
  }

  for (CORBA::ULong i = 0; i < updated_writers.length(); ++i) {
    this->disassociate(updated_writers[i]);
  }

  // Mirror the add_associations SUBSCRIPTION_MATCHED_STATUS processing.
  if (!this->is_bit_) {
    // Derive the change in the number of publications writing to this reader.
    int matchedPublications = static_cast<int>(this->id_to_handle_map_.size());
    this->subscription_match_status_.current_count_change =
      matchedPublications - this->subscription_match_status_.current_count;

    // Only process status if the number of publications has changed.
    if (this->subscription_match_status_.current_count_change != 0) {
      this->subscription_match_status_.current_count = matchedPublications;

      /// Section 7.1.4.1: total_count will not decrement.
      /// @TODO: Reconcile this with the verbiage in section 7.1.4.1
      this->subscription_match_status_.last_publication_handle = handles[wr_len - 1];

      // set_status_changed_flag(DDS::SUBSCRIPTION_MATCHED_STATUS, true);

      // DDS::DataReaderListener_var listener
      //   = listener_for(DDS::SUBSCRIPTION_MATCHED_STATUS);

      if (listener_.in()) {
        listener_->on_recorder_matched(this, this->subscription_match_status_);

        // Client will look at it, so next time it looks the change should be 0.
        this->subscription_match_status_.total_count_change = 0;
        this->subscription_match_status_.current_count_change = 0;
      }

      // notify_status_condition();
    }
  }

  // If this remove_association is invoked when the InfoRepo
  // detects a lost writer, then make a callback to notify
  // subscription lost.
  if (notify_lost) {
    this->notify_subscription_lost(handles);
  }

  // if (this->monitor_) {
  //   this->monitor_->report();
  // }
}
void
DataLink::data_received_i(ReceivedDataSample& sample,
                          const RepoId& readerId,
                          const RepoIdSet& incl_excl,
                          ReceiveListenerSet::ConstrainReceiveSet constrain)
{
  DBG_ENTRY_LVL("DataLink", "data_received_i", 6);

  // Which remote publication sent this message?
  const RepoId& publication_id = sample.header_.publication_id_;

  // Locate the set of TransportReceiveListeners associated with this
  // DataLink that are interested in hearing about any samples received
  // from the remote publisher_id.
  if (DCPS_debug_level > 9) {
    const GuidConverter converter(publication_id);
    const GuidConverter reader(readerId);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) DataLink::data_received_i: ")
               ACE_TEXT("from publication %C received sample: %C to readerId %C (%s).\n"),
               OPENDDS_STRING(converter).c_str(),
               to_string(sample.header_).c_str(),
               OPENDDS_STRING(reader).c_str(),
               constrain == ReceiveListenerSet::SET_EXCLUDED ? "SET_EXCLUDED" : "SET_INCLUDED"));
  }

  if (Transport_debug_level > 9) {
    const GuidConverter converter(publication_id);
    ACE_DEBUG((LM_DEBUG,
               ACE_TEXT("(%P|%t) DataLink::data_received_i: ")
               ACE_TEXT("from publication %C received sample: %C.\n"),
               OPENDDS_STRING(converter).c_str(),
               to_string(sample.header_).c_str()));
  }

  ReceiveListenerSet_rch listener_set;
  {
    GuardType guard(this->pub_sub_maps_lock_);
    AssocByRemote::iterator iter = assoc_by_remote_.find(publication_id);
    if (iter != assoc_by_remote_.end())
      listener_set = iter->second;

    if (listener_set.is_nil() && this->default_listener_) {
      this->default_listener_->data_received(sample);
      return;
    }
  }

  if (listener_set.is_nil()) {
    // Nobody has any interest in this message.  Drop it on the floor.
    if (Transport_debug_level > 4) {
      const GuidConverter converter(publication_id);
      ACE_DEBUG((LM_DEBUG,
                 ACE_TEXT("(%P|%t) DataLink::data_received_i: ")
                 ACE_TEXT(" discarding sample from publication %C due to no listeners.\n"),
                 OPENDDS_STRING(converter).c_str()));
    }

    return;
  }

  if (readerId != GUID_UNKNOWN) {
    listener_set->data_received(sample, readerId);
    return;
  }

#ifndef OPENDDS_NO_CONTENT_SUBSCRIPTION_PROFILE

  if (sample.header_.content_filter_
      && sample.header_.content_filter_entries_.length()) {
    ReceiveListenerSet subset(*listener_set.in());
    subset.remove_all(sample.header_.content_filter_entries_);
    subset.data_received(sample, incl_excl, constrain);

  } else {
#endif // OPENDDS_NO_CONTENT_SUBSCRIPTION_PROFILE

    if (DCPS_debug_level > 9) {
      // Just get the set to do our dirty work by having it iterate over its
      // collection of TransportReceiveListeners, and invoke the data_received()
      // method on each one.
      OPENDDS_STRING included_ids;
      bool first = true;
      RepoIdSet::const_iterator iter = incl_excl.begin();
      while (iter != incl_excl.end()) {
        included_ids += (first ? "" : "\n") + OPENDDS_STRING(GuidConverter(*iter));
        first = false;
        ++iter;
      }
      ACE_DEBUG((LM_DEBUG,
                 "(%P|%t) DataLink::data_received_i - normal data received to each subscription in listener_set %s ids:%C\n",
                 constrain == ReceiveListenerSet::SET_EXCLUDED ? "exclude" : "include",
                 included_ids.c_str()));
    }

    listener_set->data_received(sample, incl_excl, constrain);
#ifndef OPENDDS_NO_CONTENT_SUBSCRIPTION_PROFILE
  }
#endif // OPENDDS_NO_CONTENT_SUBSCRIPTION_PROFILE
}
void
DataLink::notify(ConnectionNotice notice)
{
  DBG_ENTRY_LVL("DataLink", "notify", 6);

  VDBG((LM_DEBUG,
        ACE_TEXT("(%P|%t) DataLink::notify: this(%X) notify %C\n"),
        this,
        connection_notice_as_str(notice)));

  GuardType guard(this->pub_sub_maps_lock_);

  // Notify the datawriters of the lost publications
  // due to a connection problem.
  for (IdToSendListenerMap::iterator itr = send_listeners_.begin();
       itr != send_listeners_.end();
       ++itr) {
    TransportSendListener* tsl = itr->second;

    if (tsl != 0) {
      if (Transport_debug_level > 0) {
        GuidConverter converter(itr->first);
        ACE_DEBUG((LM_DEBUG,
                   ACE_TEXT("(%P|%t) DataLink::notify: ")
                   ACE_TEXT("notify pub %C %C.\n"),
                   OPENDDS_STRING(converter).c_str(),
                   connection_notice_as_str(notice)));
      }

      const RepoIdSet& rids = assoc_by_local_[itr->first];

      if (assoc_by_local_.empty() || rids.empty()) {
        if (Transport_debug_level) {
          GuidConverter converter(itr->first);
          ACE_DEBUG((LM_DEBUG,
                     ACE_TEXT("(%P|%t) DataLink::notify: ")
                     ACE_TEXT("try to notify pub %C %C - no associations to notify.\n"),
                     OPENDDS_STRING(converter).c_str(),
                     connection_notice_as_str(notice)));
        }

        break;
      }

      ReaderIdSeq subids;
      set_to_seq(rids, subids);

      switch (notice) {
      case DISCONNECTED:
        tsl->notify_publication_disconnected(subids);
        break;

      case RECONNECTED:
        tsl->notify_publication_reconnected(subids);
        break;

      case LOST:
        tsl->notify_publication_lost(subids);
        break;

      default:
        ACE_ERROR((LM_ERROR,
                   ACE_TEXT("(%P|%t) ERROR: DataLink::notify: ")
                   ACE_TEXT("unknown notice to TransportSendListener\n")));
        break;
      }

    } else {
      if (Transport_debug_level > 0) {
        GuidConverter converter(itr->first);
        ACE_DEBUG((LM_DEBUG,
                   ACE_TEXT("(%P|%t) DataLink::notify: ")
                   ACE_TEXT("not notify pub %C %C\n"),
                   OPENDDS_STRING(converter).c_str(),
                   connection_notice_as_str(notice)));
      }
    }
  }

  // Notify the datareaders registered with TransportImpl
  // of the lost subscriptions due to a connection problem.
  for (IdToRecvListenerMap::iterator itr = recv_listeners_.begin();
       itr != recv_listeners_.end();
       ++itr) {
    TransportReceiveListener* trl = itr->second;

    if (trl != 0) {
      if (Transport_debug_level > 0) {
        GuidConverter converter(itr->first);
        ACE_DEBUG((LM_DEBUG,
                   ACE_TEXT("(%P|%t) DataLink::notify: ")
                   ACE_TEXT("notify sub %C %C.\n"),
                   OPENDDS_STRING(converter).c_str(),
                   connection_notice_as_str(notice)));
      }

      const RepoIdSet& rids = assoc_by_local_[itr->first];

      if (assoc_by_local_.empty() || rids.empty()) {
        if (Transport_debug_level) {
          GuidConverter converter(itr->first);
          ACE_DEBUG((LM_DEBUG,
                     ACE_TEXT("(%P|%t) DataLink::notify: ")
                     ACE_TEXT("try to notify sub %C %C - no associations to notify.\n"),
                     OPENDDS_STRING(converter).c_str(),
                     connection_notice_as_str(notice)));
        }

        break;
      }

      WriterIdSeq pubids;
      set_to_seq(rids, pubids);

      switch (notice) {
      case DISCONNECTED:
        trl->notify_subscription_disconnected(pubids);
        break;

      case RECONNECTED:
        trl->notify_subscription_reconnected(pubids);
        break;

      case LOST:
        trl->notify_subscription_lost(pubids);
        break;

      default:
        ACE_ERROR((LM_ERROR,
                   ACE_TEXT("(%P|%t) ERROR: DataLink::notify: ")
                   ACE_TEXT("unknown notice to datareader.\n")));
        break;
      }

    } else {
      if (Transport_debug_level > 0) {
        GuidConverter converter(itr->first);
        ACE_DEBUG((LM_DEBUG,
                   ACE_TEXT("(%P|%t) DataLink::notify: ")
                   ACE_TEXT("not notify sub %C subscription lost.\n"),
                   OPENDDS_STRING(converter).c_str()));
      }
    }
  }
}