/// Stop the handling of connectons. void do_stop() { while(do_background_work(0)) { if(threads::get_self_ptr()) hpx::this_thread::suspend(hpx::threads::pending, "mpi::parcelport::do_stop"); } stopped_ = true; MPI_Barrier(util::mpi_environment::communicator()); }
void parcelport::put_parcels(std::vector<parcel> const & parcels, std::vector<write_handler_type> const& handlers) { do_background_work(); // schedule message handler if (parcels.size() != handlers.size()) { HPX_THROW_EXCEPTION(bad_parameter, "parcelport::put_parcels", "mismatched number of parcels and handlers"); return; } naming::locality locality_id = parcels[0].get_destination_locality(); #if defined(HPX_DEBUG) // make sure all parcels go to the same locality for (std::size_t i = 1; i != parcels.size(); ++i) { BOOST_ASSERT(locality_id.get_rank() == parcels[i].get_destination_locality().get_rank()); } #endif parcel_cache_.set_parcel(parcels, handlers); }
/// Send a parcel during early bootstrap, before the full parcel-handling
/// machinery is available.
///
/// \param p  the parcel to dispatch.
void parcelport::send_early_parcel(parcel& p)
{
    // Make sure the background message handler has been scheduled
    // before the parcel is queued.
    do_background_work();

    // Early parcels carry no completion callback; the trailing 0 is
    // forwarded to the cache as-is (presumably a tag/index — see
    // parcel_cache_::set_parcel).
    write_handler_type const no_handler;
    parcel_cache_.set_parcel(p, no_handler, 0);
}
/// Queue a single parcel for sending along with its completion handler.
///
/// \param p  the parcel to dispatch.
/// \param f  handler invoked once the write completes.
void parcelport::put_parcel(parcel const& p, write_handler_type const& f)
{
    // Ensure the background message handler is scheduled before the
    // parcel enters the cache.
    do_background_work();

    parcel_cache_.set_parcel(p, f);
}