/*! \brief Constructor where Teuchos communicator is specified \param ia the problem input adapter \param p the parameter list \param problemComm the problem communicator \param soln the solution \param graphModel the graph model The constructor does global communication to compute the metrics. The rest of the methods are local. */ EvaluatePartition(const Adapter *ia, ParameterList *p, const RCP<const Comm<int> > &problemComm, const PartitioningSolution<Adapter> *soln, const RCP<const GraphModel<typename Adapter::base_adapter_t> > &graphModel= Teuchos::null): numGlobalParts_(0), targetGlobalParts_(0), numNonEmpty_(0), metrics_(), metricsConst_(), graphMetrics_(), graphMetricsConst_() { sharedConstructor(ia, p, problemComm, soln, graphModel); }
/*! \brief Constructor for MPI builds \param ia the problem input adapter \param p the parameter list \param comm the problem communicator \param soln the solution \param graphModel the graph model The constructor does global communication to compute the metrics. The rest of the methods are local. */ EvaluatePartition(const Adapter *ia, ParameterList *p, MPI_Comm comm, const PartitioningSolution<Adapter> *soln, const RCP<const GraphModel<typename Adapter::base_adapter_t> > &graphModel= Teuchos::null): numGlobalParts_(0), targetGlobalParts_(0), numNonEmpty_(0), metrics_(), metricsConst_(), graphMetrics_(), graphMetricsConst_() { RCP<Teuchos::OpaqueWrapper<MPI_Comm> > wrapper = Teuchos::opaqueWrapper(comm); RCP<const Comm<int> > problemComm = rcp<const Comm<int> >(new Teuchos::MpiComm<int>(wrapper)); sharedConstructor(ia, p, problemComm, soln, graphModel); }
/*! \brief Construct a command buffer for the given device.
 *
 *  \param device the device this command buffer is associated with
 *
 *  NOTE(review): the init-list entry Device(device) initializes what appears
 *  to be a Device base subobject (or an identically-named member) by copy —
 *  confirm that copying the Device here is intended.
 */
CommandBuffer::CommandBuffer(Device &device) :
  Device(device)
{
  // Common setup shared by all CommandBuffer constructors.
  sharedConstructor();
}
/*! \brief Construct a command buffer bound to a program and its arguments.
 *
 *  \param device    the device this command buffer is associated with
 *  \param program   the program to bind to this command buffer
 *  \param arguments the arguments to bind to this command buffer
 */
CommandBuffer::CommandBuffer(Device &device,
                             Program &program,
                             Arguments &arguments) :
  Device(device)
{
  // Common setup shared by all CommandBuffer constructors.
  sharedConstructor();
  // Bind the arguments before the program — order preserved from the
  // original implementation in case the bindings are order-sensitive.
  arguments.bindTo(*this);
  program.bindTo(*this);
}