Example #1
0
File: Futures.cpp  Project: CCJY/ACE
  int svc (void)
  {
    ACE_TRACE ("svc");

    ACE_DEBUG ((LM_INFO, ACE_TEXT ("(%t) Manager started\n")));

    // Create pool when you get in the first time.
    create_worker_pool ();

    while (!done ())
      {
        ACE_Time_Value tv ((long)MAX_TIMEOUT);
        tv += ACE_OS::time (0);

        // Get the next message
        ACE_Method_Request *request = this->queue_.dequeue (&tv);
        if (request == 0)
          {
            shut_down ();
            break;
          }

        // Choose a worker.
        Worker *worker = choose_worker ();

        // Ask the worker to do the job.
        worker->perform (request);
      }

    return 0;
  }
Example #2
0
void LDA<Scalar>::partial_fit(std::shared_ptr<corpus::Corpus> corpus) {
    // Run one online pass over `corpus`: fan the documents out to a
    // worker pool, fold each result into the online m-step, then run
    // the corpus-wide e-step and the batch m-step.

    // Randomize the document order for this pass.
    corpus->shuffle();

    const size_t num_docs = corpus->size();

    // Enqueue every document index for the workers to consume.
    for (size_t doc = 0; doc < num_docs; doc++) {
        queue_in_.emplace_back(corpus, doc);
    }

    // Start the threads that will drain queue_in_.
    create_worker_pool();

    // Collect one result per queued document.
    for (size_t doc = 0; doc < num_docs; doc++) {
        auto result = extract_vp_from_queue();
        auto vp  = std::get<0>(result);  // variational parameters
        auto idx = std::get<1>(result);  // which document they belong to

        // Let the thread-safe dispatcher flush any events the
        // workers have produced so far.
        process_worker_events();

        // Online part of the m-step; writes into model_parameters_.
        m_step_->doc_m_step(corpus->at(idx), vp, model_parameters_);
    }

    // All documents processed -- tear the pool down.
    destroy_worker_pool();

    // Any corpus-wide work associated with the e-step.
    e_step_->e_step();

    // Batch part of the m-step; writes into model_parameters_.
    m_step_->m_step(model_parameters_);

    // Announce that the epoch is over.
    get_event_dispatcher()->template dispatch<events::EpochProgressEvent<Scalar> >(model_parameters_);
}
Example #3
0
  int svc (void)
  {
    ACE_TRACE ("Manager::svc");

    ACE_DEBUG ((LM_INFO, ACE_TEXT ("(%t) Manager started\n")));

    // Create pool.
    create_worker_pool ();

    while (!done ())
      {
        ACE_Message_Block *mb = 0;
        ACE_Time_Value tv ((long)MAX_TIMEOUT);
        tv += ACE_OS::time (0);

        // Get a message request.
        if (this->getq (mb, &tv) < 0)
          {
            shut_down ();
            break;
          }

        // Choose a worker.
        Worker *worker = 0;
        {
          ACE_GUARD_RETURN (ACE_Thread_Mutex,
                            worker_mon, this->workers_lock_, -1);

          while (this->workers_.is_empty ())
            workers_cond_.wait ();

          this->workers_.dequeue_head (worker);
        }

        // Ask the worker to do the job.
        worker->putq (mb);
      }

    return 0;
  }
Example #4
0
typename LDA<Scalar>::MatrixX LDA<Scalar>::transform(const Eigen::MatrixXi& X) {
    // Compute the per-document variational gamma vectors for the
    // documents in X via the worker pool, one gamma per column.

    // Narrow the generic parameters to the model-parameter type so we
    // can read beta's dimensions.
    auto model = std::static_pointer_cast<parameters::ModelParameters<Scalar> >(
        model_parameters_
    );

    // One column per document in X; row count taken from beta.
    MatrixX gammas(model->beta.rows(), X.cols());

    // Wrap the raw matrix in a corpus the workers understand.
    auto corpus = get_corpus(X);

    const size_t num_docs = corpus->size();

    // Enqueue every document index for the workers to consume.
    for (size_t doc = 0; doc < num_docs; doc++) {
        queue_in_.emplace_back(corpus, doc);
    }

    // Start the threads that will drain queue_in_.
    create_worker_pool();

    // Collect one result per queued document; the returned index says
    // which column of `gammas` the result belongs to.
    for (size_t doc = 0; doc < num_docs; doc++) {
        auto result = extract_vp_from_queue();
        auto vp  = std::get<0>(result);
        auto col = std::get<1>(result);

        gammas.col(col) =
            std::static_pointer_cast<parameters::VariationalParameters<Scalar> >(vp)->gamma;

        // Let the thread-safe dispatcher flush any events the
        // workers have produced so far.
        process_worker_events();
    }

    // All documents processed -- tear the pool down.
    destroy_worker_pool();

    return gammas;
}