Example #1
#include <iostream>
#include <armadillo>

using namespace std;
using namespace arma;

int main(int argc, char** argv)
{
  // Fill an unsigned integer column vector element by element.
  Col<uint32_t> a;
  a<<1<<2<<4<<3<<1<<6<<7<<2<<4;

  // Transpose for printing, element-wise comparison, and count of elements equal to 1.
  cout<<a.t()<<endl;
  cout<<(a==1).t()<<endl;
  cout<<sum(a==1)<<endl;

  // 10x2 matrix drawn from a standard normal distribution; shift the first five rows by 4.
  mat A;
  A.randn(10,2);
  A.rows(0,4) += 4;
  cout<<A<<endl;

  // Copy 'a', grow it by one element and set the new last element.
  Col<uint32_t> b=a;
  b.resize(b.n_elem+1);
  b(b.n_elem-1)=99;
  cout<<a.t()<<endl;
  cout<<b.t()<<endl;

  // Zero vector with infinity and NaN placed at fixed positions.
  colvec c(10);
  c.zeros();
  c(4)=math::inf();
  c(5)=math::nan();
  cout<<c<<endl;
  // is_finite() accepts both individual elements and whole objects.
  cout<<is_finite(c(3))<<endl;
  cout<<is_finite(c(4))<<endl;
  cout<<is_finite(c(5))<<endl;
  cout<<is_finite(c)<<endl;

  // Append a two-element vector of ones to the end of 'c'.
  colvec d(2);
  d.ones();
  c.insert_rows(c.n_elem,d);
  cout<<c<<endl;

  // Element-wise exponential.
  colvec l(2);
  l << -8.0 << -16.0;
  cout<<l.t()<<endl;
  cout<<exp(l).t()<<endl;

  return 0;
}
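
The element-by-element "<<" initialization and the math::inf() / math::nan() calls above follow Armadillo's older interface. A minimal sketch of the same ideas using the newer interface, assuming a C++11 compiler and a reasonably recent Armadillo release (initializer-list construction, fill::zeros, and the datum constants):

#include <iostream>
#include <armadillo>

using namespace std;
using namespace arma;

int main()
{
  // Initializer-list construction instead of the << injector.
  Col<uint32_t> a = {1, 2, 4, 3, 1, 6, 7, 2, 4};

  // datum::inf and datum::nan instead of math::inf() and math::nan().
  colvec c(10, fill::zeros);
  c(4) = datum::inf;
  c(5) = datum::nan;

  cout << a.t() << endl;
  cout << c.t() << endl;

  return 0;
}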
Example #2
template<typename T1>
inline
bool
hess
  (
         Mat<typename T1::elem_type>&    U,
         Mat<typename T1::elem_type>&    H,
  const Base<typename T1::elem_type,T1>& X,
  const typename arma_blas_type_only<typename T1::elem_type>::result* junk = 0
  )
  {
  arma_extra_debug_sigprint();
  arma_ignore(junk);
  
  arma_debug_check( void_ptr(&U) == void_ptr(&H), "hess(): 'U' is an alias of 'H'" );
  
  typedef typename T1::elem_type eT;
  
  Col<eT> tao;
  
  const bool status = auxlib::hess(H, X.get_ref(), tao);
  
  if(H.n_rows == 0)
    {
    U.reset();
    }
  else
  if(H.n_rows == 1)
    {
    U.ones(1, 1);
    }
  else
  if(H.n_rows == 2)
    {
    U.eye(2, 2);
    }
  else
    {
    U.eye(size(H));
    
    Col<eT> v;
    
    for(uword i=0; i < H.n_rows-2; ++i)
      {
      // TODO: generate v in a more efficient manner; 
      // TODO: the .ones() operation is an overkill, as most of v is overwritten afterwards
      
      v.ones(H.n_rows-i-1);
      
      v(span(1, H.n_rows-i-2)) = H(span(i+2, H.n_rows-1), i);
      
      U(span::all, span(i+1, H.n_rows-1)) -= tao(i) * (U(span::all, span(i+1, H.n_rows-1)) * v * v.t());
      }
    
    U(span::all, H.n_rows-1) = U(span::all, H.n_rows-1) * (eT(1) - tao(H.n_rows-2));
    
    for(uword i=0; i < H.n_rows-2; ++i)
      {
      H(span(i+2, H.n_rows-1), i).zeros();
      }
    }
  
  if(status == false)
    {
    U.soft_reset();
    H.soft_reset();
    arma_debug_warn("hess(): decomposition failed");
    }
  
  return status;
  }
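
The function above appears to be the implementation behind Armadillo's hess(), the Hessenberg decomposition of a square matrix X into X = U*H*U.t(), where U is unitary and H is upper Hessenberg. A minimal usage sketch, assuming Armadillo is installed with LAPACK support:

#include <iostream>
#include <armadillo>

using namespace std;
using namespace arma;

int main()
{
  mat X = randu<mat>(5,5);   // square matrix to decompose

  mat U;   // unitary matrix
  mat H;   // upper Hessenberg matrix (zeros below the first subdiagonal)

  if(hess(U, H, X) == false)
  {
    cout << "decomposition failed" << endl;
    return 1;
  }

  // The reconstruction error should be close to machine precision.
  cout << norm(X - U*H*U.t(), "fro") << endl;

  return 0;
}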
Example #3
int main(int argc, char** argv)
{
  CLI::ParseCommandLine(argc, argv);

  // Get the training dataset filename and load the training data.
  const string trainingDataFilename = CLI::GetParam<string>("train_file");
  mat trainingData;
  data::Load(trainingDataFilename, trainingData, true);

  // Load labels.
  mat labelsIn;

  // Did the user pass in labels?
  if (CLI::HasParam("labels_file"))
  {
    // Load labels.
    const string labelsFilename = CLI::GetParam<string>("labels_file");
    data::Load(labelsFilename, labelsIn, true);
  }
  else
  {
    // Use the last row of the training data as the labels.
    Log::Info << "Using the last dimension of training set as labels." << endl;
    labelsIn = trainingData.row(trainingData.n_rows - 1).t();
    trainingData.shed_row(trainingData.n_rows - 1);
  }

  // Do the labels need to be transposed?
  if (labelsIn.n_rows == 1)
  {
    labelsIn = labelsIn.t();
  }

  // Normalize the labels.
  Col<size_t> labels;
  vec mappings;
  data::NormalizeLabels(labelsIn.unsafe_col(0), labels, mappings);

  // Load test dataset.
  const string testingDataFilename = CLI::GetParam<string>("test_file");
  mat testingData;
  data::Load(testingDataFilename, testingData, true);
  if (testingData.n_rows != trainingData.n_rows)
  {
    Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") "
        << "must be the same as training data (" << trainingData.n_rows
        << ")!" << std::endl;
  }

  int iterations = CLI::GetParam<int>("iterations");
  
  // Create and train the classifier.
  Timer::Start("Training");
  Perceptron<> p(trainingData, labels.t(), iterations);
  Timer::Stop("Training");

  // Time the running of the Perceptron Classifier.
  Row<size_t> predictedLabels(testingData.n_cols);
  Timer::Start("Testing");
  p.Classify(testingData, predictedLabels);
  Timer::Stop("Testing");

  // Un-normalize labels to prepare output.
  vec results;
  data::RevertLabels(predictedLabels.t(), mappings, results);

  // Output the results; one predicted label per line.
  const string outputFilename = CLI::GetParam<string>("output");
  data::Save(outputFilename, results, true, false);
}
Example #4
int main(int argc, char* argv[])
{
    CLI::ParseCommandLine(argc, argv);

    // Check input parameters.
    const string trainingDataFilename = CLI::GetParam<string>("train_file");
    mat trainingData;
    data::Load(trainingDataFilename, trainingData, true);

    // Normalize labels.
    Col<size_t> labels;
    Row<size_t> labelst;
    vec mappings;

    // Did the user pass in labels?
    const string labelsFilename = CLI::GetParam<string>("labels_file");
    if (labelsFilename != "")
    {
        // Load labels.
        mat rawLabels;
        data::Load(labelsFilename, rawLabels, true, false);

        // Do the labels need to be transposed?
        if (rawLabels.n_rows == 1)
            rawLabels = rawLabels.t();

        data::NormalizeLabels(rawLabels.unsafe_col(0), labels, mappings);
    }
    else
    {
        // Use the last row of the training data as the labels.
        Log::Info << "Using last dimension of training data as training labels."
                  << std::endl;
        vec rawLabels = trans(trainingData.row(trainingData.n_rows - 1));
        data::NormalizeLabels(rawLabels, labels, mappings);
        // Remove the label row.
        trainingData.shed_row(trainingData.n_rows - 1);
    }
    labelst = labels.t();

    const string testingDataFilename = CLI::GetParam<std::string>("test_file");
    mat testingData;
    data::Load(testingDataFilename, testingData, true);

    if (testingData.n_rows != trainingData.n_rows)
        Log::Fatal << "Test data dimensionality (" << testingData.n_rows << ") "
                   << "must be the same as training data (" << trainingData.n_rows
                   << ")!" << std::endl;

    const bool incrementalVariance = CLI::HasParam("incremental_variance");

    // Create and train the classifier.
    Timer::Start("training");
    NaiveBayesClassifier<> nbc(trainingData, labelst, mappings.n_elem,
                               incrementalVariance);
    Timer::Stop("training");

    // Time the running of the Naive Bayes Classifier.
    Row<size_t> results;
    Timer::Start("testing");
    nbc.Classify(testingData, results);
    Timer::Stop("testing");

    // Un-normalize labels to prepare output.
    vec rawResults;
    data::RevertLabels(results.t(), mappings, rawResults);

    // Output results.  Transpose: one result per line.
    const string outputFilename = CLI::GetParam<string>("output");
    data::Save(outputFilename, rawResults, true, false);
}