void mexFunctionTest(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    if (nrhs < 2 || nrhs > 3)
    {
        mexPrintf("Usage: [score] = SQBTreesPredict( model, feats, [maxIters] )\n");
        mexPrintf("\tfeats must be of type SINGLE\n");
        mexPrintf("\tmaxIters is optional and limits the amount of weak learners evaluated for classification.");
        mexErrMsgTxt("Incorrect input format.\n");
    }

    if (nlhs != 1)
        mexErrMsgTxt("Two output args expected");

// aliases for the MEX input arguments: model, feature matrix, optional iteration cap
#define mModel    (prhs[0])
#define mFeats    (prhs[1])
#define mMaxIters (prhs[2])

    MatlabInputMatrix<FeatsType> pFeats( mFeats, 0, 0, "feats" );

    TreeBoosterType TB;

    // load model
    TB.loadFromMatlab( mModel );

    unsigned maxIters = TB.numWeakLearners();
    if (nrhs >= 3)
    {
        MatlabInputMatrix<unsigned int> pMaxIters( mMaxIters, 1, 1, "maxiters" );
        unsigned inputMaxIters = pMaxIters.data()[0];

        if (inputMaxIters == 0)
            mexErrMsgTxt("maxIters must be greater than zero.");

        if (inputMaxIters > maxIters)
            mexPrintf("-- WARNING: maxIters is greater than the number of weak learners in the model, using all of them.\n");
        else
        {
            maxIters = inputMaxIters;
            mexPrintf("Limiting number of weak learners to %d\n", (int)maxIters);
        }
    }

    // copy the feature values into an Eigen array (the Map itself is only a view over the MEX data)
    gFeatArrayType feats = Eigen::Map< const gFeatArrayType >( pFeats.data(), pFeats.rows(), pFeats.cols() );

    TreeBoosterType::ResponseArrayType newScores;
    TB.predict( TreeBoosterType::SampleListType(feats),
                TreeBoosterType::FeatureValueObjectType(feats),
                newScores,
                maxIters );

    MatlabOutputMatrix<double>   outMatrix( &plhs[0], feats.rows(), 1 );
    for (unsigned i=0; i < feats.rows(); i++)
        outMatrix.data()[i] = newScores.coeff(i);
}
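
// Usage sketch from the MATLAB side (hedged: variable names are hypothetical, the MEX
// binary is assumed to be built as SQBTreesPredict, and `model` to come from the
// matching training MEX function). feats must be SINGLE; maxIters is read through
// MatlabInputMatrix<unsigned int>, so an unsigned integer value is expected:
//
//   feats  = single(featureMatrix);                      % one row per sample
//   scores = SQBTreesPredict(model, feats);              % evaluate all weak learners
//   scores = SQBTreesPredict(model, feats, uint32(50));  % stop after 50 weak learners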
Example #2
// Standalone test harness: takes the booster directly instead of the usual MEX
// entry-point arguments (int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]).
void mexFunctionTest(TreeBoosterType &TB)
{
    printf("Testing!\n");
    // The MEX argument parsing and model loading from Example #1 are bypassed here;
    // instead, the model is read from a libconfig file below.

    // hard-coded path to the libconfig model file (adjust to your environment)
    static const char *input_file =
      "/cvlabdata1/home/pglowack/Work/Vaa3D-BuiltWithDefaultScripts/vaa3d_tools/"
        "bigneuron_ported/AmosSironi_PrzemyslawGlowacki/SQBTree_plugin/aaaaaaaa.cfg";

    libconfig::Config cfg;

    // Read the file. If there is an error, report it and exit.
    try
    {
      cfg.readFile(input_file);
    }
    catch(const libconfig::FileIOException &fioex)
    {
      std::cerr << "I/O error while reading file." << std::endl;
//      return(EXIT_FAILURE);
    }
    catch(const libconfig::ParseException &pex)
    {
      std::cerr << "Parse error at " << pex.getFile() << ":" << pex.getLine()
                << " - " << pex.getError() << std::endl;
//      return(EXIT_FAILURE);
    }

    libconfig::Setting &root = cfg.getRoot();

    libconfig::Setting &regressor = root["regressor"];

    TB.loadFromLibconfig(regressor);
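
    // A rough sketch (assumption) of the expected .cfg layout: a top-level libconfig
    // group named "regressor" whose contents are whatever
    // TreeBoosterType::loadFromLibconfig expects (e.g. the serialized weak learners
    // of the boosted model):
    //
    //   regressor =
    //   {
    //     // boosted-trees parameters and weak learners
    //   };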

    unsigned maxIters = TB.numWeakLearners();
    // The maxIters clamping from Example #1 is omitted here; all weak learners are evaluated.

    // hard-coded test feature matrix (4 samples x 3 features)
    FeatsType testFeaturesArray[] = { 3,  1,  2,
                                      6,  3,  4,
                                     -1, -2, -4,
                                     -2, -3, -1 };
    unsigned int testFeaturesColsNo = 3;
    unsigned int testFeaturesRowsNo = 4;
    gFeatArrayType feats = Eigen::Map< const gFeatArrayType >( testFeaturesArray, testFeaturesRowsNo, testFeaturesColsNo );
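
    // Note (assumption): Eigen::Map interprets the raw buffer in gFeatArrayType's
    // storage order, which is column-major unless the typedef requests RowMajor.
    // With the default, the 4x3 mapping above is read column by column:
    //
    //   [  3   3  -4 ]
    //   [  1   4  -2 ]
    //   [  2  -1  -3 ]
    //   [  6  -2  -1 ]
    //
    // If the row-wise layout suggested by the initializer's indentation is intended,
    // either use a RowMajor array type for the Map or reorder the buffer.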

    TreeBoosterType::ResponseArrayType newScores;
    TB.predict( TreeBoosterType::SampleListType(feats),
                TreeBoosterType::FeatureValueObjectType(feats),
                newScores,
                maxIters );

    // print the predicted scores to stdout
    for (unsigned i=0; i < feats.rows(); i++)
      std::cout << newScores.coeff(i) << std::endl;

}

// The original MEX entry point dispatches on the SQBTREES_TRAIN compile-time flag:
//
// void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
// {
// #ifdef SQBTREES_TRAIN
//     mexFunctionTrain(nlhs, plhs, nrhs, prhs);
// #else
//     mexFunctionTest(nlhs, plhs, nrhs, prhs);
// #endif
// }