Пример #1
0
void WeakDiscreteTreeLearner::sortIndexesBasedOnDataPositions(indices_t &positions,
                                                              const size_t featureIndex) const
{
    // Reorder the sample indices so that they follow the responses of the
    // given feature over the training data (ordering defined by 'comparator').
    const FeaturesResponses &responses = _trainingData->getFeatureResponses();
    const comparator byFeatureResponse(responses, featureIndex);
    std::sort(positions.begin(), positions.end(), byFeatureResponse);
}
Пример #2
0
int WeakDiscreteTreeLearner::createNode(
        const weights_t &weights,
        const indices_t &indices, const size_t start, const size_t end,
        TreeNode::shared_ptr &node, double &minError,
        const bool isLeft, const int root_node_bottom_height, const int root_node_left_width) const
{
    // Builds one decision-tree node: evaluates every valid feature of the pool
    // over the training samples indices[start, end), selects the feature with
    // the lowest weighted error, finds the best threshold for it, and stores
    // the resulting node in 'node' (minError is updated by findThreshold).
    //
    // Returns 0 on success, -1 when no node could be built (empty index range,
    // a feature evaluation failed, or no feature was selected).
    // Throws std::invalid_argument when start > end, and std::runtime_error
    // when the feature pool is empty or no adequate split index exists.

    if (start > end)
    {
        throw std::invalid_argument("WeakDiscreteTreeLearner::createNode received start > end but expected start <= end");
    }

    // For all features get responses on every image, then find the feature
    // with the lowest error.
    const size_t numFeatures = _trainingData->getFeaturesPoolSize();
    std::vector<std::pair<double, size_t> >
            minErrorsForSearch(numFeatures, std::make_pair(std::numeric_limits<double>::max(), 0));

    // Work on a private copy of the [start, end) slice: it is re-sorted below.
    indicesCrop_scope:;
    indices_t indicesCrop(indices.begin() + start, indices.begin() + end);
    if (indicesCrop.empty())
    {
        return -1;
    }

    const int binsize = 1000; // histogram resolution used by getErrorEstimate

    // Find the exclusive upper bound of the valid feature indices so the
    // parallel loop below skips a trailing run of invalid features.
    // FIX: the previous code looped with 'size_t featureIndex >= 0', a
    // condition that is always true for an unsigned type, so the index
    // underflowed past zero (and 'numFeatures - 1' itself underflowed when
    // the pool was empty). Counting down from numFeatures avoids both.
    size_t max_valid_feature_index = 0;
    for (size_t featureIndex = numFeatures; featureIndex > 0; --featureIndex)
    {
        if (_trainingData->getFeatureValidity(featureIndex - 1))
        {
            max_valid_feature_index = featureIndex;
            break;
        }
    }

    // Biasing feature selection (push-bias) is not yet supported.
    const double _pushBias = 0;

    int return_value = 0;
#pragma omp parallel for reduction(+:return_value) schedule(guided)
    for (size_t featureIndex = 0; featureIndex < max_valid_feature_index; ++featureIndex)
    {
        if (_trainingData->getFeatureValidity(featureIndex))
        {
            const int minv = (*_mins)[featureIndex], maxv = (*_maxs)[featureIndex];
            double error = std::numeric_limits<double>::max();
            // getErrorEstimate is expected to contribute a negative value on
            // failure, which the reduction accumulates into return_value.
            return_value += getErrorEstimate(weights, indicesCrop, featureIndex, binsize, minv, maxv, error);
            minErrorsForSearch[featureIndex] = std::make_pair(error, featureIndex);
        }
    } // end of "for each feature"

    if (return_value < 0)
    {
        // at least one feature evaluation failed
        return -1;
    }

    // Order the candidate features by increasing error.
    std::sort(minErrorsForSearch.begin(), minErrorsForSearch.end(), sort_pair());

    if (minErrorsForSearch.empty())
    {
        throw std::runtime_error("minErrorsForSearch has size 0: no features in pool?");
    }

    int minFeatureIndex = -1;
    // Error threshold for the (not yet enabled) push-bias selection: accept
    // features whose error is within (1 + _pushBias) of the best one, capped
    // just below 0.5 so selected features stay better than chance.
    const double errorth =
            std::min(0.49999999999, minErrorsForSearch[0].first * (1.0 + _pushBias));

    for (size_t i = 0; i < minErrorsForSearch.size(); ++i)
    {
        const int this_feat_idx = minErrorsForSearch[i].second;
        const double error = minErrorsForSearch[i].first;
        if (_pushBias == 0)
        {
            // No bias: simply take the feature with the minimal error.
            minFeatureIndex = this_feat_idx;
            break;
        }

        // Push-bias path (currently dead code, since _pushBias == 0): stop
        // once the error leaves the acceptance window.
        if (error > errorth)
        {
            break;
        }
    }

    if (minFeatureIndex == -1)
    {
        // no feature selected; nothing to build
        return -1;
    }

    // Sort the sample indices by their response on the selected feature, then
    // search the best threshold/polarity/split position along that ordering.
    sortIndexesBasedOnDataPositions(indicesCrop, minFeatureIndex);
    int splitIndexMin = -1;
    int minThreshold = -1;
    int alphaMin = -1;

    if (findThreshold(weights, indicesCrop, minFeatureIndex, minError, minThreshold,
                      alphaMin, splitIndexMin) == -1)
    {
        return -1;
    }

    if (splitIndexMin < 0)
    {
        throw std::runtime_error("WeakDiscreteTreeLearner::createNode, findThreshold failed to find an adequate split index");
    }

    // Hand the freshly built node back to the caller via the shared_ptr.
    node.reset(new TreeNode(minThreshold, alphaMin,
                            _trainingData->getFeature(minFeatureIndex),
                            minFeatureIndex, indicesCrop, splitIndexMin, isLeft));
    return 0;
}