Example #1
void add_mtime(reclist& thisi, dvec& b, dvec& c, bool debug) {

    if(b.empty() && c.empty()) return;

    double maxtime = thisi.back()->time();
    double mintime = thisi.at(0)->time();

    // merge c into b, sort, and drop duplicate times
    b.insert(b.end(), c.begin(), c.end());
    std::sort(b.begin(), b.end());
    b.erase(std::unique(b.begin(), b.end()), b.end());

    std::size_t i = 0;

    bool dropmin = true;
    bool dropmax = true;

    // add mtimes from argument
    for(i=0; i < b.size(); ++i) {

        if(b.at(i) <= mintime) {
            if(debug && dropmin) {
                Rcpp::Rcout << "dropping mtimes <= min observation time" << std::endl;
                dropmin = false;
            }
            continue;
        }

        if(b.at(i) >= maxtime)  {
            if(debug && dropmax) {
                Rcpp::Rcout << "dropping mtimes >= max observation time" << std::endl;
                dropmax = false;
            }
            break;
        }

        // create a record at this time; it is not an output record and did not come from the data set
        rec_ptr obs = boost::make_shared<datarecord>(100, b[i], 0, -100, 0);
        obs->output(false);
        obs->from_data(false);
        thisi.push_back(obs);
    }

    // restore time ordering now that the new records have been appended
    std::sort(thisi.begin(), thisi.end(), CompByTimePosRec);
}
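
The merge in add_mtime follows a standard idiom: concatenate the two time vectors, sort, erase duplicates, then walk the sorted result and skip anything outside the observation window. A minimal, self-contained sketch of that idiom with plain std::vector<double> and an assumed [mintime, maxtime] window (no mrgsolve record types) might look like this:

#include <algorithm>
#include <iostream>
#include <vector>

int main() {
    std::vector<double> b = {2.0, 6.0, 1.0};
    std::vector<double> c = {6.0, 4.0, 12.0};
    double mintime = 0.0, maxtime = 10.0;   // assumed observation window

    // merge, sort, and drop duplicate times
    b.insert(b.end(), c.begin(), c.end());
    std::sort(b.begin(), b.end());
    b.erase(std::unique(b.begin(), b.end()), b.end());

    // keep only times strictly inside the observation window
    for (double t : b) {
        if (t <= mintime) continue;   // too early: skip this time
        if (t >= maxtime) break;      // too late: the vector is sorted, so stop
        std::cout << "adding mtime " << t << "\n";
    }
    return 0;
}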
Example #2
File: DNN.cpp Project: kSkip/free-time
void DNN::backPropSet(const dvec& input,const dvec& output)
{

    unsigned int i;

    feedForward(input);

    unsigned int L = activations.size() - 1;

    /*
     * Error (delta) terms, one per layer
     */
    std::vector<Matrix> d(L+1);

    /*
     * Copy the target output into a column matrix
     */
    Matrix out(output.size(),1);

    for(i=0;i<output.size();++i) out(i,0) = output.at(i);

    /*
     * Final layer error
     */
    Matrix DC = Matrix::apply(quad_dC_dA,activations.at(L),out);
    d.at(L)   = Matrix::had(DC,activations.at(L));

    /*
     * Backpropagate
     */
    for(i=L;i>0;--i)
    {

        // propagate the error back through layer i's weights
        Matrix wd = weights.at(i-1).T() * d.at(i);
        d.at(i-1) = Matrix::had( wd, activations.at(i-1) );

    }

    /*
     * Accumulate the gradient contribution of this training pair
     */
    for(i=L;i>0;--i)
    {

        bGradient.at(i-1) = bGradient.at(i-1) + d.at(i);

        Matrix wg = d.at(i) * activations.at(i-1).T();
        wGradient.at(i-1) = wGradient.at(i-1) + wg;

    }

}
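
In backPropSet, the weight-gradient term d.at(i) * activations.at(i-1).T() is an outer product: each entry of the layer's error vector is multiplied with each entry of the previous layer's activation vector. A self-contained sketch with plain std::vector and assumed example values (the Matrix type and its operators are specific to this project) could look like:

#include <iostream>
#include <vector>

int main() {
    std::vector<double> d = {0.1, -0.2};       // error terms of layer i (assumed values)
    std::vector<double> a = {0.5, 0.3, 0.9};   // activations of layer i-1 (assumed values)

    // wGradient(r, c) accumulates d[r] * a[c], i.e. the outer product d * a^T
    std::vector<std::vector<double>> wGradient(d.size(), std::vector<double>(a.size(), 0.0));
    for (std::size_t r = 0; r < d.size(); ++r)
        for (std::size_t c = 0; c < a.size(); ++c)
            wGradient[r][c] += d[r] * a[c];

    for (const auto& row : wGradient) {
        for (double v : row) std::cout << v << ' ';
        std::cout << '\n';
    }
    return 0;
}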
Example #3
File: DNN.cpp Project: kSkip/free-time
void DNN::feedForward(const dvec & inputs)
{

    unsigned int layers = activations.size();
    unsigned int i;

    // layer 0: the raw inputs, passed through the sigmoid
    for(i=0;i<inputs.size();++i)
    {
        activations.at(0)(i,0) = sigmoid(inputs.at(i));
    }

    // remaining layers: activations[i] = sigmoid(weights[i-1] * activations[i-1] + bias[i-1])
    for(i=1;i<layers;++i)
    {
        activations.at(i) = (weights.at(i-1)*activations.at(i-1) + bias.at(i-1)).apply(sigmoid);
    }

}
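
Each pass through the second loop in feedForward computes activations[i] = sigmoid(weights[i-1] * activations[i-1] + bias[i-1]). A minimal sketch of one such layer step with plain std::vector and assumed weights (the real code relies on the project's Matrix type) might be:

#include <cmath>
#include <iostream>
#include <vector>

double sigmoid(double x) { return 1.0 / (1.0 + std::exp(-x)); }

int main() {
    // one layer with 3 inputs and 2 outputs; weights, bias, and inputs are assumed values
    std::vector<std::vector<double>> W = {{0.1, -0.3, 0.2}, {0.4, 0.1, -0.5}};
    std::vector<double> bias = {0.05, -0.1};
    std::vector<double> a_prev = {0.6, 0.2, 0.9};   // previous layer's activations

    // a[r] = sigmoid( sum_c W[r][c] * a_prev[c] + bias[r] )
    std::vector<double> a(W.size());
    for (std::size_t r = 0; r < W.size(); ++r) {
        double z = bias[r];
        for (std::size_t c = 0; c < a_prev.size(); ++c) z += W[r][c] * a_prev[c];
        a[r] = sigmoid(z);
    }

    for (double v : a) std::cout << v << ' ';
    std::cout << '\n';
    return 0;
}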