Example no. 1
0
/// Fits the per-receptive-field linear weights (thetas_) of the LWR model
/// from paired input/target samples via locally weighted ridge regression.
///
/// @param xInputVector  input samples, one per row of the basis matrix
/// @param yTargetVector target values; must match xInputVector in size
/// @return false (with an error log) when the model is uninitialized, the
///         vector sizes disagree, or the basis matrix cannot be generated;
///         true on success.
bool LocallyWeightedRegression::learnWeights(const VectorXd &xInputVector, const VectorXd &yTargetVector)
{
    if (!initialized_)
    {
        printf("ERROR: LWR model is not initialized.\n");
        return false;
    }

    if (xInputVector.size() != yTargetVector.size())
    {
        // Eigen's size() returns a signed index type that is wider than int
        // on most 64-bit platforms; cast explicitly so the %i conversion is
        // well-defined instead of invoking undefined behavior.
        printf("ERROR: Input (%i) and target (%i) vector have different sizes.\n",
               static_cast<int>(xInputVector.size()), static_cast<int>(yTargetVector.size()));
        return false;
    }

    MatrixXd basisFunctionMatrix = MatrixXd::Zero(xInputVector.size(), centers_.size());
    if (!generateBasisFunctionMatrix(xInputVector, basisFunctionMatrix))
    {
        printf("ERROR: Could not generate basis function matrix.\n");
        return false;
    }

    // Weighted sum of squared inputs per receptive field:
    //   sx_j = sum_i psi_ij * x_i^2
    // (outer product replicates x^2 across the numRfs_ columns).
    MatrixXd tmpMatrixA = (xInputVector.cwise().square() * MatrixXd::Ones(1, numRfs_)).cwise() * basisFunctionMatrix;
    VectorXd tmpMatrixSx = tmpMatrixA.colwise().sum();

    // Weighted input/target cross terms per receptive field:
    //   sxtd_j = sum_i psi_ij * x_i * y_i
    MatrixXd tmpMatrixB = ((xInputVector.cwise() * yTargetVector) * MatrixXd::Ones(1, numRfs_)).cwise() * basisFunctionMatrix;
    VectorXd tmpMatrixSxtd = tmpMatrixB.colwise().sum();

    // Small ridge term keeps the elementwise division numerically stable
    // when a kernel receives (almost) no activation.
    const double ridgeRegression = 1e-10;
    thetas_ = tmpMatrixSxtd.cwise() / (tmpMatrixSx.cwise() + ridgeRegression);

    return true;
}
Example no. 2
0
/// Exponentiates a vector in place and normalizes it to sum to one
/// (i.e. an in-place softmax). Subtracting the maximum coefficient before
/// exponentiating prevents overflow in exp() without changing the result.
///
/// @param v vector to transform; left untouched when empty.
void expAndNormalize(VectorXd& v)
{
    if (v.size() == 0) return;

    // Use Eigen's built-in reduction instead of a hand-rolled max loop.
    const double maxValue = v.maxCoeff();

    v = (v.cwise() - maxValue).cwise().exp();
    v /= v.sum();
}