Example #1
1
bool link_ode(
	size_t                     size       ,
	size_t                     repeat     ,
	CppAD::vector<double>      &x         ,
	CppAD::vector<double>      &jacobian
)
{	// -------------------------------------------------------------
	// setup

	size_t n = size;
	assert( x.size() == n );

	size_t m = 0;              // derivative order for ode_evaluate: 0 = function values only
	CppAD::vector<double> f(n);

	while(repeat--)
	{ 	// choose next x value
		uniform_01(n, x);

		// evaluate function
		CppAD::ode_evaluate(x, m, f);

	}
	// no derivatives are computed in this pure-double case;
	// return the function values in the first n entries of jacobian
	size_t i;
	for(i = 0; i < n; i++)
		jacobian[i] = f[i];
	return true;
}
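
The routine above fills x by calling uniform_01(n, x), which is not defined in this listing. The following is only a hypothetical stand-in, a minimal sketch assuming a standard pseudo-random source; CppAD's actual speed-test utility serves the same purpose but is implemented differently.

// Hypothetical stand-in for the uniform_01 helper called above;
// not CppAD's speed-test utility, just an illustration of its role.
#include <random>
#include <cppad/cppad.hpp>

void uniform_01(size_t n, CppAD::vector<double>& x)
{	// fill x with pseudo-random values drawn uniformly from [0, 1)
	static std::mt19937 gen(12345);
	static std::uniform_real_distribution<double> dist(0.0, 1.0);
	x.resize(n);
	for(size_t j = 0; j < n; j++)
		x[j] = dist(gen);
}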
void forest::train(const vector<vector<float>>& x, const vector<float>& y,
	const size_t num_trees, const size_t mtry, const size_t seed)
{
	// Initialize: allocate the trees, seed the RNG, and bind a U(0,1) sampler
	resize(num_trees);
	rng.seed(seed);
	u01 = [&]()
	{
		return uniform_01(rng);
	};

	// Train each tree, accumulating variable importance and out-of-bag statistics
	const size_t num_samples = x.size();
	const size_t num_variables = x.front().size();
	incPurity.resize(num_variables, 0);
	incMSE.resize(num_variables, 0);
	impSD.resize(num_variables, 0);
	vector<float> oobPreds(num_samples, 0);
	vector<size_t> oobTimes(num_samples, 0);
	for (auto& t : *this)
	{
		t.train(x, y, mtry, u01, incPurity, incMSE, impSD, oobPreds, oobTimes);
	}

	// Average incPurity and incMSE over the trees; convert impSD into the standard error of incMSE
	for (size_t i = 0; i < num_variables; ++i)
	{
		incPurity[i] /= size();
		incMSE[i] /= size();
		impSD[i] = sqrt((impSD[i] / size() - incMSE[i] * incMSE[i]) / size());
	}

	// Average the out-of-bag predictions per sample and compute the out-of-bag MSE
	mse = 0;
	size_t jout = 0;
	for (size_t s = 0; s < num_samples; ++s)
	{
		if (!oobTimes[s]) continue;
		const auto p = oobPreds[s] / oobTimes[s];
		mse += (p - y[s]) * (p - y[s]);
		++jout;
	}
	mse /= jout;

	// Calculate rsq, the pseudo R-squared: 1 - mse / var(y)
	float yAvg = 0;
	for (size_t i = 0; i < num_samples; ++i)
	{
		yAvg += y[i];
	}
	yAvg /= num_samples;
	float yVar = 0;
	for (size_t i = 0; i < num_samples; ++i)
	{
		yVar += (y[i] - yAvg) * (y[i] - yAvg);
	}
	rsq = 1 - mse * num_samples / yVar;
}
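
The MSE and rsq arithmetic at the end of forest::train can be exercised on its own. The sketch below repeats the same computation outside the forest class on made-up toy values for y, oobPreds and oobTimes (all hypothetical), just to illustrate the out-of-bag averaging and the pseudo R-squared.

// Self-contained sketch of the out-of-bag MSE and rsq arithmetic used in
// forest::train; the data below are invented for illustration only.
#include <cstdio>
#include <vector>
using std::vector;

int main()
{
	vector<float>  y        = { 1.0f, 2.0f, 3.0f, 4.0f };
	vector<float>  oobPreds = { 2.2f, 0.0f, 6.3f, 3.6f };  // summed OOB predictions
	vector<size_t> oobTimes = { 2, 0, 2, 1 };              // times each sample was OOB

	// average prediction per sample, skipping samples that were never OOB
	float mse = 0;
	size_t jout = 0;
	for (size_t s = 0; s < y.size(); ++s)
	{
		if (!oobTimes[s]) continue;
		const float p = oobPreds[s] / oobTimes[s];
		mse += (p - y[s]) * (p - y[s]);
		++jout;
	}
	mse /= jout;

	// rsq = 1 - mse / var(y), with yVar kept as a sum of squares as above
	float yAvg = 0;
	for (size_t i = 0; i < y.size(); ++i) yAvg += y[i];
	yAvg /= y.size();
	float yVar = 0;
	for (size_t i = 0; i < y.size(); ++i) yVar += (y[i] - yAvg) * (y[i] - yAvg);
	const float rsq = 1 - mse * y.size() / yVar;

	std::printf("mse = %f, rsq = %f\n", mse, rsq);
	return 0;
}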
Example #3
0
bool link_ode(
	size_t                     size       ,
	size_t                     repeat     ,
	CppAD::vector<double>      &x         ,
	CppAD::vector<double>      &jacobian
)
{
	// speed test global option values
	if( global_option["atomic"] )
		return false;

	// optimization option: remove comparison operators from the optimized tape
	std::string options="no_compare_op";
	// --------------------------------------------------------------------
	// setup
	assert( x.size() == size );
	assert( jacobian.size() == size * size );

	typedef CppAD::AD<double>       ADScalar;
	typedef CppAD::vector<ADScalar> ADVector;

	size_t j;
	size_t p = 0;              // use ode to calculate function values
	size_t n = size;           // number of independent variables
	size_t m = n;              // number of dependent variables
	ADVector  X(n), Y(m);      // independent and dependent variables
	CppAD::ADFun<double>  f;   // AD function

	// -------------------------------------------------------------
	if( ! global_option["onetape"] ) while(repeat--)
	{	// choose next x value
		uniform_01(n, x);
		for(j = 0; j < n; j++)
			X[j] = x[j];

		// declare the independent variable vector
		Independent(X);

		// evaluate function
		CppAD::ode_evaluate(X, p, Y);

		// create function object f : X -> Y
		f.Dependent(X, Y);

		if( global_option["optimize"] )
			f.optimize(options);

		// skip comparison operators
		f.compare_change_count(0);

		jacobian = f.Jacobian(x);
	}
	else
	{	// an x value
		uniform_01(n, x);
		for(j = 0; j < n; j++)
			X[j] = x[j];

		// declare the independent variable vector
		Independent(X);

		// evaluate function
		CppAD::ode_evaluate(X, p, Y);

		// create function object f : X -> Y
		f.Dependent(X, Y);

		if( global_option["optimize"] )
			f.optimize(options);

		// skip comparison operators
		f.compare_change_count(0);

		while(repeat--)
		{	// get next argument value
			uniform_01(n, x);

			// evaluate jacobian
			jacobian = f.Jacobian(x);
		}
	}
	return true;
}
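
For reference, CppAD::ADFun::Jacobian returns a row-major vector of size m * n, so in the example above jacobian[i * n + j] holds the partial of Y[i] with respect to X[j]. The helper below is only an illustrative sketch (print_jacobian is not part of the speed test) showing how that layout could be read back.

// Illustrative only: print the row-major Jacobian returned by f.Jacobian(x).
// Here m == n because ode_evaluate maps R^n -> R^n.
#include <cstdio>
#include <cppad/cppad.hpp>

void print_jacobian(const CppAD::vector<double>& jacobian, size_t n)
{	for(size_t i = 0; i < n; i++)
	{	for(size_t j = 0; j < n; j++)
			std::printf("%10.3e ", jacobian[i * n + j]);
		std::printf("\n");
	}
}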