Ejemplo n.º 1
0
/*
 * K&R-style quiz loop: keeps asking how many pounds are in a firkin of
 * butter until the user answers 56.  critic() (defined elsewhere) nags
 * the user and re-reads `units` on each wrong answer.
 */
int main(void)
{
      int units = 0;  /* bug fix: was uninitialized; scanf may fail and leave it untouched (UB) */

      printf("How many pounds to a firkin of butter?\n");
      /* Bug fix: the original ignored scanf's result, so bad input or
       * EOF compared an indeterminate value and could loop forever. */
      if (scanf("%d", &units) != 1) {
            fprintf(stderr, "Invalid input.\n");
            return 1;
      }
      while (units != 56)
            critic(&units);
      printf("You must have looked it up!\n");
      return 0;
}
Ejemplo n.º 2
0
// Fits the polynomial to single-column labels: minimizes the regression
// critic's error with a momentum-based greedy search, then converts the
// resulting Bezier control points into polynomial coefficients.
void GPolynomialSingleLabel::train(GMatrix& features, GMatrix& labels)
{
	GAssert(labels.cols() == 1);
	init(features.cols());

	// Objective scored against the supplied training data.
	GPolynomialRegressCritic objective(this, features, labels);

	// Alternative optimizer kept for reference:
	//GStochasticGreedySearch optimizer(&objective);
	GMomentumGreedySearch optimizer(&objective);
	optimizer.searchUntil(100, 30, .01);

	setCoefficients(optimizer.currentVector());
	fromBezierCoefficients();
}
Ejemplo n.º 3
0
Archivo: global.c Proyecto: run100/c
/*
 * Firkin quiz using a file-scope `units` defined in another translation
 * unit; critic() (also defined elsewhere) re-prompts and re-reads the
 * value on every wrong answer.
 */
int main()
{
  extern int units;

  /* Bug fix: the original ignored scanf's result; on EOF or non-numeric
   * input `units` stayed unchanged and critic() looped forever. */
  if (scanf("%d", &units) != 1) {
    fprintf(stderr, "Invalid input.\n");
    return 1;
  }
  while (units != 56) {
    critic();
  }
  printf("You must have looked it up!\n");
  return 0;
}
Ejemplo n.º 4
0
/*
 * Firkin quiz with `units` defined in another file; critic() (defined
 * elsewhere) re-prompts and re-reads the value on every wrong answer.
 */
int main()
{
	extern int units;	/* optional re-declaration within the same file */

	printf("How many pounds to a firkin of butter?\n");	/* typo fix: "frikin" -> "firkin" */
	/* Bug fix: the original ignored scanf's result, so EOF or bad
	 * input left units unchanged and the loop could spin forever. */
	if (scanf("%d", &units) != 1)
	{
		fprintf(stderr, "Invalid input.\n");
		return 1;
	}
	while(units != 56)
	{
		critic();
	}
	printf("You must have looked it up.\n");
	return 0;
}
Ejemplo n.º 5
0
/*
 * Firkin quiz: loop until the user answers 56; critic() (defined in
 * another file) re-prompts and re-reads the shared `units` variable.
 */
int main(void)
{
	extern int		units;	/* an optional redeclaration */

	printf("How many pounds to a firkin of butter?\n");

	/* Bug fix: the original ignored scanf's result, so EOF or
	 * non-numeric input left units unchanged and looped forever. */
	if (scanf("%d", &units) != 1) {
		fprintf(stderr, "Invalid input.\n");
		return 1;
	}

	while (units != 56) critic();

	printf("You must have looked it up!\n");

	return 0;
}
Ejemplo n.º 6
0
/// Builds an ARAC agent from a linear state-value critic and a Boltzmann
/// policy over the two admissible actions {-1, +1}, using the learning
/// rates and trace-decay parameter held by this factory.
std::unique_ptr<ARACAgent> FactoryOfAgents::makeARACAgent() const
{
    // Critic: linear regressor over the observation as state-value estimate.
    LinearRegressor valueFunction(dimObservation);
    Critic valueCritic(valueFunction);

    // Actor: Boltzmann policy choosing between the two admissible actions.
    std::vector<double> actionSet {-1.0, 1.0};
    BoltzmannPolicy boltzmann(dimObservation, actionSet);
    StochasticActor actorComponent(boltzmann);

    // Assemble the agent with the factory's learning parameters.
    return std::unique_ptr<ARACAgent>(new ARACAgent(actorComponent,
                                                    valueCritic,
                                                    *baselineLearningRatePtr,
                                                    *criticLearningRatePtr,
                                                    *actorLearningRatePtr,
                                                    lambda));
}
Ejemplo n.º 7
0
/// Builds a PGPE-flavoured ARAC agent: the same linear state-value critic,
/// but the actor wraps a binary controller whose parameters are sampled
/// from a Gaussian distribution (parameter-exploring policy gradients).
std::unique_ptr<ARACAgent> FactoryOfAgents::makePGPEAgent() const
{
    // Critic: linear regressor over the observation as state-value estimate.
    LinearRegressor valueFunction(dimObservation);
    Critic valueCritic(valueFunction);

    // Actor: binary controller with Gaussian parameter exploration.
    BinaryPolicy binaryController(dimObservation);
    GaussianDistribution paramDistribution(binaryController.getDimParameters());
    PGPEPolicy explorationPolicy(binaryController, paramDistribution, 1.0);
    StochasticActor actorComponent(explorationPolicy);

    // Assemble the agent with the factory's learning parameters.
    return std::unique_ptr<ARACAgent>(new ARACAgent(actorComponent,
                                                    valueCritic,
                                                    *baselineLearningRatePtr,
                                                    *criticLearningRatePtr,
                                                    *actorLearningRatePtr,
                                                    lambda));
}