void MatrixErrorValueCalculator::calculateParallelErrorValue(vector< ModelTuningParameters > & paramList) {

	vector< double > errorValues(paramList.size());

    // Run the model for every candidate parameter set in parallel
    vector< ModelResults > results = model->runParallelModel(paramList);

	for (unsigned int i = 0; i < paramList.size(); i++) {
		showMessage("Model VdVdtMatrices\n",5,fixedParams);

    	// Compare each model trace's VdVdt matrix with the corresponding experimental matrix
    	// and accumulate the weighted distance into this candidate's error value
    	for (int nTrace = 0; nTrace < results[i].getLength(); nTrace++) {
        	modelVdVdtMatrix->readFrom(results[i][nTrace]);
        	errorValues[i] += results[i][nTrace].getWeight() * expVdVdtMatrices[nTrace]->compare(*modelVdVdtMatrix);
			showMessage(modelVdVdtMatrix->toString() + "\n",5,fixedParams);        	
    	}

    	numberOfEvaluations++;

    	errorValueHistory.push_back(pair< ModelTuningParameters, double >(paramList[i],errorValues[i]));

		showMessage("Eval: " + str(numberOfEvaluations) + " Generation: " + str(numberOfGenerations) + " Calculated error value of: " + paramList[i].toString() + ": " + str(errorValues[i]) + "\n",3,fixedParams);

    	// If an export file is open, append generation, evaluation count, error value and parameters
    	if (exportFileStream.is_open()) {
        	exportFileStream << numberOfGenerations << " "<< numberOfEvaluations << " " << errorValues[i] << " ";
        	for (int j = 0; j < paramList[i].getLength(); j++) {
            	exportFileStream << (paramList[i][j]) << " ";
        	}
        	exportFileStream << endl;
    	}

    	// Store the computed error value on the candidate itself
    	paramList[i].setErrorValue(errorValues[i]);

	}
	
	numberOfGenerations++;

}
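For reference, the error value attached to each candidate is simply a weighted sum over traces of a matrix-comparison distance. A minimal, self-contained sketch of that accumulation follows; the Trace struct, accumulateError, and the sample numbers are hypothetical stand-ins, not Neurofitter types.

#include <iostream>
#include <vector>

// Hypothetical stand-in for one trace: its weight and the distance returned
// by comparing the model's VdVdt matrix with the experimental one.
struct Trace {
    double weight;
    double distance;
};

// Mirrors the inner loop above: errorValue += weight * compare(...)
double accumulateError(const std::vector<Trace>& traces) {
    double errorValue = 0.0;
    for (const Trace& t : traces)
        errorValue += t.weight * t.distance;
    return errorValue;
}

int main() {
    std::vector<Trace> traces = {{1.0, 0.3}, {0.5, 0.8}};
    std::cout << accumulateError(traces) << std::endl; // prints 0.7
    return 0;
}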
Example #2
File: LMA.cpp  Project: lespeholt/OpenANN
bool LMA::step()
{
  OPENANN_CHECK(opt);
  // Lazily set up the ALGLIB optimizer state on the first call
  if(iteration < 0)
    initialize();
  OPENANN_CHECK(n > 0);

  try
  {
    // ALGLIB reverse communication: minlmiteration() hands control back whenever
    // it needs residuals or derivatives evaluated at state.x
    while(alglib_impl::minlmiteration(state.c_ptr(), &envState))
    {
      // Only the residual vector f_i(x) is requested
      if(state.needfi)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        for(unsigned i = 0; i < opt->examples(); i++)
          state.fi[i] = opt->error(i);
        // A full LM iteration has completed; notify the problem and yield to the caller
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      // Both the residual vector and the Jacobian are requested
      if(state.needfij)
      {
        for(unsigned i = 0; i < n; i++)
          parameters(i) = state.x[i];
        opt->setParameters(parameters);
        for(unsigned ex = 0; ex < opt->examples(); ex++)
        {
          opt->errorGradient(ex, errorValues(ex), gradient);
          state.fi[ex] = errorValues(ex);
          for(unsigned d = 0; d < opt->dimension(); d++)
            state.j[ex][d] = gradient(d);
        }
        if(iteration != state.c_ptr()->repiterationscount)
        {
          iteration = state.c_ptr()->repiterationscount;
          opt->finishedIteration();
          return true;
        }
        continue;
      }
      // Periodic progress report from ALGLIB; nothing needs to be computed
      if(state.xupdated)
        continue;
      throw alglib::ap_error("ALGLIB: error in 'minlmoptimize' (some "
                             "derivatives were not provided?)");
    }
    alglib_impl::ae_state_clear(&envState);
  }
  catch(alglib_impl::ae_error_type)
  {
    throw OpenANNException(envState.error_msg);
  }

  // The ALGLIB loop has finished: clean up internal state and signal the caller to stop
  reset();
  return false;
}
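step() performs one Levenberg-Marquardt iteration through ALGLIB's reverse-communication interface and returns true as long as another iteration is coming, so a caller drives it in a loop. A hedged usage sketch follows; the header paths, OpenANN::Optimizable, setOptimizable(), setStopCriteria(), StoppingCriteria and its maximalIterations field, and trainWithLMA are assumptions about the surrounding OpenANN API, not verified from this file.

#include <OpenANN/optimization/LMA.h>
#include <OpenANN/optimization/StoppingCriteria.h>

// myProblem is any OpenANN::Optimizable (e.g. a network with a dataset attached).
void trainWithLMA(OpenANN::Optimizable& myProblem)
{
  OpenANN::LMA lma;
  OpenANN::StoppingCriteria stop;
  stop.maximalIterations = 100;   // assumed field name on StoppingCriteria
  lma.setOptimizable(myProblem);
  lma.setStopCriteria(stop);
  // Each call runs one LM iteration; false means the optimizer has finished
  // and has already cleaned up its internal ALGLIB state (see reset() above).
  while(lma.step())
  {
    // per-iteration hooks (logging, validation) could go here
  }
}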