Example #1
/**
 * Return the calculated value stored in this derived quantity
 * for the parameter year. If the year does not exist as a standard
 * value, we calculate how many years to go back into
 * the initialisation phase values.
 *
 * Note: We cannot go back more than one phase. If this condition
 * is triggered we return the first value from the phase instead
 * of going back further.
 *
 * @param year The year to get the derived quantity value for.
 * @return The derived quantity value
 */
Double DerivedQuantity::GetValue(unsigned year) {
  LOG_FINEST() << "get value for year: " << year;
  if (override_values_.find(year) != override_values_.end()) {
    values_[year] = override_values_[year];
    return override_values_[year];
  }
  if (values_.find(year) != values_.end())
    return values_[year];
  if (initialisation_values_.size() == 0)
    return 0.0;

  // Calculate how many years to go back. At this point
  // either we're in the init phases or we're going back
  // into the init phases.
  unsigned years_to_go_back = model_->start_year() - year;

  Double result = 0.0;
  if (years_to_go_back == 0) {
    LOG_WARNING() << "Years to go back is 0 in derived quantity " << label_ << " when it shouldn't be";
    result = (*initialisation_values_.rbegin()->rbegin());
  } else if (initialisation_values_.rbegin()->size() > years_to_go_back) {
    result = initialisation_values_.rbegin()->at(initialisation_values_.rbegin()->size() - years_to_go_back);
  } else if (initialisation_values_.size() == 1) {
    result = (*initialisation_values_.rbegin()->begin()); // first value of last init phase
  } else {
    result = (*(initialisation_values_.rbegin() + 1)->begin()); // first value of the second-to-last init phase
  }

  // Make an exception for initialisation phases (such as derived) which only require going back one year
  if (model_->b0_initialised(label_)) {
    result = (*initialisation_values_.rbegin()->rbegin());
  }

  LOG_FINEST() << "years_to_go_back: " << years_to_go_back
      << "; year: " << year
      << "; result: " << result
      << "; .begin(): " << (*initialisation_values_.rbegin()->rbegin())
      << ": .size(): " << initialisation_values_.rbegin()->size();

  return result;
}
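/**
 * A minimal standalone sketch of the fallback indexing above, assuming plain
 * std::vector<double> in place of the model's Double containers; GetInitValue
 * and init_values are illustrative names, not part of the model.
 */
#include <vector>

double GetInitValue(const std::vector<std::vector<double>>& init_values,
                    unsigned years_to_go_back) {
  const std::vector<double>& last_phase = *init_values.rbegin();
  if (years_to_go_back == 0)
    return *last_phase.rbegin();                              // last value of the last phase
  if (last_phase.size() > years_to_go_back)
    return last_phase[last_phase.size() - years_to_go_back];  // step back within the last phase
  if (init_values.size() == 1)
    return *last_phase.begin();                               // cannot go back a phase: clamp to its first value
  return *(init_values.rbegin() + 1)->begin();                // first value of the second-to-last phase
}

// With init_values = {{1, 2, 3}, {4, 5, 6}}: going back 2 years yields 5;
// going back 9 years falls through to the previous phase's first value, 1.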
Example #2
void MortalityEventBiomass::DoExecute() {
  if (catch_years_[model_->current_year()] == 0)
    return;

  LOG_TRACE();

  /**
   * Work out how much of the stock is vulnerable
   */
  Double vulnerable = 0.0;
  unsigned i = 0;
  for (auto categories : partition_) {
    categories->UpdateMeanWeightData();
    unsigned offset = 0;
    for (Double& data : categories->data_) {
      Double temp = data * selectivities_[i]->GetResult(categories->min_age_ + offset, categories->age_length_);
      vulnerable += temp * categories->mean_weight_per_[categories->min_age_ + offset];
      ++offset;
    }

    ++i;
  }

  /**
   * Work out the exploitation rate to remove (catch/vulnerable)
   */
  Double exploitation = catch_years_[model_->current_year()] / utilities::doublecompare::ZeroFun(vulnerable);
  if (exploitation > u_max_) {
    exploitation = u_max_;
    if (penalty_)
      penalty_->Trigger(label_, catch_years_[model_->current_year()], vulnerable * u_max_);

  } else if (exploitation < 0.0) {
    exploitation = 0.0;
  }

  LOG_FINEST() << "year: " << model_->current_year() << "; exploitation: " << AS_DOUBLE(exploitation);

  /**
   * Remove the stock now. The amount to remove is
   * vulnerable * exploitation
   */
  i = 0;
  for (auto categories : partition_) {
    unsigned offset = 0;
    for (Double& data : categories->data_) {
      data -= data * selectivities_[i]->GetResult(categories->min_age_ + offset, categories->age_length_) * exploitation;
      ++offset;
    }
    ++i;
  }
}
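/**
 * The exploitation logic above reduces to clamping catch over vulnerable
 * biomass into [0, u_max]. A hedged sketch of just that arithmetic: the
 * epsilon guard stands in for utilities::doublecompare::ZeroFun, and the
 * penalty trigger is omitted.
 */
#include <algorithm>

double ExploitationRate(double catch_amount, double vulnerable, double u_max) {
  const double kEpsilon = 1e-10;             // assumed guard value, not from the source
  double u = catch_amount / std::max(vulnerable, kEpsilon);
  return std::min(std::max(u, 0.0), u_max);  // clamp into [0, u_max]
}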
Example #3
/**
 * Execute any reports that have the year and
 * time step label as their execution parameters.
 * Note: All these reports are only in the execute phase.
 *
 * @param year The current year for the model
 * @param time_step_label The last time step to be completed
 */
void Manager::Execute(unsigned year, const string& time_step_label) {
  LOG_TRACE();
  LOG_FINEST() << "year: " << year << "; time_step_label: " << time_step_label << "; reports: " << time_step_reports_[time_step_label].size();

  RunMode::Type run_mode = model_->run_mode();
  bool tabular = model_->global_configuration().print_tabular();
  for(auto report : time_step_reports_[time_step_label]) {
    if ( (RunMode::Type)(report->run_mode() & run_mode) != run_mode) {
      LOG_FINEST() << "Skipping report: " << report->label() << " because run mode is not right";
      continue;
    }
    if (!report->HasYear(year)) {
      LOG_FINEST() << "Skipping report: " << report->label() << " because it does not have year " << year;
      continue;
    }

    if (tabular)
      report->ExecuteTabular();
    else
      report->Execute();
  }
}
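/**
 * The (report->run_mode() & run_mode) != run_mode test above is a flag-subset
 * check: a report runs only when its mask contains every bit of the current
 * run mode. A sketch with assumed flag values (the real RunMode enumerators
 * may differ):
 */
enum RunModeFlags { kBasic = 1, kEstimation = 2, kSimulation = 4 };

bool SupportsRunMode(int report_mask, int current_mode) {
  // True only when every bit of current_mode is present in report_mask
  return (report_mask & current_mode) == current_mode;
}
// SupportsRunMode(kBasic | kEstimation, kEstimation) -> true
// SupportsRunMode(kBasic, kEstimation)               -> false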
/**
 * This method is called at the start of the targeted
 * time step for this observation.
 *
 * At this point we need to build our cache for the partition
 * structure to use with any interpolation
 */
void ProportionsMigrating::PreExecute() {
  cached_partition_->BuildCache();
  LOG_FINEST() << "Entering observation " << label_;


  if (cached_partition_->Size() != proportions_[model_->current_year()].size()) {
    LOG_MEDIUM() << "Cached size " << cached_partition_->Size() << " partition size = " << proportions_[model_->current_year()].size();
    LOG_CODE_ERROR() << "cached_partition_->Size() != proportions_[model->current_year()].size()";

  }
  if (partition_->Size() != proportions_[model_->current_year()].size())
    LOG_CODE_ERROR() << "partition_->Size() != proportions_[model->current_year()].size()";
}
/**
 * Store the value from our addressable for this year
 */
void Project::StoreValue(unsigned current_year) {
  if (addressable_ != nullptr)
    stored_values_[current_year] = *addressable_;
  else if (addressable_map_ != nullptr)
    stored_values_[current_year] = (*addressable_map_)[current_year];
  else if (addressable_vector_ != nullptr) {
    unsigned index = current_year - model_->start_year();
    if (index >= addressable_vector_->size()) {
      LOG_CODE_ERROR() << "Could not store value for @project parameter " << parameter_ << " in year "
      << current_year << " because index exceeded size of vector " << index << " : " << addressable_vector_->size();
    }
    stored_values_[current_year] = addressable_vector_->at(index);
  }
  LOG_FINEST() << "Storing value = " << stored_values_[current_year];
}
void SumToOne::DoBuild() {
  LOG_TRACE();
  for (auto& estimate_label : estimate_labels_) {
    Estimate* estimate = model_->managers().estimate()->GetEstimateByLabel(estimate_label);
    if (estimate == nullptr) {
      LOG_ERROR_P(PARAM_ESTIMATE_LABELS) << "Estimate " << estimate_label << " could not be found. Have you defined it?";
      return;
    } else {
      LOG_FINE() << "transform with objective = " << transform_with_jacobian_ << " estimate transform " << estimate->transform_for_objective() << " together = " << !transform_with_jacobian_ && !estimate->transform_for_objective();
      if (!transform_with_jacobian_ && !estimate->transform_for_objective()) {
        LOG_ERROR_P(PARAM_LABEL) << "You have specified a transformation that does not contribute a jacobian, and the prior parameters do not refer to the transformed estimate, in the @estimate" << estimate_label_ << ". This is not advised, and may cause bias estimation. Please address the user manual if you need help";
      }
      if (estimate->transform_with_jacobian_is_defined()) {
        if (transform_with_jacobian_ != estimate->transform_with_jacobian()) {
          LOG_ERROR_P(PARAM_LABEL) << "This parameter is not consistent with the equivalent parameter in the @estimate block " << estimate_label_ << ". please make sure these are both true or both false.";
        }
      }
      estimates_.push_back(estimate);
    }
  }


  // Validate that the parameters sum to one.
  Double total = 0.0;

  for (auto& estimate : estimates_) {
    LOG_FINEST() << "transformation value = " << estimate->value();
    total += estimate->value();
  }
  if (total != 1.0)
    LOG_ERROR_P(PARAM_ESTIMATE_LABELS) << "The estimates you supplied do not sum to 1.0; they sum to " << total << ". Please check the initial values of these parameters";

  // Check that the bounds are sensible
  if (parameters_.Get(PARAM_UPPER_BOUND)->has_been_defined() && parameters_.Get(PARAM_LOWER_BOUND)->has_been_defined()) {
    for (unsigned i = 0; i < estimates_.size(); ++i) {
      if (estimates_[i]->lower_bound() < 0.0 || estimates_[i]->lower_bound() > 1.0)
        LOG_ERROR_P(PARAM_LOWER_BOUND) << "You cannot specify a lower bound less than 0.0 or greater than 1.0";
      if (estimates_[i]->upper_bound() < 0.0 || estimates_[i]->upper_bound() > 1.0)
        LOG_ERROR_P(PARAM_UPPER_BOUND) << "You cannot specify an upper bound less than 0.0 or greater than 1.0";
    }
  }
  LOG_MEDIUM() << "total = " << total;

  // Turn off the last estimate
  LOG_FINE() << "Turning off parameter, this won't be estimated, and will be an outcome of other parameters " << estimates_[estimates_.size() - 1]->parameter() << " in the estimation";
  estimates_[estimates_.size() - 1]->set_estimated(false);
  LOG_MEDIUM() << "flagged estimated = " << estimates_[estimates_.size() - 1]->estimated();
}
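/**
 * One fragile spot above is the exact comparison total != 1.0: floating-point
 * sums that are off by one ulp will fail it. A tolerance-based check is the
 * usual alternative; a sketch, where the 1e-8 tolerance is an assumed value,
 * not taken from the source:
 */
#include <cmath>
#include <vector>

bool SumsToOne(const std::vector<double>& values, double tolerance = 1e-8) {
  double total = 0.0;
  for (double v : values)
    total += v;
  return std::fabs(total - 1.0) <= tolerance;
}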
Example #7
void Manager::Prepare() {
  LOG_TRACE();
  RunMode::Type run_mode = model_->run_mode();
  bool tabular = model_->global_configuration().print_tabular();
  for (auto report : objects_) {
    if ( (RunMode::Type)(report->run_mode() & run_mode) != run_mode) {
      LOG_FINEST() << "Skipping report: " << report->label() << " because run mode is not right";
      continue;
    }

    if (tabular)
      report->PrepareTabular();
    else
      report->Prepare();
  }
}
Example #8
/**
 * Build our reports then
 * organise the reports stored in our
 * object list into different containers
 * based on their type.
 */
void Manager::Build() {
  LOG_FINEST() << "objects_.size(): " << objects_.size();
  for (auto report : objects_) {
    report->Build();

    if ((RunMode::Type)(report->run_mode() & RunMode::kInvalid) == RunMode::kInvalid)
      LOG_CODE_ERROR() << "Report: " << report->label() << " has not been properly configured to have a run mode";

    if (report->model_state() != State::kExecute) {
      LOG_FINE() << "Adding report " << report->label() << " to state reports";
      state_reports_[report->model_state()].push_back(report);
    } else {
      LOG_FINE() << "Adding report " << report->label() << " to time step reports";
      time_step_reports_[report->time_step()].push_back(report);
    }
  }
}
Example #9
void Manager::Validate(Model* model) {
  LOG_TRACE();
  base::Manager<niwa::timesteps::Manager, niwa::TimeStep>::Validate();
  model_ = model;

  // Order our time steps based on the parameter given to the model
  vector<string> time_steps = model->time_steps();
  for(string time_step_label : time_steps) {
    for(auto time_step : objects_) {
      if (time_step->label() == time_step_label) {
        ordered_time_steps_.push_back(time_step);
        break;
      }
    }
  }

  LOG_FINEST() << "ordered_time_steps_.size(): " << ordered_time_steps_.size();
}
/**
 * This method will take the current age population for this category stored
 * in this->data_ and populate this->age_length_matrix_ using the age-length
 * proportions generated and stored against the Partition class. The age-length
 * proportions are generated during the build phase.
 *
 * @param selectivity The selectivity to apply to the age data
 */
void Category::PopulateAgeLengthMatrix(Selectivity* selectivity) {
  LOG_FINEST() << "About to populate the length data for category " << name_ << " in year " << model_->current_year();

  if (selectivity == nullptr)
    LOG_CODE_ERROR() << "selectivity == nullptr";
  if (age_length_ == nullptr)
    LOG_CODE_ERROR() << "In category " << name_ << " there is no age length object to have calculated the age length proportions";
  if (age_length_matrix_.size() == 0)
    LOG_CODE_ERROR() << "No memory has been allocated for the age_length_matrix for category " << name_;

  auto& age_length_proportions = model_->partition().age_length_proportions(name_);
  unsigned year = model_->current_year() - model_->start_year();
  vector<unsigned> length_bins = model_->length_bins();
  unsigned time_step_index = model_->managers().time_step()->current_time_step();

  LOG_FINEST() << "Year: " << year << "; time_step: " << time_step_index << "; length_bins: " << length_bins.size();
  LOG_FINEST() << "Years in proportions: " << age_length_proportions.size();
  LOG_FINEST() << "Timesteps in current year: " << age_length_proportions[year].size();

  if (year >= age_length_proportions.size())
    LOG_CODE_ERROR() << "year >= age_length_proportions.size()";
  if (time_step_index >= age_length_proportions[year].size())
    LOG_CODE_ERROR() << "time_step_index >= age_length_proportions[year].size()";
  vector<vector<Double>>& proportions_for_now = age_length_proportions[year][time_step_index];

  unsigned size = model_->length_plus() ? model_->length_bins().size() : model_->length_bins().size() - 1;
  LOG_FINEST() << "Calculating age length data";
  for (unsigned age = min_age_; age <= max_age_; ++age) {
    unsigned i = age - min_age_;
    if (i >= proportions_for_now.size())
      LOG_CODE_ERROR() << "i >= proportions_for_now.size()";
    if (i >= data_.size())
      LOG_CODE_ERROR() << "i >= data_.size()";
    if (i >= age_length_matrix_.size())
      LOG_CODE_ERROR() << "(i >= age_length_matrix_.size())";

    vector<Double>& ages_at_length = proportions_for_now[i];

    for (unsigned bin = 0; bin < size; ++bin) {
      if (bin >= age_length_matrix_[i].size())
        LOG_CODE_ERROR() << "bin (" << bin << ") >= age_length_matrix_[i].size(" << age_length_matrix_[i].size() << ")";
      if (bin >= ages_at_length.size())
        LOG_CODE_ERROR() << "bin >= ages_at_length.size()";

      age_length_matrix_[i][bin] = selectivity->GetAgeResult(age, age_length_) * data_[i] * ages_at_length[bin];
    }
  }

  LOG_FINEST() << "Finished populating the length data for category " << name_ << " in year " << model_->current_year();
}
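/**
 * The inner loop above is a simple expansion: each cell is selectivity at age,
 * times numbers at age, times the proportion of that age falling in each
 * length bin. A standalone sketch of that arithmetic with plain vectors
 * (names are illustrative):
 */
#include <cstddef>
#include <vector>

std::vector<std::vector<double>> BuildAgeLengthMatrix(
    const std::vector<double>& selectivity_by_age,          // S(a)
    const std::vector<double>& numbers_by_age,              // N(a)
    const std::vector<std::vector<double>>& proportions) {  // P[a][b], age to length bin
  std::vector<std::vector<double>> matrix(numbers_by_age.size());
  for (std::size_t a = 0; a < numbers_by_age.size(); ++a) {
    matrix[a].resize(proportions[a].size());
    for (std::size_t b = 0; b < proportions[a].size(); ++b)
      matrix[a][b] = selectivity_by_age[a] * numbers_by_age[a] * proportions[a][b];
  }
  return matrix;
}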
Example #11
/**
 * Build our partition structure now. This involves getting
 * the category information and building the raw structure.
 *
 * We're not interested in the range of years that each
 * category has because this will be addressed with the
 * accessor objects.
 */
void Partition::Build() {
  Categories* categories                    = model_->categories();
  vector<string> category_names             = categories->category_names();

  for(string category : category_names) {
    LOG_FINEST() << "Adding category " << category << " to the partition";

    partition::Category* new_category = new partition::Category(model_);
    new_category->name_       = category;
    new_category->min_age_    = categories->min_age(category);
    new_category->max_age_    = categories->max_age(category);
    new_category->years_      = categories->years(category);
    new_category->age_length_ = categories->age_length(category);

    unsigned age_spread = (categories->max_age(category) - categories->min_age(category)) + 1;
    new_category->data_.resize(age_spread, 0.0);

    partition_[category] = new_category;
  }
}
/**
 * Build any runtime relationships
 * - Build the partition accessor
 * - Build our list of selectivities
 * - Build our ratios for the number of time steps
 */
void TagLoss::DoBuild() {
  partition_.Init(category_labels_);

  for (string label : selectivity_names_) {
    Selectivity* selectivity = model_->managers().selectivity()->GetSelectivity(label);
    if (!selectivity)
      LOG_ERROR_P(PARAM_SELECTIVITIES) << ": selectivity " << label << " does not exist. Have you defined it?";

    selectivities_.push_back(selectivity);
  }

  /**
   * Organise our time step ratios. Each time step can
   * apply a different ratio of M, so here we verify that
   * we have the right number and that each lies between 0.0 and 1.0
   */
  vector<TimeStep*> time_steps = model_->managers().time_step()->ordered_time_steps();
  LOG_FINEST() << "time_steps.size(): " << time_steps.size();
  vector<unsigned> active_time_steps;
  for (unsigned i = 0; i < time_steps.size(); ++i) {
    if (time_steps[i]->HasProcess(label_))
      active_time_steps.push_back(i);
  }

  if (ratios_.size() == 0) {
    for (unsigned i : active_time_steps)
      time_step_ratios_[i] = 1.0;
  } else {
    if (ratios_.size() != active_time_steps.size())
      LOG_FATAL_P(PARAM_TIME_STEP_RATIO) << " length (" << ratios_.size()
          << ") does not match the number of time steps this process has been assigned to (" << active_time_steps.size() << ")";

    for (Double value : ratios_) {
      if (value < 0.0 || value > 1.0)
        LOG_ERROR_P(PARAM_TIME_STEP_RATIO) << " value (" << value << ") must be between 0.0 (inclusive) and 1.0 (inclusive)";
    }

    for (unsigned i = 0; i < ratios_.size(); ++i)
      time_step_ratios_[active_time_steps[i]] = ratios_[i];
  }
}
Example #13
/**
 * Execute our maturation rate process.
 */
void TransitionCategory::DoExecute() {
  LOG_TRACE();

  auto from_iter     = from_partition_.begin();
  auto to_iter       = to_partition_.begin();
  Double amount      = 0.0;

  LOG_FINEST() << "transition_rates_.size(): " << transition_rates_.size() << "; from_partition_.size(): " << from_partition_.size()
      << "; to_partition_.size(): " << to_partition_.size();
  if (from_partition_.size() != to_partition_.size()) {
    LOG_FATAL() << "The list of categories for the Transition Category process are not of equal size in year " << model_->current_year()
    << ". We have " << from_partition_.size() << " and " << to_partition_.size() << " categories to transition between";
  }

  if (transition_rates_.size() != from_partition_.size()) {
    LOG_FINE() << "Re-building the transition rates because the partition size has changed";
    transition_rates_.resize(from_partition_.size());
    for (unsigned i = 0; i < transition_rates_.size(); ++i) {
      Double proportion = proportions_.size() > 1 ? proportions_[i] : proportions_[0];
      unsigned min_age   = (*from_iter)->min_age_;

      for (unsigned j = 0; j < (*from_iter)->data_.size(); ++j) {
        transition_rates_[i].push_back(proportion * selectivities_[i]->GetResult(min_age + j, (*from_iter)->age_length_));
        if (selectivities_[i]->GetResult(min_age + j, (*from_iter)->age_length_) > 1.0)
          LOG_ERROR() << " Selectivity result is greater than 1.0, check selectivity";
      }
    }
  }

  for (unsigned i = 0; from_iter != from_partition_.end() && to_iter != to_partition_.end(); ++from_iter, ++to_iter, ++i) {

    for (unsigned offset = 0; offset < (*from_iter)->data_.size(); ++offset) {
      amount = transition_rates_[i][offset] * (*from_iter)->data_[offset];

      (*from_iter)->data_[offset] -= amount;
      (*to_iter)->data_[offset] += amount;
      if ((*from_iter)->data_[offset] < 0.0)
        LOG_FATAL() << "Maturation rate caused a negative partition if ((*from_iter)->data_[offset] < 0.0) ";
    }
  }
}
/**
 * This method is called at the end of a model iteration
 * to calculate the score for the observation.
 */
void ProportionsMigrating::CalculateScore() {
  /**
   * Simulate or generate results
   * During simulation mode we'll simulate results for this observation
   */
  if (model_->run_mode() == RunMode::kSimulation) {
    likelihood_->SimulateObserved(comparisons_);
  } else {
    /**
     * The comparisons are already proportions so they can be sent straight to the likelihood
     */
    for (unsigned year : years_) {
      scores_[year] = likelihood_->GetInitialScore(comparisons_, year);
      likelihood_->GetScores(comparisons_);
      for (obs::Comparison comparison : comparisons_[year]) {
        LOG_FINEST() << "[" << year << "]+ likelihood score: " << comparison.score_;
        scores_[year] += comparison.score_;
      }
    }
  }
}
/**
 * This method collapses the age-length matrix (numbers at age by length) into numbers at length for a category
 */
void Category::CollapseAgeLengthDataToLength() {
  LOG_TRACE();

  if (age_length_matrix_.size() == 0)
    LOG_CODE_ERROR() << "if (age_length_matrix_.size() == 0)";

  LOG_FINE() << "age_length_matrix_.size(): " << age_length_matrix_.size();
  LOG_FINE() << "age_length_matrix_[0].size(): " << age_length_matrix_[0].size();
  length_data_.assign(model_->length_bins().size(), 0.0);
  for (unsigned i = 0; i < age_length_matrix_.size(); ++i) {
    for (unsigned j = 0; j < age_length_matrix_[i].size(); ++j) {
      if (j >= length_data_.size())
        LOG_CODE_ERROR() << "j >= length_data_.size()";

      length_data_[j] += age_length_matrix_[i][j];
    }
  }

  for (unsigned i = 0; i < length_data_.size(); ++i)
    LOG_FINEST() << "length_data_[" << i << "]: " << length_data_[i];
}
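/**
 * The collapse above is a column sum: numbers at length j are the sum over
 * ages i of matrix[i][j]. A minimal standalone sketch:
 */
#include <cstddef>
#include <vector>

std::vector<double> CollapseToLength(const std::vector<std::vector<double>>& age_length_matrix,
                                     std::size_t number_of_length_bins) {
  std::vector<double> length_data(number_of_length_bins, 0.0);
  for (const std::vector<double>& row : age_length_matrix)
    for (std::size_t j = 0; j < row.size() && j < number_of_length_bins; ++j)
      length_data[j] += row[j];
  return length_data;
}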
/**
 * Get the score for this penalty
 *
 * @return Penalty score
 */
Double ElementDifference::GetScore() {
  LOG_TRACE();
  vector<Double> values;
  vector<Double> second_values;
  // first parameter
  if (addressable_vector_ != nullptr)
    values.assign((*addressable_vector_).begin(), (*addressable_vector_).end());
  else if (addressable_ptr_vector_ != nullptr) {
    for (auto ptr : (*addressable_ptr_vector_))
      values.push_back((*ptr));
  } else if (addressable_map_ != nullptr) {
    for (auto iter : (*addressable_map_))
      values.push_back(iter.second);
  } else if (addressable_ != nullptr) {
    values.push_back((*addressable_));
  } else
    LOG_CODE_ERROR() << "(second_addressable_map_ != 0) && (second_addressable_vector_ != 0)";
  // Second parameter
  if (second_addressable_vector_ != nullptr)
    second_values.assign((*second_addressable_vector_).begin(), (*second_addressable_vector_).end());
  else if (second_addressable_ptr_vector_ != nullptr) {
    for (auto ptr : (*second_addressable_ptr_vector_))
      second_values.push_back((*ptr));
  } else if (second_addressable_map_ != nullptr) {
    for (auto iter : (*second_addressable_map_))
      second_values.push_back(iter.second);
  } else if (second_addressable_ != nullptr) {
      second_values.push_back((*second_addressable_));
  } else
    LOG_CODE_ERROR() << "(second_addressable_map_ != 0) && (second_addressable_vector_ != 0)";

  Double score = 0.0;
  LOG_FINEST() << "size of first vector = " << values.size() << " size of second vector";
  for(unsigned i = 0; i < values.size(); ++i)
    score += pow(values[i] - second_values[i], 2);
  return score * multiplier_;
}
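/**
 * The score above is a scaled sum of squared element differences,
 * score = multiplier * sum_i (x_i - y_i)^2. A one-function sketch, assuming
 * the two vectors were already validated to be the same length (as DoBuild
 * does):
 */
#include <cstddef>
#include <vector>

double ElementDifferenceScore(const std::vector<double>& x,
                              const std::vector<double>& y,
                              double multiplier) {
  double score = 0.0;
  for (std::size_t i = 0; i < x.size(); ++i)
    score += (x[i] - y[i]) * (x[i] - y[i]);
  return score * multiplier;
}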
Example #17
/**
 * Execute the time step during the initialisation phases
 */
void TimeStep::ExecuteForInitialisation(const string& phase_label) {
  LOG_FINEST() << "Executing for initialisation phase: " << phase_label << " with " << initialisation_block_executors_.size() << " executors";
  for (unsigned index = 0; index < initialisation_processes_[phase_label].size(); ++index) {
    if (initialisation_mortality_blocks_[phase_label].first == index) {
      for (auto executor : initialisation_block_executors_) {
        executor->PreExecute();
      }
    }

    initialisation_processes_[phase_label][index]->Execute(0u, "");

    if (initialisation_mortality_blocks_[phase_label].second == index) {
      for (auto executor : initialisation_block_executors_) {
        executor->Execute();
      }
    }
  }
  if (initialisation_mortality_blocks_[phase_label].first == processes_.size()){
     for (auto executor : initialisation_block_executors_) {
       executor->PreExecute();
       executor->Execute();
     }
   }
}
Example #18
/**
 * Execute the time step
 */
void TimeStep::Execute(unsigned year) {
  LOG_TRACE();

  for (auto executor : executors_[year])
      executor->PreExecute();

  for (unsigned index = 0; index < processes_.size(); ++index) {
    if (index == mortality_block_.first) {
      for (auto executor : block_executors_[year])
        executor->PreExecute();
    }

    for(auto executor : process_executors_[year][index])
      executor->PreExecute();

    LOG_FINEST() << "Executing process: " << processes_[index]->label();
    processes_[index]->Execute(year, label_);

    for(auto executor : process_executors_[year][index])
      executor->Execute();

    if (index == mortality_block_.second) {
      for (auto executor : block_executors_[year])
        executor->Execute();
    }
  }
  if (mortality_block_.first == processes_.size()){
    for (auto executor : block_executors_[year]) {
      executor->PreExecute();
      executor->Execute();
    }
  }

  for (auto executor : executors_[year])
    executor->Execute();
}
void ProcessRemovalsByLength::Execute() {
  LOG_TRACE();
  /**
   * Verify our cached partition and partition sizes are correct
   */
//  auto categories = model_->categories();
  unsigned year = model_->current_year();
  unsigned year_index = year - model_->start_year();
  unsigned time_step = model_->managers().time_step()->current_time_step();
  auto cached_partition_iter = cached_partition_->Begin();
  auto partition_iter = partition_->Begin(); // vector<vector<partition::Category> >
  map<unsigned, map<string, map<string, vector<Double>>>> &Removals_at_age = mortality_instantaneous_->catch_at();

  /**
   * Loop through the provided categories. Each provided category (combination) will have a list of observations
   * with it. We need to build a vector of proportions for each length using that combination and then
   * compare it to the observations.
   */
  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset, ++partition_iter, ++cached_partition_iter) {
    LOG_FINEST() << "category: " << category_labels_[category_offset];
    Double start_value = 0.0;
    Double end_value = 0.0;
    Double number_at_age = 0.0;

//    LOG_WARNING() << "This is bad code because it allocates memory in the middle of an execute";
    vector<Double> expected_values(number_bins_, 0.0);
    vector<Double> numbers_at_length;
    vector<vector<Double>> age_length_matrix;

    /**
     * Loop through the 2 combined categories building up the
     * expected proportions values.
     */
    auto category_iter = partition_iter->begin();
    auto cached_category_iter = cached_partition_iter->begin();
    for (; category_iter != partition_iter->end(); ++cached_category_iter, ++category_iter) {
//      AgeLength* age_length = categories->age_length((*category_iter)->name_);

//      LOG_WARNING() << "This is bad code because it allocates memory in the middle of an execute";
      age_length_matrix.resize((*category_iter)->data_.size());

      vector<Double> age_frequencies(length_bins_.size(), 0.0);
      const auto& age_length_proportions = model_->partition().age_length_proportions((*category_iter)->name_)[year_index][time_step];

      for (unsigned data_offset = 0; data_offset < (*category_iter)->data_.size(); ++data_offset) {
        unsigned age = ((*category_iter)->min_age_ + data_offset);

        // Calculate the age structure removed from the fishing process
        number_at_age = Removals_at_age[year][method_][(*category_iter)->name_][data_offset];
        LOG_FINEST() << "Numbers at age = " << age << " = " << number_at_age << " start value : " << start_value << " end value : " << end_value;
        // Implement an algorithm similar to DoAgeLengthConversion() to convert numbers at age to numbers at length
        // This is different to DoAgeLengthConversion as this number is now not related to the partition
//        Double mu= (*category_iter)->mean_length_by_time_step_age_[time_step][age];

//        LOG_FINEST() << "mean = " << mu << " cv = " << age_length->cv(year, time_step, age) << " distribution = " << age_length->distribution_label() << " and length plus group = " << length_plus_;
//        age_length->CummulativeNormal(mu, age_length->cv(year, time_step, age), age_frequencies, length_bins_, length_plus_);

//        LOG_WARNING() << "This is bad code because it allocates memory in the middle of an execute";
        age_length_matrix[data_offset].resize(number_bins_);

        // Loop through the length bins and multiply the numbers at the current age
        // by the age-length proportions to get age-length numbers
        for (unsigned j = 0; j < number_bins_; ++j) {
          age_length_matrix[data_offset][j] = number_at_age * age_length_proportions[data_offset][j];
          LOG_FINEST() << "The proportion of fish in length bin : " << length_bins_[j] << " = " << age_frequencies[j];
        }
      }

      if (age_length_matrix.size() == 0)
        LOG_CODE_ERROR() << "if (age_length_matrix.size() == 0)";

      numbers_at_length.assign(age_length_matrix[0].size(), 0.0);
      for (unsigned i = 0; i < age_length_matrix.size(); ++i) {
        for (unsigned j = 0; j < age_length_matrix[i].size(); ++j) {
          numbers_at_length[j] += age_length_matrix[i][j];
        }
      }

      for (unsigned length_offset = 0; length_offset < number_bins_; ++length_offset) {
        LOG_FINEST() << " numbers for length bin : " << length_bins_[length_offset] << " = " << numbers_at_length[length_offset];
        expected_values[length_offset] += numbers_at_length[length_offset];

        LOG_FINE() << "----------";
        LOG_FINE() << "Category: " << (*category_iter)->name_ << " at length " << length_bins_[length_offset];
        LOG_FINE() << "start_value: " << start_value << "; end_value: " << end_value << "; final_value: " << numbers_at_length[length_offset];
        LOG_FINE() << "expected_value becomes: " << expected_values[length_offset];
      }
    }

    if (expected_values.size() != proportions_[model_->current_year()][category_labels_[category_offset]].size())
      LOG_CODE_ERROR() << "expected_values.size(" << expected_values.size() << ") != proportions_[category_offset].size("
          << proportions_[model_->current_year()][category_labels_[category_offset]].size() << ")";

    /**
     * save our comparisons so we can use them to generate the score from the likelihoods later
     */
    for (unsigned i = 0; i < expected_values.size(); ++i) {
      SaveComparison(category_labels_[category_offset], 0, length_bins_[i], expected_values[i], proportions_[model_->current_year()][category_labels_[category_offset]][i],
          process_errors_by_year_[model_->current_year()], error_values_[model_->current_year()][category_labels_[category_offset]][i], 0.0, delta_, 0.0);
    }
  }
}
/**
 * Execute our mortality event object.
 */
void MortalityInitialisationEventBiomass::DoExecute() {
  LOG_TRACE();
  unsigned time_step_index = model_->managers().time_step()->current_time_step();

  // only apply if initialisation phase
  if (model_->state() == State::kInitialise) {
    /**
     * Work out how much of the stock is available or vulnerable to exploit
     */
    Double vulnerable = 0.0;
    unsigned i = 0;
    for (auto categories : partition_) {
      unsigned j = 0;
      //categories->UpdateMeanWeightData();
      for (Double& data : categories->data_) {
        Double temp = data * selectivities_[i]->GetAgeResult(categories->min_age_ + j, categories->age_length_);
        vulnerable += temp * categories->mean_weight_by_time_step_age_[time_step_index][categories->min_age_ + j];
        vulnerable_[categories->name_][categories->min_age_ + j] = temp; // record per-age vulnerable numbers for the removal loop below
        ++j;
      }
      ++i;
    }
    /**
     * Work out the exploitation rate to remove (catch/vulnerable)
     */
    Double exploitation = 0.0;
    LOG_FINEST() << "vulnerable biomass = " << vulnerable << " catch = " << catch_;
    exploitation = catch_ / utilities::doublecompare::ZeroFun(vulnerable);
    if (exploitation > u_max_) {
      exploitation = u_max_;
      if (penalty_)
        penalty_->Trigger(label_, catch_, vulnerable*u_max_);

    } else if (exploitation < 0.0) {
      exploitation = 0.0;
    }
    LOG_FINEST() << "; exploitation: " << AS_DOUBLE(exploitation);

    /**
     * Remove the stock now. The amount to remove is
     * vulnerable * exploitation
     */
    // Report catches and exploitation rates for each category for each iteration
/*
    StoreForReport("initialisation_iteration: ", init_iteration_);
    StoreForReport("Exploitation: ", AS_DOUBLE(exploitation));
    StoreForReport("Catch: ", AS_DOUBLE(catch_));
*/
    Double removals = 0.0;
    for (auto categories : partition_) {
      unsigned offset = 0;
      for (Double& data : categories->data_) {
        // report
        removals = vulnerable_[categories->name_][categories->min_age_ + offset] * exploitation;
        //StoreForReport(categories->name_ + "_Removals: ", AS_DOUBLE(removals));
        data -= removals;
        offset++;
      }
    }
    ++init_iteration_;
  }
}
/**
 * Execute this process
 */
void MortalityPreySuitability::DoExecute() {

  // Check if we are executing this process in current year
  if (std::find(years_.begin(), years_.end(), model_->current_year()) != years_.end()) {

    Double TotalPreyVulnerable = 0;
    Double TotalPreyAvailability = 0;
    Double TotalPredatorVulnerable = 0;
    Double SumMortality = 0.0;

    map<string, Double> Vulnerable_by_Prey;
    map<string, Double> Exploitation_by_Prey;

    auto partition_iter = prey_partition_->Begin(); // vector<vector<partition::Category> >

    for (unsigned category_offset = 0; category_offset < prey_category_labels_.size(); ++category_offset, ++partition_iter) {
      /**
       * Loop through the combined categories building up the prey abundance for each prey category label
       */
       auto category_iter = partition_iter->begin();
       for (; category_iter != partition_iter->end(); ++category_iter) {
         for (unsigned data_offset = 0; data_offset < (*category_iter)->data_.size(); ++data_offset) {
           Double vulnerable = (*category_iter)->data_[data_offset] * prey_selectivities_[category_offset]->GetResult((*category_iter)->min_age_ + data_offset, (*category_iter)->age_length_);
           if (vulnerable <= 0.0)
             vulnerable = 0.0;
           Vulnerable_by_Prey[prey_category_labels_[category_offset]] += vulnerable;
           TotalPreyVulnerable += vulnerable * electivities_[category_offset];
           TotalPreyAvailability += vulnerable;
         }
       }
       LOG_FINEST() << ": Vulnerable abundance for prey category " << prey_category_labels_[category_offset] << " = " << Vulnerable_by_Prey[prey_category_labels_[category_offset]];
    }

    TotalPreyAvailability = dc::ZeroFun(TotalPreyAvailability, ZERO);
    TotalPreyVulnerable = dc::ZeroFun(TotalPreyVulnerable / TotalPreyAvailability, ZERO);

    /*
     * Loop through the predators calculating vulnerable predator abundance
     */
    auto predator_partition_iter = predator_partition_->Begin();
    for (unsigned category_offset = 0; category_offset < predator_category_labels_.size(); ++category_offset, ++predator_partition_iter) {

      /*
       * Loop through the combined categories building up the predator abundance for each predator category label
       */
      auto category_iter = predator_partition_iter->begin();
      for (; category_iter != predator_partition_iter->end(); ++category_iter) {
        for (unsigned data_offset = 0; data_offset < (*category_iter)->data_.size(); ++data_offset) {

          Double predator_vulnerable = (*category_iter)->data_[data_offset] * predator_selectivities_[category_offset]->GetResult((*category_iter)->min_age_ + data_offset, (*category_iter)->age_length_);
          if (predator_vulnerable <= 0.0)
            predator_vulnerable = 0.0;

          TotalPredatorVulnerable += predator_vulnerable;
        }
      }
    }
    LOG_FINEST() << ": Total predator abundance = " << TotalPredatorVulnerable;

    /*
     * Work out exploitation rate for each prey category
     */
    for (unsigned category_offset = 0; category_offset < prey_category_labels_.size(); ++category_offset) {
      Double Exploitation = TotalPredatorVulnerable * consumption_rate_ * ((Vulnerable_by_Prey[prey_category_labels_[category_offset]] / TotalPreyAvailability) * electivities_[category_offset])
          / TotalPreyVulnerable;

      if (Exploitation > u_max_) {
        Exploitation = u_max_;

        if (penalty_) // Throw Penalty
          penalty_->Trigger(penalty_label_, Exploitation, (Vulnerable_by_Prey[prey_category_labels_[category_offset]] * u_max_));

      } else if (Exploitation < 0.0)
        Exploitation = 0.0;

      Exploitation_by_Prey[prey_category_labels_[category_offset]] = Exploitation;
      LOG_FINEST() << ": Exploitation rate for prey category " << prey_category_labels_[category_offset] << " = " << Exploitation_by_Prey[prey_category_labels_[category_offset]];

    }

    // removal of prey components using the exploitation rate.
    partition_iter = prey_partition_->Begin();

    for (unsigned category_offset = 0; category_offset < prey_category_labels_.size(); ++category_offset, ++partition_iter) {

       auto category_iter = partition_iter->begin();
       for (; category_iter != partition_iter->end(); ++category_iter) {
         for (unsigned data_offset = 0; data_offset < (*category_iter)->data_.size(); ++data_offset) {

           Double Current = (*category_iter)->data_[data_offset] * prey_selectivities_[category_offset]->GetResult((*category_iter)->min_age_ + data_offset, (*category_iter)->age_length_)
               * Exploitation_by_Prey[prey_category_labels_[category_offset]];
           if (Current <= 0.0) {
             LOG_WARNING() << ": Non-positive removal calculated; skipping to avoid creating a negative partition";
             continue;
           }

           // remove abundance
           (*category_iter)->data_[data_offset] -= Current;
           SumMortality += Current;
        }
      }
    }
  } // if (std::find(years_.begin(), years_.end(), model_->current_year()) != years_.end()) {
}
/**
 * Execute Cinitial. This code follows the original CASAL algorithm.
 */
void Cinitial::Execute() {
  LOG_TRACE();
  map<string, vector<Double>> category_by_age_total;
  auto partition_iter = partition_->Begin();
  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset, ++partition_iter) {
    category_by_age_total[category_labels_[category_offset]].assign((max_age_ - min_age_ + 1), 0.0);
    /**
     * Loop through the combined categories building up the total abundance for each category label
     */
    auto category_iter = partition_iter->begin();
    for (; category_iter != partition_iter->end(); ++category_iter) {
      for (unsigned data_offset = 0; data_offset < (max_age_ - min_age_ + 1); ++data_offset) {
        unsigned age_offset = (*category_iter)->min_age_ - min_age_;
        // if this category min_age occurs after model min age ignore current age
        if (data_offset < age_offset)
          continue;
        category_by_age_total[category_labels_[category_offset]][data_offset] += (*category_iter)->data_[data_offset - age_offset];
      }
    }
  }
  LOG_TRACE();
  // loop through the category_labels and calculate the cinitial factor, which is the n_ / col_sums
  map<string, vector<Double>> category_by_age_factor;

  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset) {
    category_by_age_factor[category_labels_[category_offset]].assign((max_age_ - min_age_ + 1), 0.0);
    for (unsigned data_offset = 0; data_offset < (max_age_ - min_age_ + 1); ++data_offset) {

      if (category_by_age_total[category_labels_[category_offset]][data_offset] == 0.0)
        category_by_age_factor[category_labels_[category_offset]][data_offset] = 1.0;
      else {
        category_by_age_factor[category_labels_[category_offset]][data_offset] = n_[utilities::ToLowercase(category_labels_[category_offset])][data_offset]
            / category_by_age_total[category_labels_[category_offset]][data_offset];
      }
    }
  }
  LOG_TRACE();
  // Now loop through the combined categories, multiplying each category by the factor
  // from the combined group it belongs to
  partition_iter = partition_->Begin();
  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset, ++partition_iter) {
    /**
     * Loop through the combined categories building up the total abundance for each category label
     */
    auto category_iter = partition_iter->begin();
    for (; category_iter != partition_iter->end(); ++category_iter) {
      for (unsigned data_offset = 0; data_offset < (max_age_ - min_age_ + 1); ++data_offset) {
        unsigned age_offset = (*category_iter)->min_age_ - min_age_;
        // if this category min_age occurs after model min age ignore this age
        if (data_offset < age_offset)
          continue;
        (*category_iter)->data_[data_offset - age_offset] *= category_by_age_factor[category_labels_[category_offset]][data_offset];
      }
    }
  }
  // Build cache
  LOG_FINEST() << "finished calculating Cinitial";
  cached_partition_->BuildCache();
  // Execute the annual cycle for one year to calculate Cinitial
  timesteps::Manager* time_step_manager = model_->managers().time_step();
  time_step_manager->ExecuteInitialisation(label_, 1);

  // Store the SSB value ssb_offset times in the Cinitial phase (located via GetPhaseIndex)
  LOG_FINE() << "derived_ptr_.size(): " << derived_ptr_.size();
  for (auto derived_quantities : derived_ptr_) {
    vector<vector<Double>>& initialisation_values = derived_quantities->initialisation_values();
    unsigned cinit_phase_index = model_->managers().initialisation_phase()->GetPhaseIndex(label_);
    LOG_FINE() << "initialisation_values size: " << initialisation_values.size();
    LOG_FINE() << "ssb_offset: " << ssb_offset_;
    LOG_FINE() << "cinit_phase_index: " << cinit_phase_index;
    LOG_FINE() << "init_values[cinit_phase].size(): " << initialisation_values[cinit_phase_index].size();

    for(unsigned i = 0; i < ssb_offset_; ++i)
      initialisation_values[cinit_phase_index].push_back(*initialisation_values[cinit_phase_index].rbegin());
  }


  // set the partition back to Cinitial state
  auto cached_partition_iter  = cached_partition_->Begin();
  partition_iter = partition_->Begin();
  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset, ++partition_iter, ++cached_partition_iter) {
    auto category_iter = partition_iter->begin();
    auto cached_category_iter = cached_partition_iter->begin();
    for (; category_iter != partition_iter->end(); ++cached_category_iter, ++category_iter) {
      (*category_iter)->data_ = (*cached_category_iter).data_;
    }
  }
}
Example #23
/**
 * Parse the configuration file, creating objects and loading
 * the parameter objects.
 */
void File::Parse() {
  LOG_TRACE();

  if (file_.fail() || !file_.is_open())
    LOG_CODE_ERROR() << "Unable to parse the configuration file because a previous error has not been reported.\nFile: " << file_name_;

  /**
   * Iterate through our file parsing the contents
   */
  string    current_line        = "";
  while (getline(file_, current_line)) {
    ++line_number_;

    if (current_line.length() == 0)
      continue;

    // Handle comments
    HandleComments(current_line);

    if (current_line.length() == 0)
      continue;

    /**
     * Change tabs to spaces, remove any leading/trailing or multiple spaces
     * so we can be sure the input is nicely formatted
     */
    boost::replace_all(current_line, "\t", " ");
    boost::trim_all(current_line);
    LOG_FINEST() << "current_line == '" << current_line << "'";

    /**
     * Now we need to check if this line is an include line for a new
     * file.
     */
    if (current_line.length() > strlen(CONFIG_INCLUDE) + 2) {
      string lower_line = util::ToLowercase(current_line);
      if (lower_line.substr(0, strlen(CONFIG_INCLUDE)) == CONFIG_INCLUDE) {
        string include_name = current_line.substr(strlen(CONFIG_INCLUDE));
        LOG_FINEST() << "Loading new configuration file via include " << include_name;

        boost::replace_all(include_name, "\"", "");
        boost::trim_all(include_name);
        File include_file(loader_);

        if (include_name.find('\\') == string::npos && file_name_.find('\\') != string::npos)
          include_name = file_name_.substr(0, file_name_.find_last_of('\\') + 1) + include_name;
        if (include_name.find('/') == string::npos && file_name_.find('/') != string::npos)
          include_name = file_name_.substr(0, file_name_.find_last_of('/') + 1) + include_name;

        if (!include_file.OpenFile(include_name))
          LOG_FATAL() << "At line: " << line_number_ << " of " << file_name_
              << ": Include file '" << include_name << "' could not be opened. Does this file exist?";

        include_file.Parse();
        continue;
      }
    }


    /**
     * At this point everything is standard. We have a simple line of text that we now need to parse. All
     * comments etc have been removed and we've gone through any include_file directives
     */
    FileLine current_file_line;
    current_file_line.file_name_    = file_name_;
    current_file_line.line_number_  = line_number_;
    current_file_line.line_         = current_line;

    loader_.AddFileLine(current_file_line);
  } // while(get_line())
}
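/**
 * The include handling above resolves a bare include name relative to the
 * parent file's directory. That step in isolation, as a sketch using only
 * std::string operations (the real parser also strips quotes and checks both
 * '\\' and '/' separators; the file names below are illustrative):
 */
#include <string>

std::string ResolveInclude(const std::string& parent_file,
                           std::string include_name, char separator) {
  // Only prepend the parent directory when the include has no path of its own
  if (include_name.find(separator) == std::string::npos &&
      parent_file.find(separator) != std::string::npos)
    include_name = parent_file.substr(0, parent_file.find_last_of(separator) + 1) + include_name;
  return include_name;
}
// ResolveInclude("configs/model.csl", "estimates.csl", '/') -> "configs/estimates.csl"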
Example #24
/**
 * Calculate the derived quantity value for the
 * state of the model.
 *
 * This class will calculate a value that is the sum total
 * of the population in the model filtered by category and
 * multiplied by the selectivities.
 *
 */
void Biomass::Execute() {
  LOG_TRACE();
  Double value = 0.0;

  if (model_->state() == State::kInitialise) {

    auto iterator = partition_.begin();
    // iterate over each category
    for (unsigned i = 0; i < partition_.size() && iterator != partition_.end(); ++i, ++iterator) {
      (*iterator)->UpdateMeanWeightData();
      for (unsigned j = 0; j < (*iterator)->data_.size(); ++j) {
        unsigned age = (*iterator)->min_age_ + j;
        value += (*iterator)->data_[j] * selectivities_[i]->GetResult(age, (*iterator)->age_length_) * (*iterator)->mean_weight_per_[age];
      }
    }

    unsigned initialisation_phase = model_->managers().initialisation_phase()->current_initialisation_phase();
    if (initialisation_values_.size() <= initialisation_phase)
      initialisation_values_.resize(initialisation_phase + 1);

    Double b0_value = 0.0;

    if (time_step_proportion_ == 0.0) {
      b0_value = cache_value_;
      initialisation_values_[initialisation_phase].push_back(b0_value);
    } else if (time_step_proportion_ == 1.0) {
      b0_value = value;
      initialisation_values_[initialisation_phase].push_back(b0_value);
    } else if (mean_proportion_method_) {
      b0_value = cache_value_ + ((value - cache_value_) * time_step_proportion_);
      initialisation_values_[initialisation_phase].push_back(b0_value);
    } else {
      b0_value = pow(cache_value_, 1 - time_step_proportion_) * pow(value, time_step_proportion_);
      initialisation_values_[initialisation_phase].push_back(b0_value);
    }

    // Store b0 or binitial on the model depending on what initialisation phase we are using
    vector<string> init_label = model_->initialisation_phases();
    InitialisationPhase* Init_phase = model_->managers().initialisation_phase()->GetInitPhase(init_label[initialisation_phase]);
    string type = Init_phase->type();
    if (type == PARAM_DERIVED || type == PARAM_ITERATIVE)
      model_->set_b0(label_, b0_value);
    if (type == PARAM_CINITIAL)
      model_->set_binitial(label_, b0_value);

  } else {
    auto iterator = partition_.begin();
    // iterate over each category
    LOG_FINEST() << "Partition size = " << partition_.size();
    for (unsigned i = 0; i < partition_.size() && iterator != partition_.end(); ++i, ++iterator) {
      (*iterator)->UpdateMeanWeightData();

      for (unsigned j = 0; j < (*iterator)->data_.size(); ++j) {
        unsigned age = (*iterator)->min_age_ + j;
        value += (*iterator)->data_[j] * selectivities_[i]->GetResult(age, (*iterator)->age_length_) * (*iterator)->mean_weight_per_[age];
      }
    }

    if (time_step_proportion_ == 0.0)
      values_[model_->current_year()] = cache_value_;
    else if (time_step_proportion_ == 1.0)
      values_[model_->current_year()] = value;
    else if (mean_proportion_method_)
      values_[model_->current_year()] = cache_value_ + ((value - cache_value_) * time_step_proportion_);
    else
      values_[model_->current_year()] = pow(cache_value_, 1 - time_step_proportion_) * pow(value, time_step_proportion_);
  }
  LOG_FINEST() << " Pre Exploitation value " <<  cache_value_ << " Post exploitation " << value << " Final value " << values_[model_->current_year()];
}
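/**
 * The two interpolation branches above blend the cached (start-of-step) value
 * with the end-of-step value: linearly, c + (v - c) * p, or geometrically,
 * c^(1-p) * v^p. A sketch of both; p = 0 returns the cached value and p = 1
 * the current value:
 */
#include <cmath>

double LinearBlend(double cached, double current, double proportion) {
  return cached + (current - cached) * proportion;
}

double GeometricBlend(double cached, double current, double proportion) {
  return std::pow(cached, 1.0 - proportion) * std::pow(current, proportion);
}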
/**
 * Build our parameters
 */
void ElementDifference::DoBuild() {
  LOG_TRACE();
  string error = "";
  if (!model_->objects().VerfiyAddressableForUse(second_parameter_, addressable::kLookup, error)) {
    LOG_FATAL_P(PARAM_SECOND_PARAMETER) << "could not be verified for use in additional_prior.element_difference. Error was " << error;
  }
  error = "";
  if (!model_->objects().VerfiyAddressableForUse(parameter_, addressable::kLookup, error)) {
    LOG_FATAL_P(PARAM_PARAMETER) << "could not be verified for use in additional_prior.element_difference. Error was " << error;
  }
  // first parameter
  addressable::Type addressable_type = model_->objects().GetAddressableType(parameter_);
  LOG_FINEST() << "addressable type = " << addressable_type;
  switch(addressable_type) {
    case addressable::kInvalid:
      LOG_CODE_ERROR() << "Invalid addressable type: " << parameter_;
      break;
    case addressable::kMultiple:
      addressable_ptr_vector_ = model_->objects().GetAddressables(parameter_);
      break;
    case addressable::kVector:
      addressable_vector_ = model_->objects().GetAddressableVector(parameter_);
      break;
    case addressable::kUnsignedMap:
      addressable_map_ = model_->objects().GetAddressableUMap(parameter_);
      break;
    case addressable::kSingle:
      addressable_ = model_->objects().GetAddressable(parameter_);
      break;
    default:
      LOG_ERROR() << "The addressable you have provided for use in a additional priors: " << parameter_
        << " is not a type that is supported for vector smoothing additional priors";
      break;
  }
  // Get second parameter estimates
  addressable_type = model_->objects().GetAddressableType(second_parameter_);
  LOG_FINEST() << "addressable type = " << addressable_type;
  switch(addressable_type) {
    case addressable::kInvalid:
      LOG_CODE_ERROR() << "Invalid addressable type: " << second_parameter_;
      break;
    case addressable::kMultiple:
      second_addressable_ptr_vector_ = model_->objects().GetAddressables(second_parameter_);
      break;
    case addressable::kVector:
      second_addressable_vector_ = model_->objects().GetAddressableVector(second_parameter_);
      break;
    case addressable::kUnsignedMap:
      second_addressable_map_ = model_->objects().GetAddressableUMap(second_parameter_);
      break;
    case addressable::kSingle:
      second_addressable_ = model_->objects().GetAddressable(second_parameter_);
      break;
    default:
      LOG_ERROR() << "The addressable you have provided for use in a additional priors: " << second_parameter_
        << " is not a type that is supported for difference element additional priors";
      break;
  }

  // Check the two parameters are the same length
  vector<Double> values;
  vector<Double> second_values;
  // Load first parameter
  if (addressable_vector_ != nullptr)
    values.assign((*addressable_vector_).begin(), (*addressable_vector_).end());
  else if (addressable_ptr_vector_ != nullptr) {
    for (auto ptr : (*addressable_ptr_vector_))
      values.push_back((*ptr));
  } else if (addressable_map_ != nullptr) {
    for (auto iter : (*addressable_map_))
      values.push_back(iter.second);
  } else if (addressable_ != nullptr) {
    values.push_back((*addressable_));
  } else
    LOG_CODE_ERROR() << "(addressable_map_ != 0) && (addressable_vector_ != 0)";
  // Load second parameter
  if (second_addressable_vector_ != nullptr)
    second_values.assign((*second_addressable_vector_).begin(), (*second_addressable_vector_).end());
  else if (second_addressable_ptr_vector_ != nullptr) {
    for (auto ptr : (*second_addressable_ptr_vector_))
      second_values.push_back((*ptr));
  } else if (second_addressable_map_ != nullptr) {
    for (auto iter : (*second_addressable_map_))
      second_values.push_back(iter.second);
  } else if (second_addressable_ != nullptr) {
    second_values.push_back((*second_addressable_));
  } else
    LOG_CODE_ERROR() << "(second_addressable_map_ != 0) && (second_addressable_vector_ != 0) && (second_addressable_ != 0)";

  if (second_values.size() != values.size())
    LOG_ERROR_P(PARAM_SECOND_PARAMETER) << "The parameters must be the same size; the second parameter has " << second_values.size() << " elements, whereas the first parameter has " << values.size() << " elements";

}
void ProportionsMigrating::Execute() {
  LOG_TRACE();

  /**
   * Verify our cached partition and partition sizes are correct
   */
  auto cached_partition_iter  = cached_partition_->Begin();
  auto partition_iter         = partition_->Begin(); // vector<vector<partition::Category> >

  /**
   * Loop through the provided categories. Each provided category (combination) will have a list of observations
   * with it. We need to build a vector of proportions for each age using that combination and then
   * compare it to the observations.
   */
  LOG_FINEST() << "Number of categories " << category_labels_.size();
  for (unsigned category_offset = 0; category_offset < category_labels_.size(); ++category_offset, ++partition_iter, ++cached_partition_iter) {
    Double      start_value        = 0.0;
    Double      end_value          = 0.0;


    vector<Double> expected_values(age_spread_, 0.0);
    vector<Double> numbers_age_before((model_->age_spread() + 1), 0.0);
    vector<Double> numbers_age_after((model_->age_spread() + 1), 0.0);

    /**
     * Loop through the 2 combined categories building up the
     * expected proportions values.
     */
    auto category_iter = partition_iter->begin();
    auto cached_category_iter = cached_partition_iter->begin();
    for (; category_iter != partition_iter->end(); ++cached_category_iter, ++category_iter) {
      for (unsigned data_offset = 0; data_offset < (*category_iter)->data_.size(); ++data_offset) {
        // We now need to loop through all ages to apply the ageing misclassification matrix, which accounts
        // for ages older than max_age_ that could be classified as an individual within the observation range
        unsigned age = ( (*category_iter)->min_age_ + data_offset);

        start_value   = (*cached_category_iter).data_[data_offset];
        end_value     = (*category_iter)->data_[data_offset];

        numbers_age_before[data_offset] += start_value;
        numbers_age_after[data_offset] += end_value;

        LOG_FINE() << "----------";
        LOG_FINE() << "Category: " << (*category_iter)->name_ << " at age " << age;
        LOG_FINE() << "start_value: " << start_value << "; end_value: " << end_value;
      }
    }

    /*
    *  Apply Ageing error on numbers at age before and after
    */
    if (ageing_error_label_ != "") {
      vector<vector<Double>>& mis_matrix = ageing_error_->mis_matrix();
      vector<Double> temp_before(numbers_age_before.size(), 0.0);
      vector<Double> temp_after(numbers_age_after.size(), 0.0);

      for (unsigned i = 0; i < mis_matrix.size(); ++i) {
        for (unsigned j = 0; j < mis_matrix[i].size(); ++j) {
          temp_before[j] += numbers_age_before[i] * mis_matrix[i][j];
          temp_after[j] += numbers_age_after[i] * mis_matrix[i][j];
        }
      }
      numbers_age_before = temp_before;
      numbers_age_after = temp_after;
    }


    /*
     *  Now collapse the numbers at age into our expected values
     */
    Double plus_before = 0.0, plus_after = 0.0;
    for (unsigned k = 0; k < numbers_age_before.size(); ++k) {
      // age_offset is the difference between the observation's min age and the model's min age
      unsigned age_offset = min_age_ - model_->min_age();
      if (numbers_age_before[k] > 0) {
        if (k >= age_offset && (k - age_offset + min_age_) <= max_age_) {
          expected_values[k - age_offset] = (numbers_age_before[k] - numbers_age_after[k]) / numbers_age_before[k];
          LOG_FINEST() << "Numbers before migration = " << numbers_age_before[k] << " numbers after migration = " << numbers_age_after[k]
                   << " proportion migrated = " <<   expected_values[k - age_offset];
        }
        if (((k - age_offset + min_age_) > max_age_) && age_plus_) {
          plus_before += numbers_age_before[k];
          plus_after += numbers_age_after[k];
        }
      } else {
        if (k >= age_offset && (k - age_offset + min_age_) <= max_age_)
          expected_values[k - age_offset] = 0;
        if (((k - age_offset + min_age_) > max_age_) && age_plus_) {
          plus_before += 0;
          plus_after += 0;
        }
      }
    }
    LOG_FINEST() << "Plus group before migration = " << plus_before << " Plus group after migration = " << plus_after;
    if (age_plus_)
      expected_values[age_spread_ - 1] = (plus_before - plus_after) / plus_before;


    if (expected_values.size() != proportions_[model_->current_year()][category_labels_[category_offset]].size())
      LOG_CODE_ERROR() << "expected_values.size(" << expected_values.size() << ") != proportions_[category_offset].size("
        << proportions_[model_->current_year()][category_labels_[category_offset]].size() << ")";

    /**
     * save our comparisons so we can use them to generate the score from the likelihoods later
     */

    for (unsigned i = 0; i < expected_values.size(); ++i) {
      LOG_FINEST() << " Numbers at age " << min_age_ + i << " = " << expected_values[i];
      SaveComparison(category_labels_[category_offset], min_age_ + i, 0.0, expected_values[i], proportions_[model_->current_year()][category_labels_[category_offset]][i],
          process_errors_by_year_[model_->current_year()], error_values_[model_->current_year()][category_labels_[category_offset]][i], delta_, 0.0);
    }
  }
}
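/**
 * The ageing-error step above is a matrix-vector product: observed numbers at
 * age j are sum_i n_i * M[i][j], where M[i][j] is the probability of recording
 * a true age i as age j. A standalone sketch:
 */
#include <cstddef>
#include <vector>

std::vector<double> ApplyAgeingError(const std::vector<double>& numbers_at_age,
                                     const std::vector<std::vector<double>>& mis_matrix) {
  std::vector<double> result(numbers_at_age.size(), 0.0);
  for (std::size_t i = 0; i < mis_matrix.size(); ++i)
    for (std::size_t j = 0; j < mis_matrix[i].size(); ++j)
      result[j] += numbers_at_age[i] * mis_matrix[i][j];
  return result;
}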