boost::optional<reaction_task_t> find_automatic_reaction_task(const settler_ai_t &ai) {
    if (automatic_reactions.empty()) return boost::optional<reaction_task_t>{};

    boost::optional<reaction_task_t> result;

    // Iterate through available reactions
    for (auto outerit = automatic_reactions.begin(); outerit != automatic_reactions.end(); ++outerit) {
        // Is the workshop busy?
        auto busy_finder = workshop_claimed.find(outerit->first);
        if (busy_finder != workshop_claimed.end()) continue;

        // Iterate available automatic reactions
        for (const std::string &reaction_name : outerit->second) {
            auto reaction = reaction_defs.find(reaction_name);
            if (reaction == reaction_defs.end()) continue;

            // Is the settler allowed to do this?
            int target_category = -1;
            if (reaction->second.skill == "Carpentry") {
                target_category = JOB_CARPENTRY;
            } else if (reaction->second.skill == "Masonry") {
                target_category = JOB_MASONRY;
            }
            if (target_category != -1 && !ai.permitted_work[target_category]) continue;

            // Are the inputs available?
            bool available = true;
            std::vector<std::pair<std::size_t, bool>> components;
            for (auto &input : reaction->second.inputs) {
                const int n_available = available_items_by_reaction_input(input);
                if (n_available < input.quantity) {
                    available = false;
                } else {
                    // Claim an item and push its # to the list
                    std::size_t item_id = claim_item_by_reaction_input(input);
                    components.emplace_back(item_id, false);
                }
            }

            if (available) {
                // Components are available, build job and return it
                result = reaction_task_t{outerit->first, reaction->second.name, reaction->second.tag, components};
                workshop_claimed.insert(outerit->first);
                return result;
            } else {
                for (const auto &comp : components) {
                    unclaim_by_id(comp.first);
                }
            }
        }
    }

    return result;
}
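// Minimal usage sketch (hypothetical caller; only find_automatic_reaction_task
// and the types it uses come from the function above). It illustrates the
// claiming contract: a returned task already has its workshop inserted into
// workshop_claimed and its component items reserved, so the caller only hands
// the task over and does no claiming of its own.
void try_assign_workshop_job(settler_ai_t &ai) {
    boost::optional<reaction_task_t> task = find_automatic_reaction_task(ai);
    if (!task) return;  // nothing buildable with the items on hand

    // Hypothetical hand-off: store the task so the job system can path the
    // settler to the claimed workshop on the next tick.
    begin_reaction_job(ai, *task);  // hypothetical helper, not in the code above
}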
void Restaurant::FillInPredictives(const std::vector<double>& parent_predictives,
                                   int type,
                                   const LambdaManagerInterface& lmanager,
                                   int depth,
                                   const HPYParameter& hpy_parameter,
                                   std::vector<double>& predictives) const {
  auto floor_it = floor2c_t_.begin();
  auto type_it = type2internal_.find(type);

  // Floor 0 starts from the parent's predictive and is adjusted below.
  predictives[0] = parent_predictives[0];

  if (floor2c_t_.empty()) {
    // No floor has any customers: every other floor is just the lambda-weighted
    // mixture of its parent predictive and the floor-0 predictive.
    for (size_t i = 1; i < predictives.size(); ++i) {
      double lambda = lmanager.lambda(i, depth);
      predictives[i] = lambda * parent_predictives[i] + (1 - lambda) * predictives[0];
    }
    return;
  }

  if (floor_it->first == 0) {
    // Floor 0 is occupied: apply the Pitman-Yor parent-fallback factor,
    // (concentration + discount * tables) / (customers + concentration) ...
    auto& c_t = floor_it->second;
    predictives[0] *= (hpy_parameter.concentration(depth, 0) + hpy_parameter.discount(depth, 0) * c_t.second)
                      / (c_t.first + hpy_parameter.concentration(depth, 0));
    if (type_it != type2internal_.end()) {
      auto& sections = type_it->second.sections_;
      if (!sections.empty()) {
        auto section_begin = sections.begin();
        if (section_begin->first == 0) {
          // ... and add the existing-table term for this type at floor 0.
          int cw = section_begin->second.customers;
          int tw = section_begin->second.tables;
          predictives[0] += (cw - hpy_parameter.discount(depth, 0) * tw)
                            / (c_t.first + hpy_parameter.concentration(depth, 0));
        }
      }
    }
  }

  // Base value for every other floor: lambda-weighted mixture of its parent
  // predictive and the (now adjusted) floor-0 predictive.
  for (size_t i = 1; i < predictives.size(); ++i) {
    double lambda = lmanager.lambda(i, depth);
    predictives[i] = lambda * parent_predictives[i] + (1 - lambda) * predictives[0];
  }

  if (floor_it->first == 0) {
    ++floor_it;  // floor 0 has already been handled
  }

  // Parent-fallback factor for every other occupied floor; cache the customer
  // count so the section pass below uses the same denominator.
  for (; floor_it != floor2c_t_.end(); ++floor_it) {
    auto floor_id = floor_it->first;
    auto& c_t = floor_it->second;
    tmp_c_[floor_id] = c_t.first;
    predictives[floor_id] *= (hpy_parameter.concentration(depth, floor_id) + hpy_parameter.discount(depth, floor_id) * c_t.second)
                             / (c_t.first + hpy_parameter.concentration(depth, floor_id));
  }

  // No record for this type: there are no existing-table terms to add.
  if (type_it == type2internal_.end()) return;

  auto& sections = type_it->second.sections_;
  auto section_it = sections.begin();
  if (section_it != sections.end() && section_it->first == 0) {
    ++section_it;  // floor 0's section was already added above
  }
  // Existing-table term for every other floor where this type is seated.
  for (; section_it != sections.end(); ++section_it) {
    auto floor_id = section_it->first;
    auto& section = section_it->second;
    int cw = section.customers;
    int tw = section.tables;
    predictives[floor_id] += (cw - hpy_parameter.discount(depth, floor_id) * tw)
                             / (tmp_c_[floor_id] + hpy_parameter.concentration(depth, floor_id));
  }
}
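// Illustrative sketch (not part of Restaurant): the single-floor Pitman-Yor
// predictive that each per-floor update above applies, written out as a
// standalone function. The names and the free-function form are assumptions
// made for clarity; in FillInPredictives, the "parent" value for floors other
// than 0 is the lambda-weighted mixture computed beforehand.
double pitman_yor_predictive(double parent_p,  // fallback probability P_parent(w)
                             double theta,     // concentration at this depth/floor
                             double d,         // discount at this depth/floor
                             int c,            // total customers at the floor
                             int t,            // total tables at the floor
                             int c_w,          // customers serving this type
                             int t_w) {        // tables serving this type
  // Parent-fallback ("new table") term, matching the *= step above.
  double p = parent_p * (theta + d * t) / (c + theta);
  // Existing-table term for this type, matching the += step above.
  p += (c_w - d * t_w) / (c + theta);
  return p;
}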