Example #1
	void backward_propagation::set_input_configuration_specific(const std::map<std::string, layer_configuration_specific>& input_configuration_specific_map)
	{
		// Assume the input configuration is unchanged until a difference is found.
		bool same_input_config = true;
		std::map<std::string, layer_configuration_specific> input_configuration_specific_map_filtered;
		for(std::map<std::string, layer_configuration_specific>::const_iterator it = input_configuration_specific_map.begin(); it != input_configuration_specific_map.end(); ++it)
		{
			// Skip configurations for layers that are not data (input) layers.
			if (data_layer_names.find(it->first) == data_layer_names.end())
				continue;

			input_configuration_specific_map_filtered.insert(*it);

			// The input configuration changed if this layer is missing from the cached map
			// or its cached configuration differs from the incoming one.
			std::map<std::string, layer_configuration_specific>::const_iterator it2 = layer_config_map.find(it->first);
			if ((it2 == layer_config_map.end()) || (it->second != it2->second))
			{
				same_input_config = false;
			}
		}
		// Nothing changed: keep the cached layer configurations and skip the rebuild.
		if (same_input_config)
			return;

		// Recompute the per-layer configurations from the schema for the filtered input map.
		layer_config_map = schema->get_layer_configuration_specific_map(input_configuration_specific_map_filtered);

		// In debug mode, dump the schema with the new feature map configurations to a GraphViz (.gv) file.
		if (debug->is_debug())
		{
			boost::filesystem::ofstream out(debug->get_path_to_unique_file("backward_prop_schema_with_feature_map_configs", "gv"), std::ios_base::out | std::ios_base::trunc);
			this->schema->write_gv(out, layer_config_map, cumulative_tiling_factor_map);
		}

		// Refresh the FLOP estimate and signal that the cached layer configurations changed.
		update_flops();

		layer_config_map_modified();
	}
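
Below is a minimal, self-contained sketch of the caching pattern this example uses: filter the incoming configurations down to the known data layers, compare them against the cached per-layer map, and rebuild only when something differs. The propagation_sketch and layer_config types are hypothetical stand-ins introduced for illustration, not the real classes used above, and the rebuild step simply copies the filtered map where the original code queries the schema.

	#include <iostream>
	#include <map>
	#include <set>
	#include <string>

	// Hypothetical stand-in for a layer configuration; not the real layer_configuration_specific.
	struct layer_config
	{
		unsigned int feature_map_count;
		unsigned int width;

		bool operator==(const layer_config& other) const
		{
			return (feature_map_count == other.feature_map_count) && (width == other.width);
		}
		bool operator!=(const layer_config& other) const
		{
			return !(*this == other);
		}
	};

	class propagation_sketch
	{
	public:
		explicit propagation_sketch(const std::set<std::string>& data_layer_names)
			: data_layer_names(data_layer_names)
		{
		}

		void set_input_configuration_specific(const std::map<std::string, layer_config>& input_config_map)
		{
			bool same_input_config = true;
			std::map<std::string, layer_config> filtered;
			for(std::map<std::string, layer_config>::const_iterator it = input_config_map.begin(); it != input_config_map.end(); ++it)
			{
				// Only configurations for known data (input) layers matter.
				if (data_layer_names.find(it->first) == data_layer_names.end())
					continue;

				filtered.insert(*it);

				// A layer missing from the cache, or a differing configuration, forces a rebuild.
				std::map<std::string, layer_config>::const_iterator it2 = layer_config_map.find(it->first);
				if ((it2 == layer_config_map.end()) || (it->second != it2->second))
					same_input_config = false;
			}
			if (same_input_config)
				return; // Nothing changed: keep the cached configurations.

			// Stand-in for schema->get_layer_configuration_specific_map(...): just cache the filtered map.
			layer_config_map = filtered;
			std::cout << "Rebuilt configurations for " << layer_config_map.size() << " data layer(s)" << std::endl;
		}

	private:
		std::set<std::string> data_layer_names;
		std::map<std::string, layer_config> layer_config_map;
	};

	int main()
	{
		std::set<std::string> names;
		names.insert("images");

		propagation_sketch prop(names);

		std::map<std::string, layer_config> input;
		layer_config cfg = { 3, 224 };
		input["images"] = cfg;

		prop.set_input_configuration_specific(input); // rebuilds: cache is empty
		prop.set_input_configuration_specific(input); // no-op: configuration unchanged

		return 0;
	}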
Example #2
	void network_updater::set_input_configuration_specific(const layer_configuration_specific& input_configuration_specific)
	{
		// Skip the rebuild when the cached input configuration is unchanged.
		if ((layer_config_list.size() > 0) && (layer_config_list[0] == input_configuration_specific))
			return;

		// Recompute the per-layer configuration list from the schema for the new input.
		layer_config_list = schema->get_layer_configuration_specific_list(input_configuration_specific);

		// Refresh the FLOP estimate and signal that the layer configuration list changed.
		update_flops();

		layer_config_list_modified();
	}
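
The same early-return guard can be sketched for the single-input case. The updater_sketch and layer_config types below are hypothetical stand-ins, not the real classes used above, and the schema call is replaced by a one-element assignment so the snippet compiles on its own.

	#include <iostream>
	#include <vector>

	// Hypothetical stand-in for a layer configuration; not the real layer_configuration_specific.
	struct layer_config
	{
		unsigned int feature_map_count;

		bool operator==(const layer_config& other) const
		{
			return feature_map_count == other.feature_map_count;
		}
	};

	class updater_sketch
	{
	public:
		void set_input_configuration_specific(const layer_config& input_configuration_specific)
		{
			// Skip the rebuild when the cached input configuration is identical.
			if ((layer_config_list.size() > 0) && (layer_config_list[0] == input_configuration_specific))
				return;

			// Stand-in for schema->get_layer_configuration_specific_list(...): cache a one-element list.
			layer_config_list.assign(1, input_configuration_specific);
			std::cout << "Rebuilt layer configuration list" << std::endl;
		}

	private:
		std::vector<layer_config> layer_config_list;
	};

	int main()
	{
		updater_sketch updater;
		layer_config cfg = { 3 };

		updater.set_input_configuration_specific(cfg); // rebuilds: list is empty
		updater.set_input_configuration_specific(cfg); // no-op: same configuration

		return 0;
	}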