void network_data_initializer::initialize(
		layer_data_list& data_list,
		const network_schema& schema)
	{
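		// Rescale the freshly initialized weights so that signal variance is
		// preserved through rectifier and combining layers (He et al. style
		// initialization).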
		std::vector<layer::const_ptr> layer_list = schema.get_layers();
		for(unsigned int i = 0; i < layer_list.size(); ++i)
		{
			float weight_multiplier = 1.0F;
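			// ReLU zeroes the negative half of a zero-mean input, halving its
			// variance; compensate with a factor of sqrt(2).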
			if (layer_list[i]->get_type_name() == rectified_linear_layer::layer_type_name)
			{
				weight_multiplier *= sqrtf(2.0F);
			}
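			// PReLU passes negative inputs scaled by a learned slope; use the mean
			// slope a to compute the matching factor sqrt(2 / (1 + a^2)).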
			if (layer_list[i]->get_type_name() == parametric_rectified_linear_layer::layer_type_name)
			{
				layer_data::ptr data = data_list.find(layer_list[i]->instance_name);
				float a = std::accumulate(data->at(0).begin(), data->at(0).end(), 0.0F) / static_cast<float>(data->at(0).size());
				weight_multiplier *= sqrtf(2.0F / (1.0F + a * a));
			}
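			// An add layer combines N inputs scaled by alpha; divide the multiplier
			// by N * alpha to counteract that scaling.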
			if (layer_list[i]->get_type_name() == add_layer::layer_type_name)
			{
				nnforge_shared_ptr<const add_layer> layer_derived = nnforge_dynamic_pointer_cast<const add_layer>(layer_list[i]);
				weight_multiplier *= 1.0F / std::max(static_cast<int>(layer_list[i]->input_layer_instance_names.size()), 1) / layer_derived->alpha;
			}

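			// Fold the accumulated multiplier into the weights (part 0) of each
			// preceding convolution or sparse convolution layer.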
			if ((weight_multiplier != 1.0F) && (!layer_list[i]->input_layer_instance_names.empty()))
			{
				for(std::vector<std::string>::const_iterator it = layer_list[i]->input_layer_instance_names.begin(); it != layer_list[i]->input_layer_instance_names.end(); ++it)
				{
					layer::const_ptr previous_layer = schema.get_layer(*it);
					if ((previous_layer->get_type_name() == convolution_layer::layer_type_name) || (previous_layer->get_type_name() == sparse_convolution_layer::layer_type_name))
					{
						layer_data::ptr data = data_list.find(previous_layer->instance_name);
						std::vector<float>::iterator it_start = data->at(0).begin();
						std::vector<float>::iterator it_end = data->at(0).end();
						for(std::vector<float>::iterator weight_it = it_start; weight_it != it_end; ++weight_it)
							*weight_it *= weight_multiplier;
					}
				}
			}
		}
	}
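	// Every report_frequency epochs: run forward propagation over the validation
	// set and print the error statistics along with per-layer averages.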
	void validate_progress_network_data_pusher::push(
		const training_task_state& task_state,
		const network_schema& schema)
	{
		if ((task_state.get_current_epoch() % report_frequency) == 0)
		{
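			// Load the current weights into the forward propagator and run it over
			// the validation data.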
			forward_prop->set_data(*task_state.data);

			neuron_value_set_data_bunch_writer writer;
			forward_propagation::stat st = forward_prop->run(*reader, writer);

			forward_prop->clear_data();

			std::cout << "----- Validating -----" << std::endl;
			std::cout << st << std::endl;

			for(std::map<std::string, std::pair<layer_configuration_specific, neuron_value_set::ptr> >::const_iterator it = writer.layer_name_to_config_and_value_set_map.begin(); it != writer.layer_name_to_config_and_value_set_map.end(); ++it)
				std::cout << schema.get_layer(it->first)->get_string_for_average_data(it->second.first, *it->second.second->get_average()) << std::endl;
		}
	}
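	// Print the training error for the just-completed epoch together with
	// per-part weight and update magnitude statistics for every layer.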
	void report_progress_network_data_pusher::push(
		const training_task_state& task_state,
		const network_schema& schema)
	{
		unsigned int last_index = static_cast<unsigned int>(task_state.history.size()) - 1;

		std::cout << "----- Training -----" << std::endl;
		std::cout << task_state.history[last_index].first << std::endl;
		if (!task_state.comments[last_index].empty())
			std::cout << task_state.comments[last_index] << std::endl;

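		// For each layer holding data, print per part:
		// [avg |update| / avg |weight|, avg |weight|, avg |update|].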
		std::cout << "Avg [rate weights updates]";
		std::vector<std::string> data_name_list = task_state.data->data_list.get_data_layer_name_list();
		for(std::vector<std::string>::const_iterator it = data_name_list.begin(); it != data_name_list.end(); ++it)
		{
			layer_data::ptr layer_data = task_state.data->data_list.get(*it);
			if (!layer_data->empty())
			{
				std::cout << ", " << *it;
				const std::vector<float>& absolute_updates = task_state.history[last_index].first.average_absolute_updates.find(*it)->second;
				for(unsigned int part_id = 0; part_id < layer_data->size(); ++part_id)
				{
					const std::vector<float>& weights = layer_data->at(part_id);
					double sum = 0.0;
					for(std::vector<float>::const_iterator weight_it = weights.begin(); weight_it != weights.end(); ++weight_it)
						sum += static_cast<double>(fabsf(*weight_it));
					float avg_weight = static_cast<float>(sum / static_cast<double>(weights.size()));

					std::cout << (boost::format(" [%|1$.2e| %|2$.2e| %|3$.2e|]") % (absolute_updates[part_id] / avg_weight) % avg_weight % absolute_updates[part_id]); 
				}
			}
		}
		std::cout << std::endl;

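		// Per-layer average data recorded for this epoch.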
		for(std::map<std::string, std::pair<layer_configuration_specific, nnforge_shared_ptr<std::vector<float> > > >::const_iterator it = task_state.history[last_index].second.begin(); it != task_state.history[last_index].second.end(); ++it)
			std::cout << schema.get_layer(it->first)->get_string_for_average_data(it->second.first, *it->second.second) << std::endl;
	}