shared_ptr<Node> op::Divide::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 2)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    return make_shared<Divide>(new_args.at(0), new_args.at(1));
}
void op::Softmax::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    auto z = delta * shared_from_this();
    auto zsum = make_shared<op::Sum>(z, m_axes);

    // Rebuild the original rank: every reduced axis comes back as a size-1
    // dimension so the NumPy-style broadcast below lines up with this node's shape.
    Shape shape;
    for (size_t i = 0; i < get_shape().size(); ++i)
    {
        if (m_axes.find(i) == m_axes.end())
        {
            shape.push_back(get_shape()[i]);
        }
        else
        {
            shape.push_back(1);
        }
    }
    AxisVector order(zsum->get_shape().size());
    iota(order.begin(), order.end(), 0);
    auto zreshape = make_shared<op::Reshape>(zsum, order, shape);

    // adjoint = softmax(x) * delta - softmax(x) * sum(softmax(x) * delta)
    auto adjoint =
        z - builder::make_with_numpy_broadcast<op::Multiply>(shared_from_this(), zreshape);

    auto x = get_argument(0);
    adjoints.add_delta(x, adjoint);
}
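// Standalone sketch (not part of the nGraph sources above; all names here are
// illustrative): a plain-C++ version of the vector-Jacobian product that
// Softmax::generate_adjoints builds, dx = y * (delta - sum(y * delta)) with
// y = softmax(x), shown for the 1-D, single-axis case.
#include <algorithm>
#include <cmath>
#include <vector>

std::vector<double> softmax_1d(const std::vector<double>& x)
{
    double mx = *std::max_element(x.begin(), x.end());
    std::vector<double> y(x.size());
    double sum = 0;
    for (size_t i = 0; i < x.size(); ++i)
    {
        y[i] = std::exp(x[i] - mx); // subtract the max for numerical stability
        sum += y[i];
    }
    for (double& v : y)
    {
        v /= sum;
    }
    return y;
}

std::vector<double> softmax_adjoint_1d(const std::vector<double>& x,
                                       const std::vector<double>& delta)
{
    std::vector<double> y = softmax_1d(x);
    double zsum = 0; // plays the role of op::Sum(z, m_axes) above
    for (size_t i = 0; i < x.size(); ++i)
    {
        zsum += delta[i] * y[i];
    }
    std::vector<double> dx(x.size());
    for (size_t i = 0; i < x.size(); ++i)
    {
        dx[i] = y[i] * delta[i] - y[i] * zsum; // z - y * zsum
    }
    return dx;
}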
shared_ptr<Node> op::ConvolutionBias::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 3)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }

    return shared_ptr<Node>(new ConvolutionBias(new_args.at(0),
                                                new_args.at(1),
                                                new_args.at(2),
                                                get_window_movement_strides(),
                                                get_window_dilation_strides(),
                                                get_padding_below(),
                                                get_padding_above(),
                                                get_data_dilation_strides()));
}
template <class NodeVector>
void FileSystem::debugPrintNodes(NodeVector nodes)
{
    if (debug)
    {
        for (unsigned int ix = 0; ix < nodes.size(); ++ix)
        {
            NodeInfo* node = nodes.at(ix);
            cout << "Node: " << node->getName() << "\t\tSize: " << node->getSize()
                 << "\tModify: " << node->getModifyTime() << "\tPath: " << node->getPath()
                 << "\tSimilars: ";

            // Renamed from 'nodes' so the local list no longer shadows the parameter.
            vector<NodeInfo*> similars = node->getSimilar();
            for (vector<NodeInfo*>::iterator it = similars.begin(); it != similars.end(); ++it)
            {
                cout << (*it)->getPath() << ", ";
            }
            cout << endl;
        }
    }
}
void op::Cos::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    auto x = get_argument(0);

    // d(cos(x))/dx = -sin(x)
    adjoints.add_delta(x, -delta * (make_shared<op::Sin>(x)));
}
shared_ptr<Node> op::Softmax::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 1)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    return make_shared<Softmax>(new_args.at(0), m_axes);
}
shared_ptr<Node> op::Result::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 1)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }

    if (new_args.at(0)->get_outputs().size() != 1)
    {
        throw ngraph_error("Expected a single-output argument");
    }

    auto res = make_shared<Result>(new_args.at(0));
    res->set_needs_copy(m_needs_copy);
    res->set_needs_default_layout(m_needs_default_layout);
    return res;
}
shared_ptr<Node> op::GetOutputElement::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 1)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    return make_shared<GetOutputElement>(new_args.at(0), m_n);
}
shared_ptr<Node>
    op::ConvolutionBiasBackpropFiltersBias::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 2)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    return make_shared<ConvolutionBiasBackpropFiltersBias>(new_args.at(0),
                                                           m_filters_shape,
                                                           m_bias_shape,
                                                           new_args.at(1),
                                                           m_window_movement_strides_forward,
                                                           m_window_dilation_strides_forward,
                                                           m_padding_below_forward,
                                                           m_padding_above_forward,
                                                           m_data_dilation_strides_forward);
}
shared_ptr<Node> op::OneHot::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 1)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    return make_shared<OneHot>(new_args.at(0), m_shape, m_one_hot_axis);
}
void op::Divide::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    auto x = get_argument(0);
    auto y = get_argument(1);

    // d(x/y)/dx = 1/y
    adjoints.add_delta(x, delta / y);
    // d(x/y)/dy = -x/y^2, written as -(x/y)/y using this node's own output
    adjoints.add_delta(y, -delta * shared_from_this() / y);
}
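// Standalone sketch (illustrative only, no nGraph dependencies): a scalar
// finite-difference check of the two quotient derivatives used above,
// delta/y for the numerator and -delta*(x/y)/y for the denominator.
#include <cassert>
#include <cmath>

void check_divide_adjoints()
{
    double x = 3.0, y = 2.0, delta = 1.5, eps = 1e-6;

    double dx_analytic = delta / y;            // delta * d(x/y)/dx
    double dy_analytic = -delta * (x / y) / y; // delta * d(x/y)/dy

    double dx_numeric = delta * ((x + eps) / y - (x - eps) / y) / (2 * eps);
    double dy_numeric = delta * (x / (y + eps) - x / (y - eps)) / (2 * eps);

    assert(std::abs(dx_analytic - dx_numeric) < 1e-5);
    assert(std::abs(dy_analytic - dy_numeric) < 1e-5);
}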
shared_ptr<Node> op::ReduceWindow::copy_with_new_args(const NodeVector& new_args) const
{
    if (new_args.size() != 2)
    {
        throw ngraph_error("Incorrect number of new arguments");
    }
    auto node = make_shared<ReduceWindow>(new_args.at(0),
                                          new_args.at(1),
                                          m_reduction_function,
                                          m_window_shape,
                                          m_window_movement_strides);
    // Give the copy its own reduction function rather than sharing this node's.
    node->m_reduction_function = clone_function(*m_reduction_function);
    return node;
}
void op::ConvolutionBias::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    auto data = get_argument(0);
    const auto data_shape = data->get_shape();

    auto filter = get_argument(1);
    const auto filter_shape = filter->get_shape();

    auto bias = get_argument(2);
    const auto bias_shape = bias->get_shape();

    // using regular convolution backprop for data
    adjoints.add_delta(data,
                       make_shared<op::ConvolutionBackpropData>(data_shape,
                                                                filter,
                                                                delta,
                                                                m_window_movement_strides,
                                                                m_window_dilation_strides,
                                                                m_padding_below,
                                                                m_padding_above,
                                                                m_data_dilation_strides));

    // The fused backprop op computes the filter and bias gradients together;
    // GetOutputElement picks each one out of the multi-output node.
    auto filter_bias_backprop =
        make_shared<op::ConvolutionBiasBackpropFiltersBias>(data,
                                                            filter_shape,
                                                            bias_shape,
                                                            delta,
                                                            m_window_movement_strides,
                                                            m_window_dilation_strides,
                                                            m_padding_below,
                                                            m_padding_above,
                                                            m_data_dilation_strides);
    auto filter_delta = make_shared<op::GetOutputElement>(filter_bias_backprop, 0);
    auto bias_delta = make_shared<op::GetOutputElement>(filter_bias_backprop, 1);

    adjoints.add_delta(filter, filter_delta);
    adjoints.add_delta(bias, bias_delta);
}
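// Standalone sketch (illustrative, not the nGraph kernel): because the bias is
// broadcast across the batch and spatial axes in the forward pass, its gradient
// is just delta reduced over those axes, one value per output channel. Assuming
// a dense NCHW delta in a flat vector:
#include <vector>

std::vector<float> bias_backprop_nchw(
    const std::vector<float>& delta, size_t n, size_t c, size_t h, size_t w)
{
    std::vector<float> dbias(c, 0.0f);
    for (size_t in = 0; in < n; ++in)
    {
        for (size_t ic = 0; ic < c; ++ic)
        {
            for (size_t is = 0; is < h * w; ++is)
            {
                // accumulate delta[in][ic][*][*] into the channel's bias gradient
                dbias[ic] += delta[((in * c) + ic) * h * w + is];
            }
        }
    }
    return dbias;
}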
void op::Result::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    adjoints.add_delta(get_argument(0), delta);
}
void op::GetOutputElement::generate_adjoints(autodiff::Adjoints& adjoints, const NodeVector& deltas)
{
    auto delta = deltas.at(0);

    // Route the delta to output index n of the node feeding this element.
    adjoints.add_delta(get_inputs().at(0).get_output().get_node(), delta, get_n());
}