#include <torch/csrc/autograd/functions/accumulate_grad.h>

#include <torch/csrc/autograd/grad_mode.h>
#include <torch/csrc/autograd/variable.h>
#include <torch/csrc/autograd/functions/basic_ops.h>
#include <torch/csrc/autograd/functions/tensor.h>
#include <torch/csrc/autograd/functions/utils.h>

#include <cstdint>
#include <stdexcept>
#include <utility>

namespace torch {
namespace autograd {
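
// AccumulateGrad is the terminal node attached to a leaf Variable: backward
// passes end here, depositing the incoming gradient into variable.grad().
// sequence_nr is set to UINT64_MAX so the engine schedules this node as soon
// as its gradient becomes ready.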
AccumulateGrad::AccumulateGrad(Variable variable_)
    : Function(/*sequence_nr=*/UINT64_MAX),
      variable(std::move(variable_)) {
  add_input_metadata(variable);
}
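
// XXX: apply() is not thread-safe: it mutates variable.grad() without
// synchronization, so two invocations for the same variable must not run
// concurrently.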
auto AccumulateGrad::apply(variable_list&& grads) -> variable_list {
  check_input_variables("AccumulateGrad", grads, 1, 0);

  if (!grads[0].defined())
    return {};
  if (variable.grad_fn())
    throw std::logic_error(
        "leaf variable has been moved into the graph interior");
  if (!variable.requires_grad())
    return {};

  auto new_grad = std::move(grads[0]);
  for (auto& hook : variable.hooks()) {
    new_grad = (*hook)({new_grad})[0];
  }
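
  // variable.grad() holds the gradient accumulated so far; it is undefined
  // until the first accumulation for this leaf.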
  at::Tensor& grad = variable.grad();
  if (!grad.defined()) {
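    // We can steal new_grad instead of cloning it when grad mode is off (no
    // graph to preserve), the gradient is dense and contiguous, and we hold
    // the last reference to it. If this function has post hooks, the engine
    // temporarily holds one extra reference, hence the
    // "+ !post_hooks().empty()" slack in the use_count() check.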
    if (!GradMode::is_enabled()
        && !new_grad.is_sparse()
        && new_grad.is_contiguous()
        && new_grad.use_count() <= 1 + !post_hooks().empty()) {
      variable.grad() = new_grad.detach();
    } else {
      variable.grad() = new_grad.clone();
    }
  } else if (!GradMode::is_enabled()) {
    Variable& grad_variable = as_variable_ref(grad);
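    // Grad mode is off, so accumulate in place on the existing tensor. This
    // avoids an allocation per step and keeps variable.grad() pointing at the
    // same tensor, which some user code relies on. A sparse gradient cannot
    // be added to in place, so materialize the sum via set_data() instead.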
    if (grad_variable.is_sparse() && !new_grad.is_sparse()) {
      grad_variable.set_data(new_grad.data() + grad_variable.data());
    } else {
      grad_variable.data() += new_grad.data();
    }
  } else {
    // Grad mode is on: record the addition in the graph so that double
    // backward can differentiate through the accumulation.
    variable.grad() = grad + new_grad;
  }

  return variable_list();
}

} // namespace autograd
} // namespace torch
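
// Orientation sketch (not part of this translation unit): a minimal libtorch
// snippet showing when the engine reaches AccumulateGrad for a leaf.
//
//   #include <torch/torch.h>
//
//   auto w = torch::randn({3}, torch::requires_grad());
//   auto loss = (w * w).sum();
//   loss.backward();   // the backward pass ends at AccumulateGrad::apply(),
//                      // which stores 2*w into w.grad()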