#include <torch/csrc/autograd/saved_variable.h>

#include <torch/csrc/autograd/edge.h>
#include <torch/csrc/autograd/function.h>
#include <torch/csrc/autograd/variable.h>

#include <ATen/Tensor.h>

#include <memory>
#include <stdexcept>

namespace torch {
namespace autograd {
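// A SavedVariable is a snapshot of a Variable taken when it is saved for the
// backward pass: it records the data, the graph edge information (output_nr
// and either the grad_fn or the grad accumulator), and the version counter,
// so that unpack() can rebuild an equivalent Variable later and detect
// in-place modifications made in the meantime.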
SavedVariable::SavedVariable(const Variable& variable, bool is_output) {
  if (variable.defined()) {
    was_default_constructed_ = false;
    output_nr_ = variable.output_nr();
    requires_grad_ = variable.requires_grad();
    has_grad_fn_ = !variable.is_leaf();
    data_ = variable.data();
    if (variable.is_leaf()) {
      grad_accumulator_ = variable.grad_accumulator();
    } else if (!is_output) {
      // The grad_fn of an output is the function that owns this
      // SavedVariable, so storing it here would create a reference cycle;
      // for outputs it is supplied later via unpack()'s saved_for argument.
      grad_fn_ = variable.grad_fn();
    }
    version_counter_ = variable.version_counter();
    saved_version_ = version_counter_.current_version();
  }
}
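// Minimal usage sketch (illustrative only; `MyBackward`, `saved_input_`, and
// `saved_result_` are hypothetical names, not identifiers from this file).
// Inputs a backward node needs are saved with is_output = false; the node's
// own outputs are saved with is_output = true so that their grad_fn (the
// node itself) is not stored and no reference cycle is created:
//
//   struct MyBackward : public Function {
//     SavedVariable saved_input_;
//     SavedVariable saved_result_;
//     variable_list apply(variable_list&& grads) override;
//   };
//
//   node->saved_input_ = SavedVariable(input, /*is_output=*/false);
//   node->saved_result_ = SavedVariable(result, /*is_output=*/true);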
Variable SavedVariable::unpack(std::shared_ptr<Function> saved_for) const {
  if (!data_.defined()) {
    if (!was_default_constructed_) {
      throw std::runtime_error(ERR_BACKWARD_TWICE);
    }
    return Variable();
  }

  if (saved_version_ != version_counter_.current_version()) {
    throw std::runtime_error(
        "one of the variables needed for gradient computation has been "
        "modified by an inplace operation");
  }

  auto grad_fn = grad_fn_;
  if (has_grad_fn_ && !grad_fn) {
    if (!saved_for) {
      // Saving the grad_fn in the constructor would have created a circular
      // reference, so it must be passed in to the unpack function.
      throw std::runtime_error("No grad_fn for non-leaf saved variable");
    }
    grad_fn = std::move(saved_for);
  }

  Variable var;
  if (grad_fn) {
    var = make_variable(data_, Edge(std::move(grad_fn), output_nr_));
  } else {
    var = make_variable(data_, requires_grad_);
  }
  var.set_version_counter(saved_version_);

  // A saved leaf that requires grad must still have its grad accumulator
  // alive; the graph keeps a reference to it.
  if (requires_grad_ && !var.grad_fn() && grad_accumulator_.expired())
    throw std::logic_error("No grad accumulator for a saved leaf!");
  var.set_grad_accumulator(grad_accumulator_);

  return var;
}
const char* ERR_BACKWARD_TWICE =
    "Trying to backward through the graph a second time, but the buffers have "
    "already been freed. Specify retain_graph=True when calling backward "
    "the first time.";

} // namespace autograd
} // namespace torch