#include <torch/csrc/WindowsTorchApiMacro.h>

#ifndef DOXYGEN_SHOULD_SKIP_THIS
// (forward declarations elided from this listing)
#endif // DOXYGEN_SHOULD_SKIP_THIS

// detail::OptimizerBase: base class for all optimizers; defines no step() yet.
class TORCH_API OptimizerBase {
 public:
  void add_parameters(const std::vector<Tensor>& parameters);
  virtual void zero_grad();
  const std::vector<Tensor>& parameters() const noexcept;
  std::vector<Tensor>& parameters() noexcept;
  size_t size() const noexcept;

 protected:
  // Accesses a buffer at the given index, growing the vector and
  // zero-filling the new entries when the index is out of range.
  template <typename T>
  T& buffer_at(std::vector<T>& buffers, size_t index) {
    if (buffers.size() <= index) {
      const auto old_size = buffers.size();
      buffers.resize(index + 1);
      std::fill(buffers.begin() + old_size, buffers.end(), T{0});
    }
    return buffers[index];
  }
  Tensor& buffer_at(std::vector<Tensor>& buffers, size_t index);

  std::vector<Tensor> parameters_;
};

// Optimizer: step() takes no arguments and returns nothing.
class Optimizer : public detail::OptimizerBase {
 public:
  using detail::OptimizerBase::OptimizerBase;
  virtual void step() = 0;
};

// LossClosureOptimizer: step() takes a loss closure (described below).
class LossClosureOptimizer : public detail::OptimizerBase {
 public:
  using detail::OptimizerBase::OptimizerBase;
};
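To show how a concrete optimizer is meant to build on this base class, here is a minimal sketch of a momentum-SGD-style subclass. It assumes the older torch::optim API shown in this listing; MySGD, learning_rate_, and momentum_buffers_ are illustrative names, not part of libtorch.

#include <torch/torch.h>

// Sketch only: a hypothetical optimizer built on the base class above.
class MySGD : public torch::optim::Optimizer {
 public:
  MySGD(std::vector<torch::Tensor> parameters, double learning_rate)
      : Optimizer(std::move(parameters)), learning_rate_(learning_rate) {}

  void step() override {
    torch::NoGradGuard no_grad;  // parameter updates must not be tracked by autograd
    for (size_t i = 0; i < parameters_.size(); ++i) {
      auto& parameter = parameters_[i];
      if (!parameter.grad().defined()) {
        continue;  // this parameter received no gradient in the backward pass
      }
      // buffer_at() lazily grows momentum_buffers_ and zero-fills new slots,
      // so no explicit initialization is needed here.
      auto& momentum = buffer_at(momentum_buffers_, i);
      momentum.mul_(0.9).add_(parameter.grad());
      parameter.add_(momentum, /*alpha=*/-learning_rate_);
    }
  }

 private:
  double learning_rate_;
  std::vector<torch::Tensor> momentum_buffers_;
};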
std::vector<Tensor> parameters_
    The parameters this optimizer optimizes.

torch::optim::Optimizer
    Optimizer that defines a required step() method that takes no arguments
    and produces no values.

T& buffer_at(std::vector<T>& buffers, size_t index)
    Accesses a buffer at the given index.

torch::optim::LossClosureOptimizer
    Optimizer that requires the loss function to be supplied to the step()
    function, as it may evaluate the loss function multiple times.
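For the no-argument step() variant, the call site typically looks like the following sketch; it reuses the hypothetical MySGD from above, and the model, data_loader, and loss function are likewise illustrative rather than part of this header.

auto model = torch::nn::Linear(10, 1);
MySGD optimizer(model->parameters(), /*learning_rate=*/0.01);

for (auto& batch : data_loader) {  // data_loader is assumed to exist
  optimizer.zero_grad();           // clear gradients from the previous step
  auto prediction = model->forward(batch.data);
  auto loss = torch::mse_loss(prediction, batch.target);
  loss.backward();                 // populate .grad() on each parameter
  optimizer.step();                // takes no arguments, returns nothing
}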
torch::optim::detail::OptimizerBase
    Base class for all optimizers; it does not yet define a step() mechanism.

std::function<Tensor()> LossClosure
    A loss function closure, which is expected to return the loss value.
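For the loss-closure variant, the caller packages the forward and backward pass into a LossClosure and hands it to step(), which may invoke it more than once. A sketch, reusing the illustrative model and batch from above; the exact options signature for constructing LBFGS depends on the libtorch version:

// Sketch of the loss-closure step() pattern; LBFGS derives from LossClosureOptimizer.
torch::optim::LBFGS lbfgs(model->parameters(), torch::optim::LBFGSOptions(/*learning_rate=*/1.0));

auto closure = [&]() -> torch::Tensor {
  lbfgs.zero_grad();               // the closure recomputes the loss from scratch
  auto loss = torch::mse_loss(model->forward(batch.data), batch.target);
  loss.backward();
  return loss;                     // a LossClosure must return the loss value
};

torch::Tensor loss = lbfgs.step(closure);  // step() may call the closure several times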