1 #include <torch/csrc/autograd/python_legacy_variable.h> 5 #include <torch/csrc/Exceptions.h> 6 #include <torch/csrc/autograd/python_function.h> 7 #include <torch/csrc/autograd/python_variable.h> 8 #include <torch/csrc/tensor/python_tensor.h> 9 #include <torch/csrc/jit/tracer.h> 13 namespace torch {
namespace autograd {
15 static PyObject *THPVariable_pynew(PyTypeObject* type, PyObject *args, PyObject *kwds) {
18 PyObject *data =
nullptr;
19 PyObject *grad_fn =
nullptr;
22 const char* name =
nullptr;
24 const char *accepted_args[] = {
"data",
"requires_grad",
"volatile",
"_grad_fn",
"name",
nullptr};
25 if (!PyArg_ParseTupleAndKeywords(args, kwds,
"|ObbOz", (
char**)accepted_args,
26 &data, &requires_grad, &is_volatile, &grad_fn, &name))
29 if (grad_fn == Py_None)
33 PyErr_WarnEx(PyExc_UserWarning,
34 "volatile was removed and now has no effect. Use `with torch.no_grad():` " 38 if (is_volatile && requires_grad) {
39 throw ValueError(
"Variable can't be volatile and require_grad at the same time!");
41 if (grad_fn && !THPFunction_Check(grad_fn)) {
42 throw TypeError(
"_grad_fn has to be a Function object or None, but got %s",
43 Py_TYPE(grad_fn)->tp_name);
46 if (!data || data == Py_None) {
49 auto var = at::empty({0}, torch::tensors::get_default_tensor_type().options());
50 tensor =
static_cast<Variable&
>(var).data();
51 }
else if (THPVariable_Check(data)) {
55 Py_TYPE(data)->tp_name);
60 auto grad_fn_ = THPFunction_asFunction((
THPFunction*)grad_fn);
61 Edge edge(grad_fn_, grad_fn_->add_input_metadata(tensor));
62 var = make_variable(std::move(tensor), std::move(edge));
64 var = make_variable(std::move(tensor), requires_grad);
71 if (jit::tracer::isTracing() && data && data != Py_None && THPVariable_Check(data)) {
72 if (
auto *v = jit::tracer::getValueTrace(((
THPVariable*)data)->cdata)) {
73 jit::tracer::setValueTrace(var, v);
77 return THPVariable_Wrap(std::move(var));
81 PyTypeObject THPLegacyVariableType = {
82 PyVarObject_HEAD_INIT(
nullptr, 0)
83 "torch._C._LegacyVariableBase",
101 Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
122 void init_legacy_variable(PyObject *module) {
123 if (PyType_Ready(&THPLegacyVariableType) < 0) {
126 auto obj = (PyObject*)&THPLegacyVariableType;
128 if (PyModule_AddObject(module,
"_LegacyVariableBase", obj) < 0) {
// NOTE(review): the following text appears to be unrelated documentation
// fragments pasted into this file; preserved as a comment pending removal.
//   TensorOptions requires_grad(bool requires_grad = true)
//     Convenience function that returns a TensorOptions object with the
//     requires_grad flag set to the given value.
//   Flush-To-Zero and Denormals-Are-Zero mode.