#include <torch/csrc/autograd/functions/accumulate_grad.h>
#include <torch/csrc/autograd/functions/basic_ops.h>
#include <torch/csrc/autograd/functions/tensor.h>
#include <torch/csrc/autograd/functions/pybind.h>
#include <torch/csrc/autograd/python_cpp_function.h>
#include <torch/csrc/autograd/generated/python_functions.h>
#include <torch/csrc/jit/python_tracer.h>
#include <torch/csrc/utils/pybind.h>
#include <torch/csrc/utils/tuple_parser.h>

using namespace torch::autograd;
using torch::TupleParser;

// Constructor policy for DelayedError: parses (msg, num_inputs) from the
// Python argument tuple and builds the corresponding C++ node.
struct DelayedErrorCtor {
  DelayedError* operator()(PyObject* args) {
    std::string msg;
    int num_inputs;

    TupleParser parser(args, 2);
    parser.parse(msg, "msg");
    parser.parse(num_inputs, "num_inputs");

    return new DelayedError(msg, num_inputs);
  }
};
struct NoCtor {
  Function* operator()(PyObject* args) {
    throw std::runtime_error("Cannot construct");
  }
};
template<typename C, typename T>
static void addClass(PyObject* module, PyTypeObject& type, const char* name,
                     PyGetSetDef* function_properties = nullptr,
                     PyMethodDef* function_methods = nullptr)
{
  createForwardFunctionPyTypeObject<T>(type, name, function_properties, function_methods);
  Py_INCREF(&type);  // PyModule_AddObject steals a reference on success
  PyModule_AddObject(module, name, (PyObject*)&type);
  registerCppFunction(typeid(C), &type);
}
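// Generic property getters for exposing fields of a C++ node to Python. Each
// reads a member (given as the pointer-to-member `ptr`) out of the node stored
// in the THPCppFunction wrapper and converts it with `Convert`; getTupleAttr
// handles container members, returning their elements as a Python tuple.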
template<typename T, typename ValueT, typename ParamsT, ValueT ParamsT::*ptr,
         typename ConvertArgT, PyObject* (*Convert)(ConvertArgT)>
PyObject* getTupleAttr(PyObject* obj, void* _unused)
{
  HANDLE_TH_ERRORS
  THPCppFunction* self = (THPCppFunction*)obj;
  auto& arr = ((T*)(self->cdata.get()))->*ptr;
  auto num_elems = arr.size();
  THPObjectPtr py_tuple(PyTuple_New(num_elems));
  if (!py_tuple) return nullptr;
  for (size_t i = 0; i < num_elems; ++i) {
    PyTuple_SET_ITEM(py_tuple.get(), i, Convert(arr[i]));
  }
  return py_tuple.release();
  END_HANDLE_TH_ERRORS
}
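// getValueAttr is the scalar counterpart of getTupleAttr: it converts a single
// member value instead of building a tuple.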
template<typename T, typename ValueT, typename ParamsT, ValueT ParamsT::*ptr,
         typename ConvertArgT, PyObject* (*Convert)(ConvertArgT)>
PyObject* getValueAttr(PyObject* obj, void* _unused)
{
  HANDLE_TH_ERRORS
  THPCppFunction* self = (THPCppFunction*)obj;
  auto& val = ((T*)(self->cdata.get()))->*ptr;
  return Convert(val);
  END_HANDLE_TH_ERRORS
}
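// Custom getter for AccumulateGrad's `variable` property: returns the Python
// Variable whose gradient this node accumulates.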
static PyObject* accumulateGradVar(PyObject* _self, void* _unused)
{
  THPCppFunction* self = (THPCppFunction*)_self;
  auto grad_acc = (AccumulateGrad*)self->cdata.get();
  return THPVariable_Wrap(grad_acc->variable);
}
static struct PyGetSetDef accumulate_grad_properties[] = {
  THP_FUNCTION_DEFAULT_PROPERTIES,
  {(char*)"variable", accumulateGradVar, nullptr, nullptr, nullptr},
  {nullptr}
};
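// Creates the torch._C._functions module, registers the hand-written node
// types above plus the autogenerated ones, and attaches the module to torch._C.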
void THPAutograd_initFunctions()
{
  THPObjectPtr module(PyModule_New("torch._C._functions"));
  if (!module) throw python_error();

  static PyTypeObject AccumulateGradClass;
  addClass<AccumulateGrad, NoCtor>(module, AccumulateGradClass, "AccumulateGrad", accumulate_grad_properties);

  static PyTypeObject ErrorClass;
  addClass<Error, NoCtor>(module, ErrorClass, "Error");

  static PyTypeObject NotImplementedClass;
  addClass<NotImplemented, NoCtor>(module, NotImplementedClass, "NotImplemented");

  static PyTypeObject DelayedErrorClass;
  addClass<DelayedError, DelayedErrorCtor>(module, DelayedErrorClass, "DelayedError");

  static PyTypeObject CopyBackwardsClass;
  addClass<CopyBackwards, NoCtor>(module, CopyBackwardsClass, "CopyBackwards");

  static PyTypeObject CopySlicesClass;
  addClass<CopySlices, NoCtor>(module, CopySlicesClass, "CopySlices");

  generated::initialize_autogenerated_functions();

  auto c_module = THPObjectPtr(PyImport_ImportModule("torch._C"));
  if (!c_module) throw python_error();

  if (PyModule_AddObject(c_module, "_functions", module) < 0) {
    throw python_error();
  }
  module.release();  // ownership transferred to torch._C
}