Caffe2 - Python API
A deep learning, cross-platform ML framework
Packages
Classes
Files
C++ API
Python API
GitHub
File List
torch
backends
cuda
__init__.py
1
import
sys
2
import
torch
3
4
5
class ContextProp(object):
    """Descriptor that exposes a (getter, setter) pair as an attribute.

    Reading the attribute calls ``getter()``; writing calls
    ``setter(value)``.  If ``setter`` is a string rather than a callable,
    the attribute is read-only and assignment raises ``RuntimeError``
    using that string as the message.
    """

    def __init__(self, getter, setter):
        self.getter = getter
        self.setter = setter

    def __get__(self, obj, objtype):
        # The owning instance/class is ignored; state lives behind the getter.
        return self.getter()

    def __set__(self, obj, val):
        setter = self.setter
        # A string in place of a callable marks the property read-only;
        # the string itself is the error message shown to the user.
        if isinstance(setter, str):
            raise RuntimeError(setter)
        setter(val)
17
18
19
class cuFFTPlanCache(object):
    """Control interface for the cuFFT plan cache.

    ``size`` is read-only (attempting to set it raises with an explanatory
    message); ``max_size`` is read/write; ``clear`` empties the cache.
    All three delegate to private torch C bindings.
    """

    clear = torch._cufft_clear_plan_cache

    max_size = ContextProp(torch._cufft_get_plan_cache_max_size,
                           torch._cufft_set_plan_cache_max_size)

    # Passing a string as the setter makes `size` read-only; assigning to it
    # raises RuntimeError with this message.
    size = ContextProp(
        torch._cufft_get_plan_cache_size,
        'cufft_plan_cache.size is a read-only property showing the current cache. '
        'To set the cache capacity, use cufft_plan_cache.max_size.')
25
26
27
class CUDAModule(object):
    """Proxy object installed in ``sys.modules`` in place of this module.

    Because it is a class instance rather than a plain module, descriptor
    attributes (e.g. ``ContextProp``) participate in attribute get/set,
    while ordinary reads keep working via the shared ``__dict__``.
    """

    # Exposed as torch.backends.cuda.cufft_plan_cache.
    cufft_plan_cache = cuFFTPlanCache()

    def __init__(self, m):
        # Share the real module's namespace so existing globals stay visible.
        self.__dict__ = m.__dict__
        # Keep a reference to the wrapped module; without it the original
        # module object can be garbage-collected and many things break. See:
        # https://stackoverflow.com/questions/47540722/how-do-i-use-the-sys-modules-replacement-trick-in-init-py-on-python-2
        self.__old_mod = m
36
37
# This is the sys.modules replacement trick, see
# https://stackoverflow.com/questions/2447353/getattr-on-a-module/7668273#7668273
# Swapping the module object for a CUDAModule instance lets the ContextProp
# descriptors intercept attribute reads/writes on torch.backends.cuda.
sys.modules[__name__] = CUDAModule(sys.modules[__name__])
torch.backends.cuda.ContextProp.setter
setter
Definition:
__init__.py:8
torch.backends.cuda.CUDAModule.__old_mod
__old_mod
Definition:
__init__.py:33
torch.backends.cuda.ContextProp
Definition:
__init__.py:5
torch.backends.cuda.CUDAModule.__dict__
__dict__
Definition:
__init__.py:29
torch.backends.cuda.cuFFTPlanCache
Definition:
__init__.py:19
torch.backends.cuda.ContextProp.getter
getter
Definition:
__init__.py:7
torch.backends.cuda.CUDAModule
Definition:
__init__.py:27
Generated on Thu Mar 21 2019 13:06:36 for Caffe2 - Python API by
1.8.11