4 from ._utils
import _type, _cuda
def __str__(self):
    """Render each element on its own line, then a '[typename of size N]' footer."""
    body = '\n '.join(str(self[i]) for i in range(len(self)))
    footer = '\n[{} of size {}]'.format(torch.typename(self), len(self))
    return ' ' + body + footer
def __iter__(self):
    """Return an iterator yielding the storage's elements in index order."""
    indices = range(self.size())
    return (self[i] for i in indices)
def __deepcopy__(self, memo):
    """Deep-copy this storage, preserving aliasing across the copy.

    A torch-specific sub-dict of ``memo`` maps the underlying C pointer
    (``_cdata``) to its clone, so two objects sharing one storage still
    share a single storage after deep-copying.
    """
    memo = memo.setdefault('torch', {})
    if self._cdata in memo:
        return memo[self._cdata]
    new_storage = self.clone()
    memo[self._cdata] = new_storage
    # FIX: the clone must be returned — the visible code fell through and
    # implicitly returned None, breaking copy.deepcopy for fresh storages.
    return new_storage
def __reduce__(self):
    """Support pickling by round-tripping through torch's own serialization.

    Serializes the storage with ``torch.save`` into an in-memory buffer and
    registers ``_load_from_bytes`` (which calls ``torch.load``) as the
    reconstructor.
    """
    b = io.BytesIO()
    # FIX: the visible fragment used `b` without ever defining it; the
    # buffer must be populated via torch.save before getvalue().
    torch.save(self, b)
    return (_load_from_bytes, (b.getvalue(),))
def __sizeof__(self):
    """Report the object's size: base overhead plus the element payload bytes."""
    payload = self.element_size() * self.size()
    return super(_StorageBase, self).__sizeof__() + payload
41 """Returns a copy of this storage""" 42 device = self.get_device()
if self.
is_cuda else -1
44 return type(self)(self.size()).copy_(self)
47 """Returns a list containing the elements of this storage""" 48 return [v
for v
in self]
51 """Returns a CPU copy of this storage if it's not already on the CPU""" 52 return self.type(getattr(torch, self.__class__.__name__))
55 """Casts this storage to double type""" 56 return self.type(type(self).__module__ +
'.DoubleStorage')
59 """Casts this storage to float type""" 60 return self.type(type(self).__module__ +
'.FloatStorage')
63 """Casts this storage to half type""" 64 return self.type(type(self).__module__ +
'.HalfStorage')
67 """Casts this storage to long type""" 68 return self.type(type(self).__module__ +
'.LongStorage')
71 """Casts this storage to int type""" 72 return self.type(type(self).__module__ +
'.IntStorage')
75 """Casts this storage to short type""" 76 return self.type(type(self).__module__ +
'.ShortStorage')
79 """Casts this storage to char type""" 80 return self.type(type(self).__module__ +
'.CharStorage')
83 """Casts this storage to byte type""" 84 return self.type(type(self).__module__ +
'.ByteStorage')
87 """Casts this storage to bool type""" 88 return self.type(type(self).__module__ +
'.BoolStorage')
91 """Copies the storage to pinned memory, if it's not already pinned.""" 93 raise TypeError(
"cannot pin '{0}' only CPU memory can be pinned" 97 return type(self)(self.size(), allocator=allocator).copy_(self)
100 """Moves the storage to shared memory. 102 This is a no-op for storages already in shared memory and for CUDA 103 storages, which do not need to be moved for sharing across processes. 104 Storages in shared memory cannot be resized. 111 elif get_sharing_strategy() ==
'file_system':
112 self._share_filename_()
@classmethod
def _new_shared(cls, size):
    """Creates a new storage in shared memory with the same data type.

    Dispatches on the active multiprocessing sharing strategy; CUDA
    storages need no special shared-memory constructor.
    """
    # FIX: the visible fragment had a dangling `elif`; restore the CUDA
    # short-circuit and the strategy dispatch around it.
    from torch.multiprocessing import get_sharing_strategy
    if cls.is_cuda:
        return cls(size)
    elif get_sharing_strategy() == 'file_system':
        return cls._new_using_filename(size)
    else:
        return cls._new_using_fd(size)
def _load_from_bytes(b):
    """Reconstruct a storage pickled by ``__reduce__`` from its raw bytes.

    NOTE(security): ``torch.load`` unpickles arbitrary objects; only feed
    this bytes produced by trusted ``torch.save`` output.
    """
    buffer = io.BytesIO(b)
    return torch.load(buffer)
# Attach the shared conversion helpers from torch._utils as methods on the
# base class so every concrete storage type inherits .type() and .cuda().
_StorageBase.type = _type
_StorageBase.cuda = _cuda
def typename(o)
Define basic utilities.