1 from .module
import Module
2 from .utils
import _pair, _quadruple, _ntuple
3 from ..
import functional
as F
4 from ..._jit_internal
import weak_module, weak_script_method
12 __constants__ = [
'padding',
'value']
def __init__(self, value):
    """Store the constant fill value used by forward().

    Args:
        value: the constant the padded regions are filled with
            (passed through to F.pad by forward()).
    """
    super(_ConstantPadNd, self).__init__()
    # Bug fix: `value` was accepted but never stored, so forward()
    # (which reads self.value) would raise AttributeError.
    self.value = value
def forward(self, input):
    """Constant-pad `input` using the stored padding spec and fill value."""
    pad_spec = self.padding
    fill = self.value
    return F.pad(input, pad_spec, 'constant', fill)
def extra_repr(self):
    """Return the repr suffix nn.Module shows, e.g. "padding=(1, 1), value=0.5".

    Bug fix: the `def extra_repr(self):` header was missing, leaving a bare
    `return` statement (a syntax error); restored to match the parallel
    extra_repr definitions of the other padding base classes in this file.
    """
    return 'padding={}, value={}'.format(self.padding, self.value)
r"""Pads the input tensor boundaries with a constant value.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in both boundaries. If a 2-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`)
    value (float): the constant value the padded regions are filled with

Shape:
    - Input: :math:`(N, C, W_{in})`
    - Output: :math:`(N, C, W_{out})` where
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ConstantPad1d(2, 3.5)
    >>> input = torch.randn(1, 2, 4)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ConstantPad1d((3, 1), 3.5)
    >>> output = m(input)
"""

def __init__(self, padding, value):
    super(ConstantPad1d, self).__init__(value)
    # Bug fix: the padding spec was never stored, so forward() (which reads
    # self.padding) would fail. Canonicalize int-or-2-tuple to (left, right),
    # mirroring ConstantPad2d/_quadruple and ConstantPad3d/_ntuple(6).
    self.padding = _pair(padding)
78 r"""Pads the input tensor boundaries with a constant value. 80 For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`. 83 padding (int, tuple): the size of the padding. If is `int`, uses the same 84 padding in all boundaries. If a 4-`tuple`, uses (:math:`\text{padding\_left}`, 85 :math:`\text{padding\_right}`, :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`) 88 - Input: :math:`(N, C, H_{in}, W_{in})` 89 - Output: :math:`(N, C, H_{out}, W_{out})` where 91 :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}` 93 :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}` 97 >>> m = nn.ConstantPad2d(2, 3.5) 98 >>> input = torch.randn(1, 2, 2) 100 tensor([[[ 1.6585, 0.4320], 101 [-0.8701, -0.4649]]]) 103 tensor([[[ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000, 3.5000], 104 [ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000, 3.5000], 105 [ 3.5000, 3.5000, 1.6585, 0.4320, 3.5000, 3.5000], 106 [ 3.5000, 3.5000, -0.8701, -0.4649, 3.5000, 3.5000], 107 [ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000, 3.5000], 108 [ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000, 3.5000]]]) 109 >>> # using different paddings for different sides 110 >>> m = nn.ConstantPad2d((3, 0, 2, 1), 3.5) 112 tensor([[[ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000], 113 [ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000], 114 [ 3.5000, 3.5000, 3.5000, 1.6585, 0.4320], 115 [ 3.5000, 3.5000, 3.5000, -0.8701, -0.4649], 116 [ 3.5000, 3.5000, 3.5000, 3.5000, 3.5000]]]) 119 __constants__ = [
'padding',
'value']
def __init__(self, padding, value):
    """Record the fill value (on the base class) and the 4-sided padding spec."""
    super(ConstantPad2d, self).__init__(value)
    # Canonicalize int-or-4-tuple into (left, right, top, bottom).
    quad = _quadruple(padding)
    self.padding = quad
r"""Pads the input tensor boundaries with a constant value.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 6-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
        :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`,
        :math:`\text{padding\_front}`, :math:`\text{padding\_back}`)

Shape:
    - Input: :math:`(N, C, D_{in}, H_{in}, W_{in})`
    - Output: :math:`(N, C, D_{out}, H_{out}, W_{out})` where
      :math:`D_{out} = D_{in} + \text{padding\_front} + \text{padding\_back}`
      :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ConstantPad3d(3, 3.5)
    >>> input = torch.randn(16, 3, 10, 20, 30)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ConstantPad3d((3, 3, 6, 6, 0, 1), 3.5)
    >>> output = m(input)
"""

def __init__(self, padding, value):
    """Record the fill value (on the base class) and the 6-sided padding spec."""
    super(ConstantPad3d, self).__init__(value)
    # Canonicalize int-or-6-tuple into (left, right, top, bottom, front, back).
    to_sextuple = _ntuple(6)
    self.padding = to_sextuple(padding)
167 __constants__ = [
'padding']
def forward(self, input):
    """Reflection-pad `input` according to the stored padding spec."""
    pad_spec = self.padding
    return F.pad(input, pad_spec, 'reflect')
def extra_repr(self):
    """Return the padding spec as the repr suffix shown by nn.Module."""
    return str(self.padding)
r"""Pads the input tensor using the reflection of the input boundary.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 2-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`)

Shape:
    - Input: :math:`(N, C, W_{in})`
    - Output: :math:`(N, C, W_{out})` where
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ReflectionPad1d(2)
    >>> input = torch.arange(8, dtype=torch.float).reshape(1, 2, 4)
    >>> m(input)
    tensor([[[2., 1., 0., 1., 2., 3., 2., 1.],
             [6., 5., 4., 5., 6., 7., 6., 5.]]])
    >>> # using different paddings for different sides
    >>> m = nn.ReflectionPad1d((3, 1))
    >>> m(input)
    tensor([[[3., 2., 1., 0., 1., 2., 3., 2.],
             [7., 6., 5., 4., 5., 6., 7., 6.]]])
"""

def __init__(self, padding):
    super(ReflectionPad1d, self).__init__()
    # Bug fix: the padding spec was never stored, so the base-class forward()
    # (which reads self.padding) would fail. Canonicalize int-or-2-tuple to
    # (left, right), mirroring ReflectionPad2d's use of _quadruple.
    self.padding = _pair(padding)
r"""Pads the input tensor using the reflection of the input boundary.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 4-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
        :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`)

Shape:
    - Input: :math:`(N, C, H_{in}, W_{in})`
    - Output: :math:`(N, C, H_{out}, W_{out})` where
      :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ReflectionPad2d(2)
    >>> input = torch.arange(9, dtype=torch.float).reshape(1, 1, 3, 3)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ReflectionPad2d((1, 1, 2, 0))
    >>> output = m(input)
"""

def __init__(self, padding):
    """Record the 4-sided reflection padding spec."""
    super(ReflectionPad2d, self).__init__()
    # Canonicalize int-or-4-tuple into (left, right, top, bottom).
    quad = _quadruple(padding)
    self.padding = quad
270 __constants__ = [
'padding']
def forward(self, input):
    """Replication-pad `input` according to the stored padding spec."""
    pad_spec = self.padding
    return F.pad(input, pad_spec, 'replicate')
def extra_repr(self):
    """Return the padding spec as the repr suffix shown by nn.Module."""
    return str(self.padding)
r"""Pads the input tensor using replication of the input boundary.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 2-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`)

Shape:
    - Input: :math:`(N, C, W_{in})`
    - Output: :math:`(N, C, W_{out})` where
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ReplicationPad1d(2)
    >>> input = torch.arange(8, dtype=torch.float).reshape(1, 2, 4)
    >>> m(input)
    tensor([[[0., 0., 0., 1., 2., 3., 3., 3.],
             [4., 4., 4., 5., 6., 7., 7., 7.]]])
    >>> # using different paddings for different sides
    >>> m = nn.ReplicationPad1d((3, 1))
    >>> m(input)
    tensor([[[0., 0., 0., 0., 1., 2., 3., 3.],
             [4., 4., 4., 4., 5., 6., 7., 7.]]])
"""

def __init__(self, padding):
    super(ReplicationPad1d, self).__init__()
    # Bug fix: the padding spec was never stored, so the base-class forward()
    # (which reads self.padding) would fail. Canonicalize int-or-2-tuple to
    # (left, right), mirroring ReplicationPad2d's use of _quadruple.
    self.padding = _pair(padding)
r"""Pads the input tensor using replication of the input boundary.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 4-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
        :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`)

Shape:
    - Input: :math:`(N, C, H_{in}, W_{in})`
    - Output: :math:`(N, C, H_{out}, W_{out})` where
      :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ReplicationPad2d(2)
    >>> input = torch.arange(9, dtype=torch.float).reshape(1, 1, 3, 3)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ReplicationPad2d((1, 1, 2, 0))
    >>> output = m(input)
"""

def __init__(self, padding):
    """Record the 4-sided replication padding spec."""
    super(ReplicationPad2d, self).__init__()
    # Canonicalize int-or-4-tuple into (left, right, top, bottom).
    quad = _quadruple(padding)
    self.padding = quad
r"""Pads the input tensor using replication of the input boundary.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 6-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
        :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`,
        :math:`\text{padding\_front}`, :math:`\text{padding\_back}`)

Shape:
    - Input: :math:`(N, C, D_{in}, H_{in}, W_{in})`
    - Output: :math:`(N, C, D_{out}, H_{out}, W_{out})` where
      :math:`D_{out} = D_{in} + \text{padding\_front} + \text{padding\_back}`
      :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ReplicationPad3d(3)
    >>> input = torch.randn(16, 3, 8, 320, 480)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ReplicationPad3d((3, 3, 6, 6, 1, 1))
    >>> output = m(input)
"""

def __init__(self, padding):
    """Record the 6-sided replication padding spec."""
    super(ReplicationPad3d, self).__init__()
    # Canonicalize int-or-6-tuple into (left, right, top, bottom, front, back).
    to_sextuple = _ntuple(6)
    self.padding = to_sextuple(padding)
r"""Pads the input tensor boundaries with zero.

For `N`-dimensional padding, use :func:`torch.nn.functional.pad()`.

Args:
    padding (int, tuple): the size of the padding. If is `int`, uses the same
        padding in all boundaries. If a 4-`tuple`, uses
        (:math:`\text{padding\_left}`, :math:`\text{padding\_right}`,
        :math:`\text{padding\_top}`, :math:`\text{padding\_bottom}`)

Shape:
    - Input: :math:`(N, C, H_{in}, W_{in})`
    - Output: :math:`(N, C, H_{out}, W_{out})` where
      :math:`H_{out} = H_{in} + \text{padding\_top} + \text{padding\_bottom}`
      :math:`W_{out} = W_{in} + \text{padding\_left} + \text{padding\_right}`

Examples::

    >>> m = nn.ZeroPad2d(2)
    >>> input = torch.randn(1, 1, 3, 3)
    >>> output = m(input)
    >>> # using different paddings for different sides
    >>> m = nn.ZeroPad2d((1, 1, 2, 0))
    >>> output = m(input)
"""

def __init__(self, padding):
    # Zero padding is just constant padding with a fixed fill value of 0.
    super(ZeroPad2d, self).__init__(padding, 0.)