from numbers import Number

import torch
from torch.distributions import constraints
from torch.distributions.distribution import Distribution
from torch.distributions.utils import broadcast_all
class Laplace(Distribution):
    r"""
    Creates a Laplace distribution parameterized by :attr:`loc` and :attr:`scale`.

    Example::

        >>> m = Laplace(torch.tensor([0.0]), torch.tensor([1.0]))
        >>> m.sample()  # Laplace distributed with loc=0, scale=1

    Args:
        loc (float or Tensor): mean of the distribution
        scale (float or Tensor): scale of the distribution
    """
    arg_constraints = {'loc': constraints.real, 'scale': constraints.positive}
    support = constraints.real
    has_rsample = True
    @property
    def variance(self):
        return 2 * self.scale.pow(2)

    @property
    def stddev(self):
        return (2 ** 0.5) * self.scale

    def __init__(self, loc, scale, validate_args=None):
        self.loc, self.scale = broadcast_all(loc, scale)
        if isinstance(loc, Number) and isinstance(scale, Number):
            batch_shape = torch.Size()
        else:
            batch_shape = self.loc.size()
        super(Laplace, self).__init__(batch_shape, validate_args=validate_args)
    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Laplace, _instance)
        batch_shape = torch.Size(batch_shape)
        new.loc = self.loc.expand(batch_shape)
        new.scale = self.scale.expand(batch_shape)
        super(Laplace, new).__init__(batch_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new
    def rsample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        finfo = torch.finfo(self.loc.dtype)
        if torch._C._get_tracing_state():
            # [JIT WORKAROUND] .uniform_() is not supported while tracing,
            # so draw u ~ Uniform(-1, 1) via torch.rand instead.
            u = torch.rand(shape, dtype=self.loc.dtype, device=self.loc.device) * 2 - 1
            return self.loc - self.scale * u.sign() * torch.log1p(-u.abs().clamp(min=finfo.tiny))
        u = self.loc.new(shape).uniform_(finfo.eps - 1, 1)
        return self.loc - self.scale * u.sign() * torch.log1p(-u.abs())
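    # Derivation sketch for the transform above: if U ~ Uniform(-1, 1), then
    #     X = loc - scale * sign(U) * log(1 - |U|)
    # is the Laplace inverse CDF, F^{-1}(p) = loc - scale * sign(p - 0.5) * log(1 - 2 * |p - 0.5|),
    # evaluated at a uniform p (with U = 2p - 1), so X ~ Laplace(loc, scale).
    # The clamp(min=finfo.tiny) and uniform_(finfo.eps - 1, 1) bounds keep
    # log1p away from log(0).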
    def log_prob(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return -torch.log(2 * self.scale) - torch.abs(value - self.loc) / self.scale

    def cdf(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return 0.5 - 0.5 * (value - self.loc).sign() * torch.expm1(-(value - self.loc).abs() / self.scale)
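    # The icdf below inverts the cdf above in closed form:
    #     F(x)      = 0.5 - 0.5 * sign(x - loc) * (exp(-|x - loc| / scale) - 1)
    #     F^{-1}(p) = loc - scale * sign(p - 0.5) * log(1 - 2 * |p - 0.5|)
    # expm1/log1p are used instead of exp/log for better accuracy near zero.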
    def icdf(self, value):
        term = value - 0.5
        return self.loc - self.scale * term.sign() * torch.log1p(-2 * term.abs())
    def entropy(self):
        return 1 + torch.log(2 * self.scale)
# _get_checked_instance(), _extended_shape(), and _validate_sample() used above
# are helpers inherited from the torch.distributions.Distribution base class.
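
# A minimal usage sketch (not part of the module above); it exercises sampling,
# log_prob, and the cdf/icdf round trip. The printed values are illustrative.
if __name__ == "__main__":
    m = Laplace(torch.tensor([0.0]), torch.tensor([1.0]))
    print(m.sample(torch.Size((5,))))        # 5 draws, shape (5, 1)
    print(m.log_prob(torch.tensor([0.0])))   # -log(2 * scale) at the mode
    p = m.cdf(torch.tensor([1.0]))
    print(m.icdf(p))                         # recovers ~1.0
    print(m.entropy())                       # 1 + log(2 * scale)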