from numbers import Number

import torch
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily
from torch.distributions.utils import broadcast_all


class Exponential(ExponentialFamily):
    r"""
    Creates an Exponential distribution parameterized by :attr:`rate`.

    Example::

        >>> m = Exponential(torch.tensor([1.0]))
        >>> m.sample()  # Exponential distributed with rate=1

    Args:
        rate (float or Tensor): rate = 1 / scale of the distribution
    """
    arg_constraints = {'rate': constraints.positive}
    support = constraints.positive
    has_rsample = True
    _mean_carrier_measure = 0

    @property
    def mean(self):
        return self.rate.reciprocal()

    @property
    def stddev(self):
        return self.rate.reciprocal()

    @property
    def variance(self):
        return self.rate.pow(-2)

    def __init__(self, rate, validate_args=None):
        self.rate, = broadcast_all(rate)
        batch_shape = torch.Size() if isinstance(rate, Number) else self.rate.size()
        super(Exponential, self).__init__(batch_shape, validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Exponential, _instance)
        batch_shape = torch.Size(batch_shape)
        new.rate = self.rate.expand(batch_shape)
        super(Exponential, new).__init__(batch_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

    def rsample(self, sample_shape=torch.Size()):
        shape = self._extended_shape(sample_shape)
        if torch._C._get_tracing_state():
            # JIT workaround: tracing lacks support for .exponential_(), so
            # draw uniform samples and apply the inverse CDF instead.
            u = torch.rand(shape, dtype=self.rate.dtype, device=self.rate.device)
            return -(-u).log1p() / self.rate
        return self.rate.new(shape).exponential_() / self.rate

    def log_prob(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return self.rate.log() - self.rate * value

    def cdf(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return 1 - torch.exp(-self.rate * value)

    def icdf(self, value):
        return -torch.log(1 - value) / self.rate

    def entropy(self):
        return 1.0 - torch.log(self.rate)

    @property
    def _natural_params(self):
        return (-self.rate,)

    def _log_normalizer(self, x):
        return -torch.log(-x)

    # _get_checked_instance(cls, _instance=None), _extended_shape(sample_shape)
    # and _validate_sample(value) are inherited from
    # torch.distributions.distribution.Distribution.
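

# Minimal usage sketch (illustrative only, assumes PyTorch is available):
# builds a batched Exponential and exercises the quantities defined above.
if __name__ == "__main__":
    m = Exponential(torch.tensor([0.5, 1.0, 2.0]))
    x = m.sample((4,))                  # shape (4, 3): one column per rate
    print(m.log_prob(x))                # log(rate) - rate * x
    print(m.mean, m.variance)           # 1/rate and 1/rate**2
    print(m.cdf(torch.tensor(1.0)))     # 1 - exp(-rate)
    print(m.entropy())                  # 1 - log(rate)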