Caffe2 - Python API
A deep learning, cross-platform ML framework
dirichlet.py
import torch
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from torch.distributions import constraints
from torch.distributions.exp_family import ExponentialFamily


# This helper is exposed for testing.
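# Given a sample ``x`` drawn from Dirichlet(``concentration``) and ``grad_output`` = dL/dx,
# this returns dL/dconcentration for the reparameterized sample.  ``torch._dirichlet_grad``
# is understood to provide the pathwise derivative of the sample with respect to its
# concentration; subtracting ``(x * grad_output).sum(-1, True)`` projects the incoming
# gradient onto the simplex, since the sample's components always sum to one.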
def _Dirichlet_backward(x, concentration, grad_output):
    total = concentration.sum(-1, True).expand_as(concentration)
    grad = torch._dirichlet_grad(x, concentration, total)
    return grad * (grad_output - (x * grad_output).sum(-1, True))


class _Dirichlet(Function):
    @staticmethod
    def forward(ctx, concentration):
        x = torch._sample_dirichlet(concentration)
        ctx.save_for_backward(x, concentration)
        return x

    @staticmethod
    @once_differentiable
    def backward(ctx, grad_output):
        x, concentration = ctx.saved_tensors
        return _Dirichlet_backward(x, concentration, grad_output)


class Dirichlet(ExponentialFamily):
    r"""
    Creates a Dirichlet distribution parameterized by concentration :attr:`concentration`.

    Example::

        >>> m = Dirichlet(torch.tensor([0.5, 0.5]))
        >>> m.sample()  # Dirichlet distributed with concentration `concentration`
        tensor([ 0.1046,  0.8954])

    Args:
        concentration (Tensor): concentration parameter of the distribution
            (often referred to as alpha)
    """
    arg_constraints = {'concentration': constraints.positive}
    support = constraints.simplex
    has_rsample = True

    def __init__(self, concentration, validate_args=None):
        if concentration.dim() < 1:
            raise ValueError("`concentration` parameter must be at least one-dimensional.")
        self.concentration = concentration
        batch_shape, event_shape = concentration.shape[:-1], concentration.shape[-1:]
        super(Dirichlet, self).__init__(batch_shape, event_shape, validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Dirichlet, _instance)
        batch_shape = torch.Size(batch_shape)
        new.concentration = self.concentration.expand(batch_shape + self.event_shape)
        super(Dirichlet, new).__init__(batch_shape, self.event_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

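    # Reparameterized sampling delegates to the differentiable `_Dirichlet` autograd
    # Function above, so gradients flow from samples back to `concentration`.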
    def rsample(self, sample_shape=()):
        shape = self._extended_shape(sample_shape)
        concentration = self.concentration.expand(shape)
        return _Dirichlet.apply(concentration)

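    # Log-density of the Dirichlet distribution:
    # log p(x; alpha) = sum_i (alpha_i - 1) * log(x_i)
    #                   + lgamma(sum_i alpha_i) - sum_i lgamma(alpha_i)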
    def log_prob(self, value):
        if self._validate_args:
            self._validate_sample(value)
        return ((torch.log(value) * (self.concentration - 1.0)).sum(-1) +
                torch.lgamma(self.concentration.sum(-1)) -
                torch.lgamma(self.concentration).sum(-1))

    @property
    def mean(self):
        return self.concentration / self.concentration.sum(-1, True)

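    # Var[X_i] = alpha_i * (alpha_0 - alpha_i) / (alpha_0 ** 2 * (alpha_0 + 1)),
    # where alpha_0 = sum_i alpha_i.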
    @property
    def variance(self):
        con0 = self.concentration.sum(-1, True)
        return self.concentration * (con0 - self.concentration) / (con0.pow(2) * (con0 + 1))

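    # Differential entropy of the Dirichlet:
    # H = sum_i lgamma(alpha_i) - lgamma(alpha_0)
    #     + (alpha_0 - K) * digamma(alpha_0)
    #     - sum_i (alpha_i - 1) * digamma(alpha_i),   with alpha_0 = sum_i alpha_i.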
    def entropy(self):
        k = self.concentration.size(-1)
        a0 = self.concentration.sum(-1)
        return (torch.lgamma(self.concentration).sum(-1) - torch.lgamma(a0) -
                (k - a0) * torch.digamma(a0) -
                ((self.concentration - 1.0) * torch.digamma(self.concentration)).sum(-1))

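    # Exponential-family bookkeeping: with the concentration vector treated as the
    # natural parameter, the log-normalizer is
    # A(alpha) = sum_i lgamma(alpha_i) - lgamma(sum_i alpha_i).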
    @property
    def _natural_params(self):
        return (self.concentration, )

    def _log_normalizer(self, x):
        return x.lgamma().sum(-1) - torch.lgamma(x.sum(-1))
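

# Minimal usage sketch (illustrative addition, not part of the original module):
# draw a reparameterized sample, evaluate its log-probability, and backpropagate
# to the concentration parameter.
if __name__ == "__main__":
    concentration = torch.tensor([0.5, 1.0, 2.0], requires_grad=True)
    d = Dirichlet(concentration)
    sample = d.rsample()        # differentiable point on the simplex
    log_p = d.log_prob(sample)  # scalar log-density at the sample
    log_p.backward()            # gradients flow back to `concentration`
    print(sample, log_p.item(), concentration.grad)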