independent.py
import torch
from torch.distributions import constraints
from torch.distributions.distribution import Distribution
from torch.distributions.utils import _sum_rightmost


class Independent(Distribution):
    r"""
    Reinterprets some of the batch dims of a distribution as event dims.

    This is mainly useful for changing the shape of the result of
    :meth:`log_prob`. For example, to create a diagonal Normal distribution with
    the same shape as a Multivariate Normal distribution (so they are
    interchangeable), you can::

        >>> loc = torch.zeros(3)
        >>> scale = torch.ones(3)
        >>> mvn = MultivariateNormal(loc, scale_tril=torch.diag(scale))
        >>> [mvn.batch_shape, mvn.event_shape]
        [torch.Size(()), torch.Size((3,))]
        >>> normal = Normal(loc, scale)
        >>> [normal.batch_shape, normal.event_shape]
        [torch.Size((3,)), torch.Size(())]
        >>> diagn = Independent(normal, 1)
        >>> [diagn.batch_shape, diagn.event_shape]
        [torch.Size(()), torch.Size((3,))]

    Args:
        base_distribution (torch.distributions.distribution.Distribution): a
            base distribution
        reinterpreted_batch_ndims (int): the number of batch dims to
            reinterpret as event dims
    """
    arg_constraints = {}

    def __init__(self, base_distribution, reinterpreted_batch_ndims, validate_args=None):
        if reinterpreted_batch_ndims > len(base_distribution.batch_shape):
            raise ValueError("Expected reinterpreted_batch_ndims <= len(base_distribution.batch_shape), "
                             "actual {} vs {}".format(reinterpreted_batch_ndims,
                                                      len(base_distribution.batch_shape)))
        shape = base_distribution.batch_shape + base_distribution.event_shape
        event_dim = reinterpreted_batch_ndims + len(base_distribution.event_shape)
        batch_shape = shape[:len(shape) - event_dim]
        event_shape = shape[len(shape) - event_dim:]
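        # Example of the arithmetic above: a base Normal with batch_shape
        # (5, 3) and event_shape (), wrapped with reinterpreted_batch_ndims=1,
        # has shape == (5, 3) and event_dim == 1, so the wrapper gets
        # batch_shape (5,) and event_shape (3,).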
        self.base_dist = base_distribution
        self.reinterpreted_batch_ndims = reinterpreted_batch_ndims
        super(Independent, self).__init__(batch_shape, event_shape, validate_args=validate_args)

    def expand(self, batch_shape, _instance=None):
        new = self._get_checked_instance(Independent, _instance)
        batch_shape = torch.Size(batch_shape)
        new.base_dist = self.base_dist.expand(batch_shape +
                                              self.event_shape[:self.reinterpreted_batch_ndims])
        new.reinterpreted_batch_ndims = self.reinterpreted_batch_ndims
        super(Independent, new).__init__(batch_shape, self.event_shape, validate_args=False)
        new._validate_args = self._validate_args
        return new

    @property
    def has_rsample(self):
        return self.base_dist.has_rsample

    @property
    def has_enumerate_support(self):
        if self.reinterpreted_batch_ndims > 0:
            return False
        return self.base_dist.has_enumerate_support

    @constraints.dependent_property
    def support(self):
        return self.base_dist.support

    @property
    def mean(self):
        return self.base_dist.mean

    @property
    def variance(self):
        return self.base_dist.variance

    def sample(self, sample_shape=torch.Size()):
        return self.base_dist.sample(sample_shape)

    def rsample(self, sample_shape=torch.Size()):
        return self.base_dist.rsample(sample_shape)

    def log_prob(self, value):
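        # Evaluate the base distribution elementwise, then sum over the
        # rightmost reinterpreted_batch_ndims dims: independent components'
        # log-probabilities add to give one joint log-probability per event.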
        log_prob = self.base_dist.log_prob(value)
        return _sum_rightmost(log_prob, self.reinterpreted_batch_ndims)

    def entropy(self):
        entropy = self.base_dist.entropy()
        return _sum_rightmost(entropy, self.reinterpreted_batch_ndims)

    def enumerate_support(self, expand=True):
        if self.reinterpreted_batch_ndims > 0:
            raise NotImplementedError("Enumeration over cartesian product is not implemented")
        return self.base_dist.enumerate_support(expand=expand)
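
A minimal usage sketch (added for illustration, not part of the file above): wrapping a batched Normal in Independent turns its trailing batch dimension into an event dimension, so log_prob sums over it.

import torch
from torch.distributions import Independent, Normal

base = Normal(torch.zeros(4, 3), torch.ones(4, 3))  # batch_shape (4, 3), event_shape ()
diag = Independent(base, 1)                         # batch_shape (4,), event_shape (3,)

x = torch.randn(4, 3)
print(base.log_prob(x).shape)  # torch.Size([4, 3]) - one score per element
print(diag.log_prob(x).shape)  # torch.Size([4])    - summed over the last dim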