Caffe2 - Python API
A deep learning, cross-platform ML framework
bucket_weighted.py
## @package bucket_weighted
# Module caffe2.python.layers.bucket_weighted
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import logging
import numpy as np

from caffe2.python import core, schema
from caffe2.python.layers.layers import (
    get_categorical_limit,
    ModelLayer,
)

from caffe2.python.layers.tags import Tags

logger = logging.getLogger(__name__)


class BucketWeighted(ModelLayer):
    def __init__(self, model, input_record, max_score=0, bucket_boundaries=None,
                 weight_optim=None, name="bucket_weighted"):
        super(BucketWeighted, self).__init__(model, name, input_record)

        assert isinstance(input_record, schema.List), "Incorrect input type"
        self.bucket_boundaries = bucket_boundaries
        # One learnable weight per bucket: len(boundaries) + 1 buckets when
        # boundaries are given, one per score up to max_score, or one per
        # categorical id otherwise.
        if bucket_boundaries is not None:
            self.shape = len(bucket_boundaries) + 1
        elif max_score > 0:
            self.shape = max_score
        else:
            self.shape = get_categorical_limit(input_record)

        self.bucket_w = self.create_param(param_name='bucket_w',
                                          shape=[self.shape, ],
                                          initializer=('ConstantFill', {'value': 1.0}),
                                          optimizer=weight_optim)

        self.output_schema = schema.Struct(
            ('bucket_weights',
             schema.Scalar((np.float32, self.shape),
                           self.get_next_blob_reference("bucket_w_gather")))
        )

        self.tags.update({Tags.HANDLE_AS_SPARSE_LAYER})

    def get_memory_usage(self):
        return self.shape

    def add_ops(self, net):
        # Map raw values to bucket indices (or use the raw ids directly when
        # no boundaries are configured), then gather the per-bucket weights.
        if self.bucket_boundaries is not None:
            buckets = net.Bucketize(
                self.input_record.values(),
                "buckets",
                boundaries=self.bucket_boundaries
            )
        else:
            buckets = self.input_record.values()
        buckets_int = net.Cast(
            buckets,
            "buckets_int",
            to=core.DataType.INT32
        )
        net.Gather(
            [self.bucket_w, buckets_int],
            self.output_schema.bucket_weights.field_blobs())
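
For reference, here is a minimal NumPy sketch (not part of the module above) of the computation that add_ops wires into the net: raw feature values are bucketized against the configured boundaries, and the learned per-bucket weight is gathered for each value. np.digitize stands in for the Bucketize operator, and the boundaries and values below are made up for illustration; the exact edge handling of the Caffe2 op may differ.

# Minimal sketch of the bucketize-then-gather computation (illustration only).
import numpy as np

bucket_boundaries = [1.0, 5.0, 10.0]                              # 3 boundaries -> 4 buckets
bucket_w = np.ones(len(bucket_boundaries) + 1, dtype=np.float32)  # initialized to 1.0, as in the layer

values = np.array([0.3, 2.0, 7.5, 42.0], dtype=np.float32)        # the List record's values field

# np.digitize is used here as a stand-in for the Bucketize operator.
buckets = np.digitize(values, bucket_boundaries).astype(np.int32)

# Gather: one weight per input value, indexed by its bucket id.
bucket_weights = bucket_w[buckets]
print(buckets)         # [0 1 2 3]
print(bucket_weights)  # [1. 1. 1. 1.] before any training

The HANDLE_AS_SPARSE_LAYER tag marks bucket_w as a sparsely accessed parameter, so the trainer can restrict gradient updates to the rows actually selected by the Gather.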