Caffe2 - Python API
A deep learning, cross-platform ML framework
regularizer.py
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

# @package regularizer
# Module caffe2.python.regularizer
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals


from caffe2.python import core

class Regularizer(object):
    def __init__(self):
        self.apply_after_optimizer = False

    '''
    Adds regularization to train_net for the given parameter. The factor
    in front of the regularization term is provided at initialization.
    The param should be a BlobReference.
    '''

    def __call__(self, net, param_init_net, param, grad=None):
        assert isinstance(param, core.BlobReference)
        return self._run(net, param_init_net, param, grad)

    def _run(self, net, param_init_net, param, grad):
        # Subclasses implement the actual regularization ops here.
        raise Exception("Not Implemented")


class L1Norm(Regularizer):
    def __init__(self, reg_lambda):
        super(L1Norm, self).__init__()
        assert reg_lambda >= 0,\
            'factor ahead of regularization should be 0 or positive'

        self.reg_lambda = reg_lambda

    def _run(self, net, param_init_net, param, grad=None):
        # output_blob holds the reg_lambda-scaled L1 penalty of param.
        output_blob = net.NextScopedBlob(param + '_l1_regularization')
        net.LpNorm([param], [output_blob], p=1)
        net.Scale([output_blob], [output_blob], scale=self.reg_lambda)
        return output_blob


class L2Norm(Regularizer):
    def __init__(self, reg_lambda):
        super(L2Norm, self).__init__()
        assert reg_lambda >= 0,\
            'factor ahead of regularization should be 0 or positive'

        self.reg_lambda = reg_lambda

    def _run(self, net, param_init_net, param, grad=None):
        # output_blob holds the reg_lambda-scaled L2 penalty of param.
        output_blob = net.NextScopedBlob(param + '_l2_regularization')
        net.LpNorm([param], [output_blob], p=2)
        net.Scale([output_blob], [output_blob], scale=self.reg_lambda)
        return output_blob


class MaxNorm(Regularizer):
    def __init__(self, norm=1.0):
        super(MaxNorm, self).__init__()
        self.norm = norm
        self.apply_after_optimizer = True

    def _run(self, net, param_init_net, param, grad):
        assert self.norm > 0, 'norm should be bigger than 0.'
        if isinstance(grad, core.GradientSlice):
            # Apply max-norm rescaling in place to the rows of param touched
            # by the sparse gradient (use_max_norm=True).
            net.SparseNormalize(
                [param, grad.indices, grad.values],
                [param],
                use_max_norm=True,
                norm=self.norm,
            )
        else:
            raise NotImplementedError(
                "MaxNorm is not supported for dense parameters"
            )


class ConstantNorm(Regularizer):
    def __init__(self, norm=1.0):
        super(ConstantNorm, self).__init__()
        self.norm = norm
        self.apply_after_optimizer = True

    def _run(self, net, param_init_net, param, grad):
        assert self.norm > 0, 'norm should be bigger than 0.'
        if isinstance(grad, core.GradientSlice):
            # Apply constant-norm rescaling in place to the rows of param
            # touched by the sparse gradient (use_max_norm=False).
            net.SparseNormalize(
                [param, grad.indices, grad.values],
                [param],
                use_max_norm=False,
                norm=self.norm,
            )
        else:
            raise NotImplementedError(
                "ConstantNorm is not supported for dense parameters"
            )
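Usage note: the sketch below is illustrative and not part of regularizer.py. It shows how these regularizers are typically invoked, assuming hypothetical net and blob names (init_net, train_net, fc_w). L1Norm and L2Norm run before the optimizer and return a blob holding the scaled penalty; MaxNorm and ConstantNorm set apply_after_optimizer = True and only handle sparse gradients (core.GradientSlice).

# Illustrative sketch only -- not part of regularizer.py.
# Net and blob names (init_net, train_net, fc_w) are hypothetical.
from caffe2.python import core
from caffe2.python.regularizer import L2Norm, MaxNorm

init_net = core.Net("init")
train_net = core.Net("train")

# A dense parameter blob created on the init net (shape chosen arbitrarily).
fc_w = init_net.XavierFill([], "fc_w", shape=[10, 4])

# Calling an L2Norm regularizer adds LpNorm + Scale ops to train_net and
# returns a scoped blob holding the scaled penalty, which higher-level code
# would typically fold into the training objective.
l2 = L2Norm(reg_lambda=1e-4)
reg_loss = l2(train_net, init_net, fc_w)

# MaxNorm (and ConstantNorm) set apply_after_optimizer = True and rescale the
# parameter in place after the update; they require a sparse gradient
# (core.GradientSlice) and raise NotImplementedError for dense gradients.
max_norm = MaxNorm(norm=1.0)
# max_norm(train_net, init_net, fc_w, grad=some_gradient_slice)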