from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import core, schema
from caffe2.python.layers.layers import ModelLayer

import numpy as np
import logging

logger = logging.getLogger(__name__)

'''
Homotopy weighting between two weights x, y by doing:

    alpha x + beta y

where alpha is a decreasing scalar parameter ranging from [min, max] (default
[0, 1]), and alpha + beta = max + min, which means that beta is increasing in
the range [min, max].

Homotopy methods first solve an "easy" problem (one to which the solution is
well known), which is gradually transformed into the target problem.
'''
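
# A minimal illustration (comments only, assuming the default min_weight=0,
# max_weight=1): the layer drives an internal learning-rate signal lr that
# decays from 1 towards 0 and sets
#     alpha = scale * lr + offset        -> starts near 1, decays towards 0
#     beta  = scale * (1 - lr) + offset  -> starts near 0, grows towards 1
# so alpha + beta stays equal to max_weight + min_weight, and the blend
# alpha * x + beta * y moves from the "easy" term x to the target term y.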


class HomotopyWeight(ModelLayer):
    def __init__(
        self,
        model,
        input_record,
        name='homotopy_weight',
        min_weight=0.,
        max_weight=1.,
        half_life=1e6,
        quad_life=3e6,
        **kwargs
    ):
        super(HomotopyWeight,
              self).__init__(model, name, input_record, **kwargs)
        self.output_schema = schema.Scalar(
            np.float32, self.get_next_blob_reference('homotopy_weight')
        )
        data = self.input_record.field_blobs()
        assert len(data) == 2
        self.x = data[0]
        self.y = data[1]
        self.atomic_iter = self.create_atomic_iter()
        # map the schedule's lr in [0, 1] onto [min_weight, max_weight]:
        # alpha = scale * lr + offset
        assert max_weight > min_weight
        self.scale = float(max_weight - min_weight)
        self.offset = self.model.add_global_constant(
            '%s_offset_1dfloat' % self.name, float(min_weight)
        )
        self.gamma, self.power = self.solve_inv_lr_params(half_life, quad_life)

    def solve_inv_lr_params(self, half_life, quad_life):
        # the (gamma, power) pair is solvable only for a convex, monotonically
        # decreasing schedule, i.e. 0 < 2 * half_life < quad_life
        assert half_life > 0
        assert quad_life > 2 * half_life
        t = float(quad_life) / float(half_life)
        x = t * (1.0 + np.sqrt(2.0)) / 2.0 - np.sqrt(2.0)
        gamma = (x - 1.0) / float(half_life)
        power = np.log(2.0) / np.log(x)
        logger.info(
            'homotopy_weighting: found lr param: gamma=%g, power=%g' %
            (gamma, power)
        )
        return gamma, power
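
    # Note (a hedged reading of the parameters above, not extra runtime code):
    # Caffe2's 'inv' learning rate policy computes
    #     lr(iter) = base_lr * (1 + gamma * iter) ** (-power).
    # With base_lr = 1, x = 1 + gamma * half_life and power = log(2) / log(x),
    # so lr(half_life) = 1 / 2 exactly; quad_life sets how fast the schedule
    # keeps decaying towards roughly 1 / 4 and beyond.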

    def create_atomic_iter(self):
        self.mutex = self.create_param(
            param_name=('%s_mutex' % self.name),
            shape=None,
            initializer=('CreateMutex', ),
            optimizer=self.model.NoOptim,
        )
        self.atomic_iter = self.create_param(
            param_name=('%s_atomic_iter' % self.name),
            shape=[1],
            initializer=(
                'ConstantFill', {
                    'value': 0,
                    'dtype': core.DataType.INT64
                }
            ),
            optimizer=self.model.NoOptim,
        )
        return self.atomic_iter
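
    # The mutex + AtomicIter pair (see update_weight below) gives this layer
    # its own thread-safe iteration counter; NoOptim keeps it out of the
    # optimizer so it only advances when this layer's net runs.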

    def update_weight(self, net):
        alpha = net.NextScopedBlob('alpha')
        beta = net.NextScopedBlob('beta')
        lr = net.NextScopedBlob('lr')
        comp_lr = net.NextScopedBlob('complementary_lr')
        scaled_lr = net.NextScopedBlob('scaled_lr')
        scaled_comp_lr = net.NextScopedBlob('scaled_complementary_lr')
        # advance this layer's private iteration counter and map it to a
        # decaying lr in (0, 1] via the 'inv' policy solved for in __init__
        net.AtomicIter([self.mutex, self.atomic_iter], [self.atomic_iter])
        net.LearningRate(
            [self.atomic_iter],
            [lr],
            policy='inv',
            gamma=self.gamma,
            power=self.power,
            base_lr=1.0,
        )
        # alpha = scale * lr + offset; beta = scale * (1 - lr) + offset
        net.Sub([self.model.global_constants['ONE'], lr], [comp_lr])
        net.Scale([lr], [scaled_lr], scale=self.scale)
        net.Scale([comp_lr], [scaled_comp_lr], scale=self.scale)
        net.Add([scaled_lr, self.offset], [alpha])
        net.Add([scaled_comp_lr, self.offset], [beta])
        return alpha, beta
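
    # Worked example (an illustrative sketch under the default min_weight=0,
    # max_weight=1, so scale=1 and offset=0): at a step where lr = 0.8 the ops
    # above produce alpha = 0.8 and beta = 0.2, hence alpha + beta = 1
    # = max_weight + min_weight, as promised in the module docstring.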

    def add_ops(self, net):
        alpha, beta = self.update_weight(net)
        # blend the two inputs: alpha * x + beta * y
        net.WeightedSum([self.x, alpha, self.y, beta], self.output_schema())