from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import logging

import numpy as np

from caffe2.python import core, schema, scope, utils, workspace
from caffe2.python.layers.tags import TagContext
from caffe2.proto import caffe2_pb2

from collections import namedtuple

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)


# Some types to simplify descriptions of things traveling between ops.
IdList = schema.List(np.int64)
IdScoreList = schema.Map(np.int64, np.float32)
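
# Illustrative note (not part of the original file): schema.List yields a
# record with 'lengths' and 'values' fields, while schema.Map additionally
# keys the values, so its ids live under 'values:keys'. get_key() below
# relies on exactly that distinction:
#
#   assert schema.equal_schemas(schema.List(np.int64), IdList)
#   assert schema.equal_schemas(schema.Map(np.int64, np.float32), IdScoreList)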


def get_key(record):
    if schema.equal_schemas(record, IdList):
        key = 'values'
    elif schema.equal_schemas(record, IdScoreList, check_field_types=False):
        key = 'values:keys'
    else:
        raise NotImplementedError('Not implemented for {}'.format(record))
    assert record[key].metadata is not None, (
        "Blob {} doesn't have metadata".format(str(record[key]())))
    return record[key]


def get_categorical_limit(record):
    key = get_key(record)
    return key.metadata.categorical_limit


def get_avg_length(record):
    return record['lengths'].metadata.expected_value


def set_request_only(field):
    for f in field.all_scalars():
        categorical_limit, expected_value = None, None
        if not f.metadata:
            feature_specs = schema.FeatureSpec(
                feature_is_request_only=True,
            )
        elif not f.metadata.feature_specs:
            categorical_limit = f.metadata.categorical_limit
            expected_value = f.metadata.expected_value
            feature_specs = schema.FeatureSpec(
                feature_is_request_only=True,
            )
        else:
            categorical_limit = f.metadata.categorical_limit
            expected_value = f.metadata.expected_value
            feature_specs = schema.FeatureSpec(
                feature_type=f.metadata.feature_specs.feature_type,
                feature_names=f.metadata.feature_specs.feature_names,
                feature_ids=f.metadata.feature_specs.feature_ids,
                feature_is_request_only=True,
                desired_hash_size=f.metadata.feature_specs.desired_hash_size,
            )

        # Make sure not to set categorical_limit for a non-integer field.
        if not np.issubdtype(f.field_type(), np.integer):
            assert categorical_limit is None, \
                "categorical_limit shouldn't be set for non-integer field"

        f.set_metadata(
            schema.Metadata(
                categorical_limit=categorical_limit,
                expected_value=expected_value,
                feature_specs=feature_specs,
            )
        )
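
# Illustrative sketch (not part of the original file): set_request_only
# stamps every scalar of a record as request-only while carrying over any
# existing feature-spec fields, e.g.:
#
#   rec = schema.Struct(('x', schema.Scalar(np.float32)))
#   set_request_only(rec)
#   # every scalar in rec now has feature_is_request_only=True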


class InstantiationContext(object):
    """
    List of contexts where a layer can be instantiated.
    """
    ACCUMULATE_PRED = 'accumulate_pred'
    EVAL = 'eval'
    PREDICTION = 'prediction'
    TRAINING = 'training'


_LAYER_REGISTRY = {}


def register_layer(name, layer):
    assert name not in _LAYER_REGISTRY, "{0} already exists".format(name)
    _LAYER_REGISTRY[name] = layer


def layer_exists(name):
    return name in _LAYER_REGISTRY


def get_layer_class(name):
    return _LAYER_REGISTRY[name]


def create_layer(layer_name, *args, **kwargs):
    return _LAYER_REGISTRY[layer_name](*args, **kwargs)
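
# Usage sketch (illustrative, not part of the original file): concrete
# layers register themselves by name, and model-building code can then
# construct them dynamically:
#
#   register_layer('MyLayer', MyLayer)
#   if layer_exists('MyLayer'):
#       layer = create_layer('MyLayer', model, 'my_prefix', input_record)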


LayerPsParam = namedtuple('LayerPsParam', ['sparse_key', 'average_length'])


class LayerParameter(object):

    def __init__(self, parameter=None, optimizer=None, initializer=None,
                 ps_param=None, regularizer=None):
        assert isinstance(parameter, core.BlobReference), \
            "expect {0} to be a blob reference".format(str(parameter))
        # _shape must be set before initializer: assigning the initializer
        # (re)infers and updates the shape.
        self._shape = None
        self.parameter = parameter
        self.optimizer = optimizer
        self.initializer = initializer
        self.ps_param = ps_param
        self.regularizer = regularizer

    @property
    def initializer(self):
        return self._initializer

    @initializer.setter
    def initializer(self, op):
        assert op is None or core.IsOperator(getattr(op, 'type', None)), \
            "initializer expects an operator, got type: {}".format(type(op))
        self._initializer = op
        if op is not None:
            self.shape = self._infer_shape_from_initializer()

    @property
    def shape(self):
        return self._shape

    @shape.setter
    def shape(self, shape):
        assert self.shape is None or self.shape == shape, \
            "inconsistent shape for layer parameter:" \
            " {}, expect: {}, but got {}".format(self, self.shape, shape)
        self._shape = shape

    def _infer_shape_from_initializer(self):
        # If the initializer op carries an explicit 'shape' argument, use it.
        for arg in self.initializer.arg:
            if arg.name == 'shape':
                return list(arg.ints)
        # Otherwise run the initializer in a scratch workspace and measure
        # the resulting blob.
        with workspace.WorkspaceGuard("model_init_by_loading_params"):
            try:
                net = core.Net("shape_checker")
                net._net.op.extend([self.initializer])
                shape_blob = net.NextScopedBlob(self.parameter + "_shape")
                net.Shape([self.parameter], shape_blob)
                workspace.RunNetOnce(net)
                shape = workspace.FetchBlob(shape_blob).tolist()
                workspace.ResetWorkspace()
                return shape
            except RuntimeError as exp:
                logger.warning(
                    "Cannot infer the shape of blob {} from operator {}: {}"
                    .format(self.parameter, self.initializer.type, exp))
                workspace.ResetWorkspace()
                return None

    def __str__(self):
        return str(self.parameter)
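
# Illustrative sketch (not part of the original file): an initializer is a
# plain OperatorDef, e.g. built via core.CreateOperator. When it carries an
# explicit 'shape' argument, _infer_shape_from_initializer reads it off the
# op; otherwise the op is run in a scratch workspace to measure the blob:
#
#   init_op = core.CreateOperator(
#       'UniformFill', [], ['fc_w'], shape=[16, 8], min=-0.1, max=0.1)
#   param = LayerParameter(
#       parameter=core.BlobReference('fc_w'), initializer=init_op)
#   assert param.shape == [16, 8]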


def is_request_only_scalar(scalar):
    if len(scalar.field_metadata()) == 0:
        return False
    for metadata in scalar.field_metadata():
        if not (metadata and metadata.feature_specs and getattr(
                metadata.feature_specs, 'feature_is_request_only', False)):
            return False
    return True
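
# Illustrative sketch (not part of the original file): a scalar counts as
# request-only only if every one of its metadata entries carries a
# FeatureSpec with feature_is_request_only=True:
#
#   s = schema.Scalar(np.int64)
#   s.set_metadata(schema.Metadata(
#       categorical_limit=None, expected_value=None,
#       feature_specs=schema.FeatureSpec(feature_is_request_only=True)))
#   assert is_request_only_scalar(s)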


class ModelLayer(object):

    def __init__(self, model, prefix, input_record,
                 predict_input_record_fields=None, tags=None, **kwargs):
        '''
        Base class for model layers. A layer is an abstraction that describes
        a model in terms of meta-operators, where each meta-operator can have
        different implementations for training, evaluation and prediction,
        instantiated later. As an example, SampledSoftmax can do something
        related to sampling depending on supervision during training, and
        just apply softmax if it's used for prediction/evaluation.

        All inputs/outputs of layers are represented as a record (an instance
        of schema bound to blobs) and are accessible through input_record and
        output_schema. If a layer needs only a subset of its inputs or
        provides only a subset of its outputs during inference, it should
        provide predict_input_record and predict_output_schema
        correspondingly (those records are expected to be a subset of
        input_record/output_schema).

        Each layer has a list of Tags associated with it, which depends on
        the current context and arguments. Those tags can be used at
        instantiation time.
        '''
        self.name = model.next_layer_name(prefix)
        self.model = model
        self.kwargs = kwargs
        self._input_record = input_record
        if predict_input_record_fields:
            if not isinstance(predict_input_record_fields, list):
                predict_input_record_fields = [predict_input_record_fields]
            self._predict_input_record = self._input_record[
                predict_input_record_fields]
        else:
            self._predict_input_record = None

        # The layer is request-only iff every input scalar is request-only.
        self.request_only = True
        if len(input_record.all_scalars()) == 0:
            self.request_only = False
        for scalar in input_record.all_scalars():
            if not is_request_only_scalar(scalar):
                self.request_only = False
                break

        self.precomputation_request_only = False
        self.precomputation_object_only = False

        self._output_schema = None
        self._predict_output_schema = None
        self.params = []
        self.tags = set(tags or [])
        self.tags.update(TagContext.current().tags)
        self._export_output_for_metrics = False
        self._export_params_for_metrics = False

    def get_type(self):
        return self.__class__.__name__

    def _check_output_schema(self):
        assert self._output_schema is not None, "Schema is not initialized"
        assert (self._predict_output_schema is None or
                schema.is_schema_subset(self._predict_output_schema,
                                        self._output_schema)), (
            "predict_output_schema is not a subset of the output_schema")

    @property
    def predict_input_record(self):
        return self._predict_input_record or self._input_record

    @property
    def input_record(self):
        return self._input_record

    @property
    def predict_output_schema(self):
        self._check_output_schema()
        return self._predict_output_schema or self._output_schema

    @predict_output_schema.setter
    def predict_output_schema(self, output_schema):
        assert self._predict_output_schema is None
        self._predict_output_schema = output_schema

    @property
    def output_schema(self):
        if self.request_only:
            set_request_only(self._output_schema)
        self._check_output_schema()
        return self._output_schema

    @output_schema.setter
    def output_schema(self, output_schema):
        assert self._output_schema is None
        self._output_schema = output_schema

    def get_parameters(self):
        return self.params

    def get_fp16_compatible_parameters(self):
        """Return a subset of parameters which can be converted to fp16."""
        return []

    def get_memory_usage(self):
        return 0

    def add_init_params(self, init_net):
        '''
        Adds layer initialization operators to the passed net.
        '''
        for param in self.params:
            init_op = param.initializer
            current_device_scope = scope.CurrentDeviceScope()
            if not init_op:
                continue

            # Do not mutate the stored initializer; copy it and stamp the
            # current device scope onto the copy if the op has none.
            if not init_op.HasField('device_option') and \
                    current_device_scope:
                init_op = caffe2_pb2.OperatorDef()
                init_op.CopyFrom(param.initializer)
                init_op.device_option.CopyFrom(current_device_scope)

            # Skip initializers that are already present in the net.
            if any(utils.OpAlmostEqual(op, init_op, 'debug_info')
                   for op in init_net._net.op):
                continue

            init_net._net.op.extend([init_op])

    def create_param(self, param_name, shape, initializer, optimizer,
                     ps_param=None, regularizer=None):
        with scope.NameScope(self.name, reset=True):
            param = self.model.create_param(param_name=param_name,
                                            shape=shape,
                                            initializer=initializer,
                                            optimizer=optimizer,
                                            ps_param=ps_param,
                                            regularizer=regularizer)

            # Make sure parameters are not shared within the same layer.
            assert all(param.parameter != p.parameter for p in self.params)

            self.params.append(param)
            return param.parameter

    def get_next_blob_reference(self, name):
        with scope.NameScope(self.name, reset=True):
            return self.model.net.NextScopedBlob(name)

    def add_operators(self, net, init_net=None,
                      context=InstantiationContext.TRAINING):
        '''
        Adds layer training or initialization operators to the passed in net.
        init_net can be None and can be called independently from
        add_init_params.
        '''
        # The name scope ensures that all intermediate blobs are associated
        # with the layer that produces them.
        with scope.NameScope(self.name):
            if context not in {InstantiationContext.PREDICTION,
                               InstantiationContext.EVAL,
                               InstantiationContext.ACCUMULATE_PRED}:
                assert init_net, (
                    "Only prediction and eval context don't need init_net")
            if init_net:
                self.add_init_params(init_net)

            if context == InstantiationContext.TRAINING:
                self.add_train_ops(net)
            elif context == InstantiationContext.EVAL:
                self.add_eval_ops(net)
            elif context == InstantiationContext.ACCUMULATE_PRED:
                self.add_ops_to_accumulate_pred(net)
            else:
                self.add_ops(net)

            if context in {InstantiationContext.TRAINING,
                           InstantiationContext.EVAL} \
                    and self._export_params_for_metrics:
                self.add_param_copy_operators(net)

    def add_ops(self, net):
        # Predict-time implementation; subclasses must provide it.
        raise NotImplementedError

    def add_eval_ops(self, net):
        # Default eval implementation matches the predict implementation.
        self.add_ops(net)

    def add_train_ops(self, net):
        # Default train implementation matches the eval implementation.
        self.add_eval_ops(net)

    def add_ops_to_accumulate_pred(self, net):
        # Adds operators to accumulate predictions/labels/weights, e.g. for
        # later calibration. Defaults to the eval implementation.
        self.add_eval_ops(net)

    def add_param_copy_operators(self, net):
        for param in self.params:
            param_copy_ref = self.model.metrics_schema[str(param.parameter)]
            net.Copy([param.parameter], param_copy_ref.field_blobs())

    def export_output_for_metrics(self):
        self._export_output_for_metrics = True

        # Export the output of the layer directly.
        export_name = self.name + "/output"
        self.model.add_metric_field(export_name, self.output_schema)

    def export_params_for_metrics(self):
        self._export_params_for_metrics = True

        # Export copies of the parameters so they can be read as metrics.
        for param in self.params:
            param_copy_ref = self.get_next_blob_reference(
                str(param).split("/")[-1] + "_copy")
            self.model.add_metric_field(str(param.parameter), param_copy_ref)
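
# Minimal subclass sketch (illustrative, not part of the original file): a
# concrete layer typically just sets output_schema in __init__ and emits
# operators in add_ops(); the train/eval/accumulate variants fall back to
# it by default.
#
#   class Copy(ModelLayer):
#       def __init__(self, model, input_record, name='copy', **kwargs):
#           super(Copy, self).__init__(model, name, input_record, **kwargs)
#           self.output_schema = schema.NewRecord(model.net, input_record)
#
#       def add_ops(self, net):
#           net.Copy(self.input_record.field_blobs(),
#                    self.output_schema.field_blobs())
#
#   register_layer('Copy', Copy)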
 