## @package layers
# Module caffe2.python.layers.layers
# (Caffe2 - Python API: a deep learning, cross platform ML framework)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from collections import namedtuple

import numpy as np

from caffe2.python import schema, scope
from caffe2.python.layers.tags import TagContext
13 
# Handy schema shorthands for the records most commonly passed between ops:
# a list of int64 ids, and a map from int64 ids to float32 scores.
IdList = schema.List(np.int64)
IdScoreList = schema.Map(np.int64, np.float32)
17 
18 
def get_categorical_limit(record):
    """Return the `categorical_limit` metadata of an id-based record.

    Args:
        record: a schema record matching either IdList (limit is read from
            its 'items' field) or IdScoreList (read from its 'keys' field).

    Returns:
        The `categorical_limit` stored in the field's metadata.

    Raises:
        NotImplementedError: if the record matches neither supported schema.
        AssertionError: if the relevant field carries no metadata.
    """
    if schema.equal_schemas(record, IdList):
        key = 'items'
    elif schema.equal_schemas(record, IdScoreList):
        key = 'keys'
    else:
        # Original raised a bare NotImplementedError(); include the offending
        # record so failures are diagnosable.
        raise NotImplementedError(
            'get_categorical_limit not supported for schema {}'.format(record))
    assert record[key].metadata is not None, (
        "Blob {} doesn't have metadata".format(str(record[key]())))
    return record[key].metadata.categorical_limit
29 
30 
class InstantiationContext(object):
    """
    List of contexts where a layer could be instantiated
    """
    # Constant values double as human-readable context names.
    CALIBRATION = 'calibration'
    EVAL = 'eval'
    PREDICTION = 'prediction'
    TRAINING = 'training'
39 
40 
# Global mapping from layer name to the class implementing it.
_LAYER_REGISTRY = {}


def register_layer(name, layer):
    """Register `layer` under `name`; duplicate names are rejected."""
    assert name not in _LAYER_REGISTRY, "{0} already exists".format(name)
    _LAYER_REGISTRY[name] = layer


def layer_exists(name):
    """True iff a layer was registered under `name`."""
    return name in _LAYER_REGISTRY


def get_layer_class(name):
    """Look up the class registered under `name` (KeyError if absent)."""
    return _LAYER_REGISTRY[name]


def create_layer(layer_name, *args, **kwargs):
    """Instantiate the registered layer class, forwarding all arguments."""
    return get_layer_class(layer_name)(*args, **kwargs)
59 
60 
# Parameter-server placement info for a sparse parameter.
LayerPsParam = namedtuple('LayerPsParam', ['sparse_key', 'average_length'])


# TODO(amalevich): Modify this to some better struct, something closer to
# ParameterInfo.
LayerParameter = namedtuple(
    'LayerParameter',
    ['parameter', 'optimizer', 'initializer', 'ps_param'])
# Make every field optional (py2-compatible way of declaring defaults).
LayerParameter.__new__.__defaults__ = (None,) * 4
70 
71 
def _is_request_only_scalar(scalar):
    """True iff the scalar has metadata and every metadata entry flags the
    feature as request-only (`feature_is_request_only`)."""
    metadata_list = scalar.field_metadata()
    if not metadata_list:
        # No metadata at all means we cannot claim request-only.
        return False
    return all(
        metadata and metadata.feature_specs and getattr(
            metadata.feature_specs, 'feature_is_request_only', False)
        for metadata in metadata_list
    )
80 
81 
82 class ModelLayer(object):
83 
84  def __init__(self, model, prefix, input_record,
85  predict_input_record_fields=None, tags=None, **kwargs):
86  """
87  Base class for model layers. Layer is an abstraction that allows to
88  provide model description in terms of meta-operators, where each of the
89  meta-operators can have different implementations for training,
90  evaluation and prediction, that are instantiated later. As an example
91  SampledSoftmax can do something related to sampling depending on
92  supervision during the training and just apply softmax if it's used for
93  prediction/evaluation.
94 
95  All inputs/outputs from layers are represented as a record (instance of
96  schema bounded to blobs) and are accessible through input_record and
97  output_schema. If Layer needs to have only a subset of inputs/provides
98  subset of outputs during the inference - it should provide
99  predict_input_record and predict_output_schema correspondingly (those
100  records are expected to be a subset of input_record/output_schema).
101 
102  Each layer is also have list of Tags associated with it, that depends on
103  current context and arguments. It's possible to use those tags during
104  the instantiation time.
105 
106  """
107  self.name = model.next_layer_name(prefix)
108  self.model = model
109  self.kwargs = kwargs
110  self._input_record = input_record
111  if predict_input_record_fields:
112  if not isinstance(predict_input_record_fields, list):
113  predict_input_record_fields = [predict_input_record_fields]
115  predict_input_record_fields]
116  else:
117  self._predict_input_record = None
118 
119  self.request_only = True
120  if len(input_record.all_scalars()) == 0:
121  self.request_only = False
122  for scalar in input_record.all_scalars():
123  if not _is_request_only_scalar(scalar):
124  self.request_only = False
125  break
126 
127  self._output_schema = None
128  self._predict_output_schema = None
129  self.eval_output_schema = None
130  self.tags = set(tags or [])
131  self.tags.update(TagContext.current().tags)
132  self.params = []
133 
134  def get_type(self):
135  return self.__class__.__name__
136 
137  def _check_output_schema(self):
138  assert self._output_schema is not None, "Schema is not initialized"
139  assert (self._predict_output_schema is None or
141  self._output_schema)), (
142  "predict_output_schema is not a subset of the output_schema")
143 
144  @property
145  def predict_input_record(self):
146  return self._predict_input_record or self._input_record
147 
148  @property
149  def input_record(self):
150  return self._input_record
151 
152  @property
153  def predict_output_schema(self):
154  self._check_output_schema()
155  return self._predict_output_schema or self._output_schema
156 
157  @predict_output_schema.setter
158  def predict_output_schema(self, output_schema):
159  assert self._predict_output_schema is None
160  self._predict_output_schema = output_schema
161 
162  @property
163  def output_schema(self):
164  self._check_output_schema()
165  return self._output_schema
166 
167  @output_schema.setter
168  def output_schema(self, output_schema):
169  assert self._output_schema is None
170  self._output_schema = output_schema
171 
172  def get_parameters(self):
173  return self.params
174 
176  """Return a subset of parameters which can be converted to fp16"""
177  return []
178 
179  def get_memory_usage(self):
180  return 0
181 
182  def add_operators(self, net, init_net=None,
183  context=InstantiationContext.TRAINING):
184  # Namescope below should warranty that all intermediate blobs will be
185  # assiciated with the layer that produces them
186  with scope.NameScope(self.name):
187  if context not in {InstantiationContext.PREDICTION,
188  InstantiationContext.EVAL,
189  InstantiationContext.CALIBRATION}:
190  assert init_net, (
191  "Only prediction and eval context don't need init_net")
192  if init_net:
193  for param in self.params:
194  # TODO(amalevich): Either return back to lambdas, that add
195  # all params (looks a bit safer and breaking less
196  # abstractions) or extend Net interface to this type of
197  # operations better
198  init_net._net.op.extend([param.initializer])
199  if context == InstantiationContext.TRAINING:
200  self.add_train_ops(net)
201  elif context == InstantiationContext.EVAL:
202  self.add_eval_ops(net)
203  elif context == InstantiationContext.CALIBRATION:
204  self.add_calibration_ops(net)
205  else:
206  self.add_ops(net)
207 
208  def add_ops(self, net):
209  raise NotImplementedError
210 
211  def add_eval_ops(self, net):
212  # Default train layer implementation is completely matching predict
213  # layer implementation.
214  self.add_ops(net)
215 
216  def add_train_ops(self, net):
217  # Default eval layer implementation is completely matching eval
218  # layer implementation.
219  self.add_eval_ops(net)
220 
221  def add_calibration_ops(self, net):
222  # Default calibration layer implementation is completely matching eval
223  # layer implementation.
224  self.add_eval_ops(net)
# (removed: doxygen cross-reference residue appended by the extraction tool)