from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, model_helper, schema
from caffe2.python.layers import layers

import logging
import numpy as np

logger = logging.getLogger(__name__)
class LayerModelHelper(model_helper.ModelHelperBase):
    """
    Model helper for building models on top of the layers abstraction.

    Each layer is an abstraction at a higher level than an Operator. A layer
    is responsible for ownership of its own parameters and can easily be
    instantiated in multiple nets, possibly with different sets of ops.
    As an example: one can easily instantiate predict and train nets from
    the same set of layers, where the predict net will have a subset of the
    operators from the train net.
    """

    def __init__(self, name, input_feature_schema, trainer_extra_schema):
        super(LayerModelHelper, self).__init__(name=name)
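    # Usage sketch (illustrative only, not part of this module): the schema
    # layout below, including the field names 'dense' and 'label', is an
    # assumption made up for the example.
    #
    #   input_record = schema.Struct(
    #       ('dense', schema.Scalar((np.float32, (64, )))),
    #   )
    #   trainer_extra = schema.Struct(
    #       ('label', schema.Scalar(np.float32)),
    #   )
    #   model = LayerModelHelper(
    #       'example_model', input_record, trainer_extra)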
    def add_metric_field(self, name, value):
        assert name not in self._metrics_schema.fields, (
            "Trying to add metric field twice: {}".format(name))
        self._metrics_schema = self._metrics_schema + schema.Struct(
            (name, value))
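    # Example (sketch): a metric is registered under a name together with a
    # schema field pointing at the blob that carries its value. The blob name
    # and dtype here are assumptions for illustration.
    #
    #   model.add_metric_field(
    #       'window_loss',
    #       schema.Scalar(np.float32, model.net.NextBlob('window_loss')))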
    def add_global_constant(self, name, array=None, dtype=None,
                            initializer=None):
        # A constant is given either as a concrete array or as an initializer.
        assert name not in self.global_constants
        self.global_constants[name] = self.net.NextBlob(name)

        if array is not None:
            assert initializer is None,\
                "Only one from array and initializer should be specified"
            if dtype is None:
                array = np.array(array)
            else:
                array = np.array(array, dtype=dtype)

            if array.dtype == np.int32:
                op_name = 'GivenTensorIntFill'
            elif array.dtype == np.int64:
                op_name = 'GivenTensorInt64Fill'
            elif array.dtype == np.str:
                op_name = 'GivenTensorStringFill'
            else:
                op_name = 'GivenTensorFill'

            def initializer(blob_name):
                return core.CreateOperator(
                    op_name, [], blob_name,
                    shape=array.shape,
                    values=array.flatten().tolist())
        else:
            assert initializer is not None

        self.global_constant_initializers.append(
            initializer(self.global_constants[name]))
        return self.global_constants[name]

    def _init_global_constants(self):
        self.global_constants = {}
        self.global_constant_initializers = []
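    # Example (sketch): the dtype of the value selects the GivenTensor*Fill op
    # emitted by the generated initializer. The constant names are made up,
    # and the custom-initializer variant assumes the ConstantFill operator.
    #
    #   model.add_global_constant('MY_ONE', 1.0)               # GivenTensorFill
    #   model.add_global_constant('PAD_ID', 0, dtype='int32')  # GivenTensorIntFill
    #   model.add_global_constant(
    #       'UNK_SCORE', initializer=lambda blob: core.CreateOperator(
    #           'ConstantFill', [], blob, shape=[1], value=0.0))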
    def _add_global_constants(self, init_net):
        for initializer_op in self.global_constant_initializers:
            init_net._net.op.extend([initializer_op])

    def create_init_net(self, name):
        init_net = core.Net(name)
        self._add_global_constants(init_net)
        return init_net
    def next_layer_name(self, prefix):
        base_name = core.ScopedName(prefix)
        name = base_name
        index = 0
        while name in self._layer_names:
            name = base_name + '_auto_' + str(index)
            index += 1
        self._layer_names.add(name)
        return name
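    # Example (sketch): repeated prefixes get an '_auto_<index>' suffix so each
    # layer instance owns a distinct name, scoped by the active NameScope.
    #
    #   model.next_layer_name('fc')   # e.g. 'fc'
    #   model.next_layer_name('fc')   # e.g. 'fc_auto_0'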
    def add_layer(self, layer):
        self._layers.append(layer)
        for param in layer.get_parameters():
            self.param_to_optim[str(param.parameter)] = \
                param.optimizer or self.default_optimizer

        layer.add_operators(self.net, self.param_init_net)
        return layer.output_schema
    def get_parameter_blobs(self):
        param_blobs = []
        for layer in self._layers:
            for param in layer.get_parameters():
                param_blobs.append(param.parameter)
        return param_blobs
    @property
    def default_optimizer(self):
        return self._default_optimizer

    @default_optimizer.setter
    def default_optimizer(self, optimizer):
        self._default_optimizer = optimizer

    @property
    def input_feature_schema(self):
        return self._input_feature_schema

    @property
    def trainer_extra_schema(self):
        return self._trainer_extra_schema
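    # Example (sketch): parameters whose layer does not specify an optimizer
    # fall back to default_optimizer (see add_layer above), so set it before
    # adding such layers; NoOptim (defined below) leaves them untouched.
    #
    #   model.default_optimizer = model.NoOptim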
    @property
    def metrics_schema(self):
        """
        Returns the schema that represents model output that should be used for
        metric reporting.

        During the training/evaluation this schema will be appended to the
        schema that represents model output.
        """
        return self._metrics_schema

    @property
    def output_schema(self):
        assert self._output_schema is not None
        return self._output_schema

    @output_schema.setter
    def output_schema(self, schema):
        assert self._output_schema is None
        self._output_schema = schema

    @property
    def loss(self):
        assert self._loss is not None
        return self._loss

    @loss.setter
    def loss(self, loss):
        assert self._loss is None
        self._loss = loss

    def __getattr__(self, layer):
        if layers.layer_exists(layer):
            # Registered layers are created directly and added to the model.
            def wrapper(*args, **kwargs):
                return self.add_layer(
                    layers.create_layer(layer, self, *args, **kwargs))
            return wrapper
        elif core.IsOperator(layer):
            # Plain Caffe2 operators are wrapped into a 'Functional' layer.
            def wrapper(*args, **kwargs):
                def apply_operator(net, in_record, out_record):
                    net.__getattr__(layer)(in_record.field_blobs(),
                                           out_record.field_blobs(),
                                           **kwargs)
                if 'name' not in kwargs:
                    kwargs['name'] = layer
                return self.add_layer(
                    layers.create_layer(
                        'Functional',
                        self, *args, function=apply_operator,
                        **kwargs))
            return wrapper
        else:
            raise ValueError(
                "Trying to create non-registered layer: {0}".format(layer))
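    # Example (sketch): attribute access dispatches on the name. 'FC' is
    # assumed to be a registered layer and 'Relu' a plain Caffe2 operator in
    # this build; the 'dense' field comes from the made-up schema above.
    #
    #   fc_out = model.FC(model.input_feature_schema.dense, 32)
    #   relu_out = model.Relu(fc_out, 1)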
    def apply_optimizers(self, train_net, train_init_net, grad_map):
        for param, optimizer in self.param_to_optim.items():
            optimizer(
                train_net, train_init_net, param, grad_map.get(str(param)))

    # An optimizer which performs no optimization, leaving parameters as-is.
    def NoOptim(self, *args, **kwargs):
        pass
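A minimal end-to-end sketch of how this helper is typically driven. It is illustrative only: the schema layout, the field names, the 'FC' layer and the output dimension are assumptions for the example, not an API this module mandates.

    import numpy as np
    from caffe2.python import schema
    from caffe2.python.layer_model_helper import LayerModelHelper

    input_record = schema.Struct(
        ('dense', schema.Scalar((np.float32, (64, )))),
    )
    trainer_extra = schema.Struct(
        ('label', schema.Scalar(np.float32)),
    )

    model = LayerModelHelper('example_model', input_record, trainer_extra)
    model.default_optimizer = model.NoOptim

    # Each attribute call adds a layer and returns its output schema.
    fc_out = model.FC(model.input_feature_schema.dense, 16)
    model.output_schema = fc_out

    # Blobs for all parameters owned by the layers added so far.
    print(model.get_parameter_blobs())

    # A fresh init net that also carries the global constant initializers.
    init_net = model.create_init_net('example_init')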