1、__init__(): Defines custom layer attributes, and creates layer state variables that do not depend on input shapes, using add_weight().
定义自定义层属性,并创建不依赖于输入形状的层状态变量。
2、build(self, input_shape): This method can be used to create weights that depend on the shape(s) of the input(s), using add_weight(). __call__() will automatically build the layer (if it has not been built yet) by calling build().
class Layer(object):
    """Base layer class (remainder of the implementation omitted here)."""

    def build(self, input_shape):
        """Creates the layer weights.

        Must be implemented on all layers that have weights.

        # Arguments
            input_shape: Keras tensor (future input to layer)
                or list/tuple of Keras tensors to reference
                for weight shape computations.
        """
        # The base implementation creates no weights; it only records that
        # the layer has been built so it will not be built a second time.
        self.built = True
class _Conv(Layer):
    """Abstract N-D convolution layer (remainder of the class omitted here)."""

    def build(self, input_shape):
        """Creates the convolution kernel and (optionally) the bias weights.

        The channel dimension of `input_shape` must be known, since the
        kernel shape depends on the number of input channels.
        """
        # Position of the channel axis depends on the data layout.
        channel_axis = 1 if self.data_format == 'channels_first' else -1
        input_dim = input_shape[channel_axis]
        if input_dim is None:
            raise ValueError('The channel dimension of the inputs '
                             'should be defined. Found `None`.')
        # Kernel shape: spatial dims + (in_channels, out_channels).
        kernel_shape = self.kernel_size + (input_dim, self.filters)
        self.kernel = self.add_weight(
            shape=kernel_shape,
            initializer=self.kernel_initializer,
            name='kernel',
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint)
        # One bias per output filter; None when bias is disabled.
        self.bias = None
        if self.use_bias:
            self.bias = self.add_weight(
                shape=(self.filters,),
                initializer=self.bias_initializer,
                name='bias',
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint)
        # Set input spec: inputs must be rank + 2 dimensional (batch and
        # channel axes) with a fixed channel dimension.
        self.input_spec = InputSpec(ndim=self.rank + 2,
                                    axes={channel_axis: input_dim})
        self.built = True
3、call(self, *args, **kwargs): Called in __call__ after making sure build() has been called. call() performs the logic of applying the layer to the input tensors (which should be passed in as argument). Two reserved keyword arguments you can optionally use in call() are: - training (boolean, whether the call is in inference mode or training mode) - mask (boolean tensor encoding masked timesteps in the input, used in RNN layers)
class Layer(object):
    """Base layer class (remainder of the implementation omitted here)."""

    def call(self, inputs, **kwargs):
        """This is where the layer's logic lives.

        # Arguments
            inputs: Input tensor, or list/tuple of input tensors.
            **kwargs: Additional keyword arguments.

        # Returns
            A tensor or list/tuple of tensors.
        """
        # The base class applies no transformation: identity pass-through.
        return inputs
class _Conv(Layer):
    """Abstract N-D convolution layer (remainder of the class omitted here)."""

    def call(self, inputs):
        """Applies the convolution, then the optional bias and activation.

        # Arguments
            inputs: Input tensor.

        # Returns
            The output tensor of the convolution.
        """
        # Dispatch on the spatial rank; only the 2-D branch is shown in
        # this excerpt, the 1-D and 3-D branches were elided by the author.
        if self.rank == 1:
            """ 省略该部分代码 """
        if self.rank == 2:
            # Backend convolution; K is the Keras backend module.
            outputs = K.conv2d(
                inputs,
                self.kernel,
                strides=self.strides,
                padding=self.padding,
                data_format=self.data_format,
                dilation_rate=self.dilation_rate)
        if self.rank == 3:
            """ 省略该部分代码 """
        # Bias is added via the backend so the broadcast respects
        # `data_format` (channels_first vs channels_last).
        if self.use_bias:
            outputs = K.bias_add(
                outputs,
                self.bias,
                data_format=self.data_format)
        # Apply the activation last; with no activation configured the raw
        # convolution output is returned unchanged.
        if self.activation is not None:
            return self.activation(outputs)
        return outputs
4、get_config(self): Returns a dictionary containing the configuration used to initialize this layer. If the keys differ from the arguments in __init__, then override from_config(self) as well. This method is used when saving the layer or a model that contains this layer.
class Node(object):
    # A Node records one connection between layers; the visible code wires
    # the node into the inbound/outbound node lists of the layers involved.
    def __init__(self, outbound_layer,
                 inbound_layers, node_indices, tensor_indices,
                 input_tensors, output_tensors,
                 input_masks, output_masks,
                 input_shapes, output_shapes,
                 arguments=None):
        """Part of the constructor body is omitted in this excerpt."""
        # Add nodes to all layers involved: this node is an *outbound* node
        # of every layer that feeds it...
        for layer in inbound_layers:
            # `layer` may be None (e.g. an input with no originating layer).
            if layer is not None:
                layer._outbound_nodes.append(self)
        # ...and an *inbound* node of the layer that produced the output.
        outbound_layer._inbound_nodes.append(self)
        """ 此处省略部分代码 """
在源码中,对这两个属性的解释如下:
"""
Each time a layer is connected to some new input,
a node is added to `layer._inbound_nodes`.
Each time the output of a layer is used by another layer,
a node is added to `layer._outbound_nodes`.
"""
class Container(Layer):
    """Part of the class is omitted in this excerpt."""
    # NOTE(review): `def __init__(...):` and the `......` lines below are
    # placeholders kept from the excerpt; they are not valid Python.
    def __init__(...):
        ......
        def build_map_of_graph(tensor, finished_nodes, nodes_in_progress,
                               layer=None, node_index=None, tensor_index=None):
            """Builds a map of the graph of layers.
            This recursively updates the map `layer_indices`,
            the list `nodes_in_decreasing_depth` and the set `container_nodes`.
            # Arguments
                tensor: Some tensor in a graph.
                finished_nodes: Set of nodes whose subgraphs have been traversed
                    completely. Useful to prevent duplicated work.
                nodes_in_progress: Set of nodes that are currently active on the
                    recursion stack. Useful to detect cycles.
                layer: Layer from which `tensor` comes from. If not provided,
                    will be obtained from `tensor._keras_history`.
                node_index: Node index from which `tensor` comes from.
                tensor_index: Tensor_index from which `tensor` comes from.
            # Raises
                RuntimeError: if a cycle is detected.
            """
            # Recover the producing layer/node from the tensor's Keras
            # history when the caller did not supply them.
            if not layer or node_index is None or tensor_index is None:
                layer, node_index, tensor_index = tensor._keras_history
            node = layer._inbound_nodes[node_index]
            # Prevent cycles: a node already on the recursion stack means
            # the graph contains a loop.
            if node in nodes_in_progress:
                raise RuntimeError(
                    'The tensor ' + str(tensor) + ' at layer "' +
                    layer.name + '" is part of a cycle.')
            # Don't repeat work for shared subgraphs
            if node in finished_nodes:
                return
            # Update container_nodes.
            container_nodes.add(self._node_key(layer, node_index))
            # Store the traversal order for layer sorting.
            if layer not in layer_indices:
                layer_indices[layer] = len(layer_indices)
            nodes_in_progress.add(node)
            # Propagate to all previous tensors connected to this node
            # (depth-first walk towards the inputs of the graph).
            for i in range(len(node.inbound_layers)):
                x = node.input_tensors[i]
                layer = node.inbound_layers[i]
                node_index = node.node_indices[i]
                tensor_index = node.tensor_indices[i]
                build_map_of_graph(x, finished_nodes, nodes_in_progress,
                                   layer, node_index, tensor_index)
            finished_nodes.add(node)
            nodes_in_progress.remove(node)
            # Post-order append: a node is recorded only after all of its
            # ancestors, yielding the depth-decreasing order the name implies.
            nodes_in_decreasing_depth.append(node)
        finished_nodes = set()
        nodes_in_progress = set()
        # Start one traversal from each output tensor of the container.
        for x in self.outputs:
            build_map_of_graph(x, finished_nodes, nodes_in_progress)
        ......