Sync OSS keras to head.
PiperOrigin-RevId: 362398642
qlzh727 authored and tensorflower-gardener committed Mar 12, 2021
1 parent 6c1e3b2 commit e3f6e4b
Showing 28 changed files with 524 additions and 732 deletions.
83 changes: 6 additions & 77 deletions keras/backend.py
@@ -37,7 +37,6 @@
 from tensorflow.core.protobuf import config_pb2
 from tensorflow.python.distribute import distribute_coordinator as dc
 from tensorflow.python.distribute import distribute_coordinator_context as dc_context
-from tensorflow.python.eager import function as eager_function
 from tensorflow.python.eager.context import get_config
 from tensorflow.python.framework import config
 from tensorflow.python.framework import ops
@@ -1247,7 +1246,7 @@ def is_keras_tensor(x):
                      keras_tensor.KerasTensor)):
     raise ValueError('Unexpectedly found an instance of type `' + str(type(x)) +
                      '`. Expected a symbolic tensor instance.')
-  if keras_tensor.keras_tensors_enabled():
+  if tf.compat.v1.executing_eagerly_outside_functions():
     return isinstance(x, keras_tensor.KerasTensor)
   return hasattr(x, '_keras_history')
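
Note: an illustrative sketch (not part of the diff) of how the check behaves in TF 2.x, where eager execution is enabled outside `tf.function`s, so `is_keras_tensor` reduces to an isinstance test against `KerasTensor`:

```python
import tensorflow as tf
from tensorflow.keras import backend

x = tf.keras.Input(shape=(4,))           # symbolic KerasTensor
v = tf.constant([1.0, 2.0, 3.0, 4.0])    # plain eager tensor

print(backend.is_keras_tensor(x))  # True: isinstance(x, KerasTensor)
print(backend.is_keras_tensor(v))  # False: eager tensors are not symbolic
```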

@@ -1300,7 +1299,7 @@ def placeholder(shape=None,
   if not shape:
     if ndim:
       shape = (None,) * ndim
-  if keras_tensor.keras_tensors_enabled():
+  if tf.compat.v1.executing_eagerly_outside_functions():
     if sparse:
       spec = tf.SparseTensorSpec(
           shape=shape, dtype=dtype)
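
A usage sketch (assumed behavior under eager-outside-functions, not part of the diff): placeholders are now built from `TypeSpec`s and returned as KerasTensors rather than graph placeholder ops:

```python
from tensorflow.keras import backend

p = backend.placeholder(shape=(None, 4), dtype='float32')
sp = backend.placeholder(shape=(None, 4), sparse=True)
print(type(p).__name__, type(sp).__name__)  # KerasTensor variants in TF 2.4+
```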
@@ -1343,8 +1342,7 @@ def tensor_spec_to_placeholder(tensorspec):
     # (intended to be used with keras.backend.function)
     from keras.engine import input_layer  # pylint: disable=g-import-not-at-top
     x = input_layer.Input(tensor=x)
-    if keras_tensor.keras_tensors_enabled():
-      x._is_backend_placeholder = True
+    x._is_backend_placeholder = True
 
   return x

@@ -1359,7 +1357,7 @@ def is_placeholder(x):
     Boolean.
   """
   try:
-    if keras_tensor.keras_tensors_enabled():
+    if tf.compat.v1.executing_eagerly_outside_functions():
       return hasattr(x, '_is_backend_placeholder')
     from keras.utils import tf_utils  # pylint: disable=g-import-not-at-top
     if tf_utils.is_extension_type(x):
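
A minimal sketch against this repo's `keras.backend` module (`is_placeholder` may not be exported via `tf.keras.backend`): detection now relies on the `_is_backend_placeholder` attribute that `placeholder()` always sets.

```python
from keras import backend  # assumes the standalone keras sources of this repo

p = backend.placeholder(shape=(None, 2))
print(backend.is_placeholder(p))  # True: marked via `_is_backend_placeholder`
```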
@@ -3690,7 +3688,8 @@ def get_value(x):
 
   if tf.compat.v1.executing_eagerly_outside_functions():
     # This method of evaluating works inside the Keras FuncGraph.
-    return eval_in_eager_or_function(x)
+    with tf.init_scope():
+      return x.numpy()
 
   with x.graph.as_default():
     return x.eval(session=get_session((x,)))
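
A sketch of the new fast path (not part of the diff): `get_value` now escapes to the eager context with `tf.init_scope()` and reads via `.numpy()`, instead of lifting a subgraph with the deleted `eval_in_eager_or_function` helper:

```python
import tensorflow as tf
from tensorflow.keras import backend

v = tf.Variable(3.0)
lr = tf.keras.optimizers.SGD(learning_rate=0.1).learning_rate

print(backend.get_value(v))   # 3.0
print(backend.get_value(lr))  # 0.1, read via init_scope + .numpy()
```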
@@ -4038,76 +4037,6 @@ def __call__(self, inputs):
     return tf.nest.map_structure(self._eval_if_composite, output_structure)
 
 
-def eval_in_eager_or_function(outputs):
-  """Method to evaluate a tensor in eager or in a tf.function.
-  In the case of a tf.function, it will lift the tensor out of the function
-  and try to evaluate that piece of the graph.
-  Warning: Do not add new usages of this function.
-  TODO(b/150169018): delete this function once _keras_history_helper is no
-  longer needed, after Keras switches to KerasTensors and op layers
-  work via dispatch.
-  Args:
-    outputs: tensors to fetch.
-  Returns:
-    The value of the tensors (as numpy arrays).
-  """
-  outputs_structure = outputs
-  outputs = tf.nest.flatten(outputs, expand_composites=True)
-
-  graphs = {
-      i.graph
-      for i in tf.nest.flatten([outputs])
-      if hasattr(i, 'graph')
-  }
-  if len(graphs) > 1:
-    raise ValueError('Cannot create an execution function which is comprised '
-                     'of elements from multiple graphs.')
-
-  source_graph = graphs.pop()
-
-  with _scratch_graph() as exec_graph:
-    global_graph = get_graph()
-    if source_graph not in (exec_graph, global_graph):
-      raise ValueError('Unknown graph. Aborting.')
-
-    if source_graph is global_graph and exec_graph is not global_graph:
-      init_tensors = outputs
-      lifted_map = tf.__internal__.lift_to_graph(
-          tensors=init_tensors,
-          graph=exec_graph,
-          sources=[],
-          add_sources=True,
-          handle_captures=True,
-          base_graph=source_graph)
-
-      outputs = [lifted_map[i] for i in outputs]
-
-    # Consolidate updates
-    with exec_graph.as_default():
-      outputs = cast_variables_to_tensor(outputs)
-
-      exec_graph.inputs = exec_graph.internal_captures
-      exec_graph.outputs = outputs
-      graph_fn = eager_function.ConcreteFunction(exec_graph)
-
-  graph_fn._num_positional_args = 0
-  graph_fn._arg_keywords = []
-
-  outputs = graph_fn()
-
-  # EagerTensor.numpy() will often make a copy to ensure memory safety.
-  # However in this case `outputs` is not directly returned, so it is always
-  # safe to reuse the underlying buffer without checking. In such a case the
-  # private numpy conversion method is preferred to guarantee performance.
-  return tf.nest.pack_sequence_as(
-      outputs_structure,
-      [x._numpy() for x in outputs],  # pylint: disable=protected-access
-      expand_composites=True)
-
-
 @keras_export('keras.backend.function')
 @doc_controls.do_not_generate_docs
 def function(inputs, outputs, updates=None, name=None, **kwargs):

185 changes: 32 additions & 153 deletions keras/engine/base_layer.py
@@ -1078,103 +1078,25 @@ def _convert_non_tensor(x):
                                                 args, kwargs)
       training_arg_passed_by_framework = True
 
-    if keras_tensor.keras_tensors_enabled():
-      with call_context.enter(
-          layer=self, inputs=inputs, build_graph=True, training=training_value):
-        # Check input assumptions set after layer building, e.g. input shape.
-        outputs = self._keras_tensor_symbolic_call(
-            inputs, input_masks, args, kwargs)
-
-        if outputs is None:
-          raise ValueError('A layer\'s `call` method should return a '
-                           'Tensor or a list of Tensors, not None '
-                           '(layer: ' + self.name + ').')
-        if training_arg_passed_by_framework:
-          args, kwargs = self._set_call_arg_value(
-              'training', None, args, kwargs, pop_kwarg_if_none=True)
-        if mask_arg_passed_by_framework:
-          kwargs.pop('mask')
-        # Node connectivity does not special-case the first argument.
-        outputs = self._set_connectivity_metadata((inputs,) + args, kwargs,
-                                                  outputs)
-        return outputs
-
-    # Only create Keras history if at least one tensor originates from a
-    # `keras.Input`. Otherwise this Layer may be being used outside the Keras
-    # framework.
-    # TODO(kaftan): make this not special case inputs
-    if base_layer_utils.needs_keras_history(inputs):
-      base_layer_utils.create_keras_history(inputs)
-
     with call_context.enter(
         layer=self, inputs=inputs, build_graph=True, training=training_value):
-      # Symbolic execution on symbolic tensors. We will attempt to build
-      # the corresponding TF subgraph inside `backend.get_graph()`
-      input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
-      graph = backend.get_graph()
-      # Use `self._name_scope()` to avoid auto-incrementing the name.
-      with graph.as_default(), backend.name_scope(self._name_scope()):
-        # Build layer if applicable (if the `build` method has been
-        # overridden).
-        self._maybe_build(inputs)
-        cast_inputs = self._maybe_cast_inputs(inputs, input_list)
-
-        if not self.dynamic:
-          # Wrapping `call` function in autograph to allow for dynamic control
-          # flow and control dependencies in call. We are limiting this to
-          # subclassed layers as autograph is strictly needed only for
-          # subclassed layers and models.
-          # tf_convert will respect the value of autograph setting in the
-          # enclosing tf.function, if any.
-          if (base_layer_utils.is_subclassed(self) and
-              not base_layer_utils.from_saved_model(self)):
-            call_fn = tf.__internal__.autograph.tf_convert(self.call,
-                tf.__internal__.autograph.control_status_ctx())
-          else:
-            call_fn = self.call
-
-          try:
-            with autocast_variable.enable_auto_cast_variables(
-                self._compute_dtype_object):
-              outputs = call_fn(cast_inputs, *args, **kwargs)
-
-          except tf.errors.OperatorNotAllowedInGraphError as e:
-            raise TypeError('You are attempting to use Python control '
-                            'flow in a layer that was not declared to be '
-                            'dynamic. Pass `dynamic=True` to the class '
-                            'constructor.\nEncountered error:\n"""\n' + str(e) +
-                            '\n"""')
-        else:
-          # We will use static shape inference to return symbolic tensors
-          # matching the specifications of the layer outputs.
-          # Since `self.dynamic` is True, we will never attempt to
-          # run the underlying TF graph (which is disconnected).
-          # TODO(fchollet): consider py_func as an alternative, which
-          # would enable us to run the underlying graph if needed.
-          outputs = self._symbolic_call(inputs)
-
-        if outputs is None:
-          raise ValueError('A layer\'s `call` method should return a '
-                           'Tensor or a list of Tensors, not None '
-                           '(layer: ' + self.name + ').')
-        # TODO(kaftan): This should be 'any' and check all args
-        if base_layer_utils.have_all_keras_metadata(inputs):
-          if training_arg_passed_by_framework:
-            args, kwargs = self._set_call_arg_value(
-                'training', None, args, kwargs, pop_kwarg_if_none=True)
-          if mask_arg_passed_by_framework:
-            kwargs.pop('mask')
-          # Node connectivity does not special-case the first argument.
-          outputs = self._set_connectivity_metadata((inputs,) + args, kwargs,
-                                                    outputs)
-        self._handle_activity_regularization(inputs, outputs)
-        self._set_mask_metadata(inputs, outputs, input_masks, True)
-        if hasattr(self, '_set_inputs') and not self.inputs:
-          # Subclassed network: explicitly set metadata normally set by
-          # a call to self._set_inputs().
-          self._set_inputs(cast_inputs, outputs)
-
-        return outputs
+      # Check input assumptions set after layer building, e.g. input shape.
+      outputs = self._keras_tensor_symbolic_call(
+          inputs, input_masks, args, kwargs)
+
+      if outputs is None:
+        raise ValueError('A layer\'s `call` method should return a '
+                         'Tensor or a list of Tensors, not None '
+                         '(layer: ' + self.name + ').')
+      if training_arg_passed_by_framework:
+        args, kwargs = self._set_call_arg_value(
+            'training', None, args, kwargs, pop_kwarg_if_none=True)
+      if mask_arg_passed_by_framework:
+        kwargs.pop('mask')
+      # Node connectivity does not special-case the first argument.
+      outputs = self._set_connectivity_metadata((inputs,) + args, kwargs,
+                                                outputs)
+      return outputs
 
   def _set_training_mode(self, args, kwargs, call_context):
     training_mode = None
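
A sketch of the surviving path (not part of the diff): calling a layer on a KerasTensor now always takes the simplified branch above, inferring output specs via `_keras_tensor_symbolic_call` instead of tracing a per-call graph:

```python
import tensorflow as tf

inputs = tf.keras.Input(shape=(8,))          # KerasTensor
outputs = tf.keras.layers.Dense(2)(inputs)   # symbolic call, no real compute
model = tf.keras.Model(inputs, outputs)
print(outputs.shape)  # (None, 2), inferred statically
```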
@@ -1386,20 +1308,7 @@ def updates(self):
     warnings.warn('`layer.updates` will be removed in a future version. '
                   'This property should not be used in TensorFlow 2.0, '
                   'as `updates` are applied automatically.')
-    if keras_tensor.keras_tensors_enabled():
-      return []
-
-    collected_updates = []
-    all_layers = self._flatten_layers()
-    with backend.get_graph().as_default():
-      for layer in all_layers:
-        if not layer.trainable and not layer.stateful:
-          continue
-        for u in layer._updates:
-          if callable(u):
-            u = u()
-          collected_updates.append(u)
-    return collected_updates
+    return []
 
   @property
   def losses(self):
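
Observable effect, sketched under the assumption this commit is applied (`updates` is deprecated and always empty; stateful updates such as BatchNormalization's moving statistics are applied automatically):

```python
import tensorflow as tf

bn = tf.keras.layers.BatchNormalization()
bn.build((None, 4))
print(bn.updates)  # [], with a deprecation warning
```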
@@ -1576,16 +1485,12 @@ def _tag_callable(loss):
 
     self._eager_losses.extend(eager_losses)
 
-    if in_call_context and not keras_tensor.keras_tensors_enabled():
-      for symbolic_loss in symbolic_losses:
-        self._losses.append(symbolic_loss)
-    else:
-      for symbolic_loss in symbolic_losses:
-        if getattr(self, '_is_graph_network', False):
-          self._graph_network_add_loss(symbolic_loss)
-        else:
-          # Possible a loss was added in a Layer's `build`.
-          self._losses.append(symbolic_loss)
+    for symbolic_loss in symbolic_losses:
+      if getattr(self, '_is_graph_network', False):
+        self._graph_network_add_loss(symbolic_loss)
+      else:
+        # Possible a loss was added in a Layer's `build`.
+        self._losses.append(symbolic_loss)
 
   def _clear_losses(self):
     """Used every step in eager to reset losses."""
@@ -1679,10 +1584,7 @@ def call(self, inputs):
       raise TypeError('Unknown keyword arguments: ', str(kwargs.keys()))
 
     from_metric_obj = hasattr(value, '_metric_obj')
-    if keras_tensor.keras_tensors_enabled():
-      is_symbolic = isinstance(value, keras_tensor.KerasTensor)
-    else:
-      is_symbolic = tf_utils.is_symbolic_tensor(value)
+    is_symbolic = isinstance(value, keras_tensor.KerasTensor)
     in_call_context = base_layer_utils.call_context().in_call
 
     if name is None and not from_metric_obj:
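
A sketch mirroring the `add_metric` docstring example (the `aggregation` argument is assumed still accepted at this revision): the value is a KerasTensor, so `is_symbolic` is `True` and the metric is tracked on the graph network.

```python
import tensorflow as tf

inputs = tf.keras.Input(shape=(10,))
x = tf.keras.layers.Dense(10)(inputs)
outputs = tf.keras.layers.Dense(1)(x)
model = tf.keras.Model(inputs, outputs)

model.add_metric(tf.reduce_sum(x), name='metric_1', aggregation='mean')
print([m.name for m in model.metrics])  # ['metric_1']
```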
@@ -2705,12 +2607,9 @@ def _maybe_build(self, inputs):
 
     # Optionally load weight values specified at layer instantiation.
     if self._initial_weights is not None:
-      if tf.compat.v1.executing_eagerly_outside_functions():
-        with tf.init_scope():
-          # Using `init_scope` since we want variable assignment in
-          # `set_weights` to be treated like variable initialization.
-          self.set_weights(self._initial_weights)
-      else:
-        self.set_weights(self._initial_weights)
+      with tf.init_scope():
+        # Using `init_scope` since we want variable assignment in
+        # `set_weights` to be treated like variable initialization.
+        self.set_weights(self._initial_weights)
       self._initial_weights = None

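A usage sketch (not part of the diff): the legacy `weights` constructor argument populates `_initial_weights`, which is now always assigned under `tf.init_scope()` at build time, i.e. treated as initialization:

```python
import numpy as np
import tensorflow as tf

kernel = np.ones((4, 2), dtype='float32')
bias = np.zeros((2,), dtype='float32')

layer = tf.keras.layers.Dense(2, weights=[kernel, bias])
layer.build((None, 4))  # triggers _maybe_build -> set_weights under init_scope
print(layer.get_weights()[0].mean())  # 1.0
```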
@@ -3294,31 +3193,11 @@ def get_config(self):
 
 def _in_functional_construction_mode(layer, inputs, args, kwargs, input_list):  # pylint: disable=unused-argument
   """Check the arguments to see if we are constructing a functional model."""
-  if keras_tensor.keras_tensors_enabled():
-    # We are constructing a functional model if any of the inputs
-    # are KerasTensors
-    return any(
-        isinstance(tensor, keras_tensor.KerasTensor)
-        for tensor in tf.nest.flatten([inputs, args, kwargs]))
-  else:
-    if tf.executing_eagerly():
-      all_inputs_symbolic = all(
-          tf_utils.is_symbolic_tensor(t) for t in input_list)
-      if (base_layer_utils.is_subclassed(layer) and
-          any(tf_utils.is_symbolic_tensor(t) for t in tf.nest.flatten(
-              [inputs, args, kwargs])) and not all_inputs_symbolic):
-        raise ValueError('It appears you are trying to construct a '
-                         'functional model, but not all of the inputs in '
-                         'the first positional argument of your layer call '
-                         'are symbolic tensors. '
-                         '(Input objects, or the output of another layer) '
-                         'Functional models cannot correctly track custom '
-                         'layers unless all values in the first call argument '
-                         'are symbolic.')
-      return all_inputs_symbolic
-    else:
-      return (base_layer_utils.is_in_keras_graph() or
-              all(hasattr(t, '_keras_history') for t in input_list))
+  # We are constructing a functional model if any of the inputs
+  # are KerasTensors
+  return any(
+      isinstance(tensor, keras_tensor.KerasTensor)
+      for tensor in tf.nest.flatten([inputs, args, kwargs]))
 
 
 def _convert_numpy_or_python_types(x):
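
A sketch (not part of the diff): functional-construction mode is now triggered purely by the presence of a KerasTensor anywhere in the call arguments:

```python
import tensorflow as tf

layer = tf.keras.layers.ReLU()
symbolic = tf.keras.Input(shape=(3,))
eager = tf.ones((2, 3))

print(type(layer(symbolic)).__name__)  # KerasTensor: functional construction
print(type(layer(eager)).__name__)     # EagerTensor: executed immediately
```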
8 changes: 4 additions & 4 deletions keras/engine/base_layer_utils.py
@@ -204,6 +204,9 @@ def _create_keras_history_helper(tensors, processed_ops, created_layers):
     have been wrapped in `TensorFlowOpLayer` instances. Second element is
     a list of the `TensorFlowOpLayer` instances created.
   """
+  if tf.compat.v1.executing_eagerly_outside_functions():
+    raise ValueError(
+        '`create_keras_history` should only be called if eager is disabled!')
   # Import of `base_layer` needed in order to create `TensorFlowOpLayer`.
   # Cannot be imported at top because of circular dependencies.
   # TODO(omalleyt): Resolve circular dependency.
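
For context, a sketch of why the guard is safe (not part of the diff): with KerasTensors, raw TF ops applied to symbolic inputs are captured by operator dispatch (`TFOpLambda` layers), so Keras history never needs to be reconstructed outside graph mode:

```python
import tensorflow as tf

inputs = tf.keras.Input(shape=(3,))
outputs = tf.abs(inputs)  # dispatched to a TFOpLambda layer automatically
model = tf.keras.Model(inputs, outputs)
print(model.layers[-1].__class__.__name__)  # TFOpLambda
```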
@@ -246,10 +249,7 @@ def _create_keras_history_helper(tensors, processed_ops, created_layers):
             constants[i] = op_input
           else:
             with tf.init_scope():
-              if tf.compat.v1.executing_eagerly_outside_functions():
-                constants[i] = backend.eval_in_eager_or_function(op_input)
-              else:
-                constants[i] = backend.function([], op_input)([])
+              constants[i] = backend.function([], op_input)([])
       layer_inputs = unnest_if_single_tensor(layer_inputs)
       processed_ops, created_layers = _create_keras_history_helper(
           layer_inputs, processed_ops, created_layers)
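
A sketch of the retained v1 fallback (not part of the diff): `backend.function` evaluates a constant op input through a session, the only mode in which this helper still runs after the guard above:

```python
import tensorflow as tf
from tensorflow.keras import backend

tf.compat.v1.disable_eager_execution()  # the only mode this path still serves

c = tf.constant([1.0, 2.0])
f = backend.function([], [c])
print(f([]))  # [array([1., 2.], dtype=float32)]
```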
