Module keras.tests.model_subclassing_test_util

Keras models for use in Model subclassing tests.

Source code
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras models for use in Model subclassing tests."""

import keras
from keras import testing_utils


# pylint: disable=missing-docstring,not-callable
class SimpleConvTestModel(keras.Model):

  def __init__(self, num_classes=10):
    super(SimpleConvTestModel, self).__init__(name='test_model')
    self.num_classes = num_classes

    self.conv1 = keras.layers.Conv2D(32, (3, 3), activation='relu')
    self.flatten = keras.layers.Flatten()
    self.dense1 = keras.layers.Dense(num_classes, activation='softmax')

  def call(self, x):
    x = self.conv1(x)
    x = self.flatten(x)
    return self.dense1(x)


def get_multi_io_subclass_model(use_bn=False, use_dp=False, num_classes=(2, 3)):
  """Creates MultiIOModel for the tests of subclass model."""
  shared_layer = keras.layers.Dense(32, activation='relu')
  branch_a = [shared_layer]
  if use_dp:
    branch_a.append(keras.layers.Dropout(0.5))
  branch_a.append(keras.layers.Dense(num_classes[0], activation='softmax'))

  branch_b = [shared_layer]
  if use_bn:
    branch_b.append(keras.layers.BatchNormalization())
  branch_b.append(keras.layers.Dense(num_classes[1], activation='softmax'))

  model = (
      testing_utils._MultiIOSubclassModel(   # pylint: disable=protected-access
          branch_a, branch_b, name='test_model'))
  return model


class NestedTestModel1(keras.Model):
  """A model subclass nested inside a model subclass.
  """

  def __init__(self, num_classes=2):
    super(NestedTestModel1, self).__init__(name='nested_model_1')
    self.num_classes = num_classes
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes, activation='relu')
    self.bn = keras.layers.BatchNormalization()
    self.test_net = testing_utils.SmallSubclassMLP(
        num_hidden=32, num_classes=4, use_bn=True, use_dp=True)

  def call(self, inputs):
    x = self.dense1(inputs)
    x = self.bn(x)
    x = self.test_net(x)
    return self.dense2(x)


class NestedTestModel2(keras.Model):
  """A model subclass with a functional-API graph network inside.
  """

  def __init__(self, num_classes=2):
    super(NestedTestModel2, self).__init__(name='nested_model_2')
    self.num_classes = num_classes
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes, activation='relu')
    self.bn = keras.layers.BatchNormalization()
    self.test_net = self.get_functional_graph_model(32, 4)

  @staticmethod
  def get_functional_graph_model(input_dim, num_classes):
    # A simple functional-API model (a.k.a. graph network)
    inputs = keras.Input(shape=(input_dim,))
    x = keras.layers.Dense(32, activation='relu')(inputs)
    x = keras.layers.BatchNormalization()(x)
    outputs = keras.layers.Dense(num_classes)(x)
    return keras.Model(inputs, outputs)

  def call(self, inputs):
    x = self.dense1(inputs)
    x = self.bn(x)
    x = self.test_net(x)
    return self.dense2(x)


def get_nested_model_3(input_dim, num_classes):
  # A functional-API model with a subclassed model inside.
  # NOTE: this requires the inner subclass to implement `compute_output_shape`.

  inputs = keras.Input(shape=(input_dim,))
  x = keras.layers.Dense(32, activation='relu')(inputs)
  x = keras.layers.BatchNormalization()(x)

  class Inner(keras.Model):

    def __init__(self):
      super(Inner, self).__init__()
      self.dense1 = keras.layers.Dense(32, activation='relu')
      self.dense2 = keras.layers.Dense(5, activation='relu')
      self.bn = keras.layers.BatchNormalization()

    def call(self, inputs):
      x = self.dense1(inputs)
      x = self.dense2(x)
      return self.bn(x)

  test_model = Inner()
  x = test_model(x)
  outputs = keras.layers.Dense(num_classes)(x)
  return keras.Model(inputs, outputs, name='nested_model_3')


class CustomCallModel(keras.Model):

  def __init__(self):
    super(CustomCallModel, self).__init__()
    self.dense1 = keras.layers.Dense(1, activation='relu')
    self.dense2 = keras.layers.Dense(1, activation='softmax')

  def call(self, first, second, fiddle_with_output='no', training=True):
    combined = self.dense1(first) + self.dense2(second)
    if fiddle_with_output == 'yes':
      return 10. * combined
    else:
      return combined


class TrainingNoDefaultModel(keras.Model):

  def __init__(self):
    super(TrainingNoDefaultModel, self).__init__()
    self.dense1 = keras.layers.Dense(1)

  def call(self, x, training):
    return self.dense1(x)


class TrainingMaskingModel(keras.Model):

  def __init__(self):
    super(TrainingMaskingModel, self).__init__()
    self.dense1 = keras.layers.Dense(1)

  def call(self, x, training=False, mask=None):
    return self.dense1(x)

Functions

def get_multi_io_subclass_model(use_bn=False, use_dp=False, num_classes=(2, 3))

Creates a multi-input/multi-output model for subclassed-model tests.

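The helper wires two branches that share an initial Dense(32) layer into the private testing_utils._MultiIOSubclassModel. A minimal smoke-test sketch, assuming (as the name suggests) that the returned model consumes a list of two 2-D inputs and produces two softmax outputs with 2 and 3 classes respectively:

import numpy as np
import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import get_multi_io_subclass_model

model = get_multi_io_subclass_model(use_bn=True, use_dp=True, num_classes=(2, 3))
x1 = np.random.random((8, 16)).astype('float32')
x2 = np.random.random((8, 16)).astype('float32')
y1 = tf.keras.utils.to_categorical(np.random.randint(2, size=(8,)), 2)
y2 = tf.keras.utils.to_categorical(np.random.randint(3, size=(8,)), 3)
# A single loss string is applied to both outputs.
model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
model.fit([x1, x2], [y1, y2], epochs=1, batch_size=4, verbose=0)
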
def get_nested_model_3(input_dim, num_classes)

Builds a functional-API model with a subclassed model (Inner) nested inside. As the source notes, this pattern requires the inner subclassed model to implement compute_output_shape.
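
Inner does not define compute_output_shape in the source above. A hedged sketch of what such a method could look like, given that Inner ends with Dense(5) followed by a shape-preserving BatchNormalization:

import tensorflow as tf

# Illustrative only: a compute_output_shape that could be added to the Inner class.
# Only the last dimension changes, to the 5 units of the final Dense layer.
def compute_output_shape(self, input_shape):
  input_shape = tf.TensorShape(input_shape).as_list()
  return tf.TensorShape(input_shape[:-1] + [5])

get_nested_model_3(input_dim=10, num_classes=2) returns a functional keras.Model named 'nested_model_3' whose output layer has num_classes units.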

Classes

class CustomCallModel

Model groups layers into an object with training and inference features.

Args

inputs: The input(s) of the model: a keras.Input object or a list of keras.Input objects.
outputs: The output(s) of the model. See the Functional API example below.
name: String, the name of the model.

There are two ways to instantiate a Model:

1 - With the "Functional API", where you start from Input, you chain layer calls to specify the model's forward pass, and finally you create your model from inputs and outputs:

import tensorflow as tf

inputs = tf.keras.Input(shape=(3,))
x = tf.keras.layers.Dense(4, activation=tf.nn.relu)(inputs)
outputs = tf.keras.layers.Dense(5, activation=tf.nn.softmax)(x)
model = tf.keras.Model(inputs=inputs, outputs=outputs)

Note: Only dicts, lists, and tuples of input tensors are supported. Nested inputs are not supported (e.g. lists of lists or dicts of dicts).

2 - By subclassing the Model class: in that case, you should define your layers in __init__ and you should implement the model's forward pass in call.

import tensorflow as tf

class MyModel(tf.keras.Model):

  def __init__(self):
    super(MyModel, self).__init__()
    self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
    self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)

  def call(self, inputs):
    x = self.dense1(inputs)
    return self.dense2(x)

model = MyModel()

If you subclass Model, you can optionally have a training argument (boolean) in call, which you can use to specify a different behavior in training and inference:

import tensorflow as tf

class MyModel(tf.keras.Model):

  def __init__(self):
    super(MyModel, self).__init__()
    self.dense1 = tf.keras.layers.Dense(4, activation=tf.nn.relu)
    self.dense2 = tf.keras.layers.Dense(5, activation=tf.nn.softmax)
    self.dropout = tf.keras.layers.Dropout(0.5)

  def call(self, inputs, training=False):
    x = self.dense1(inputs)
    if training:
      x = self.dropout(x, training=training)
    return self.dense2(x)

model = MyModel()

Once the model is created, you can configure it with losses and metrics via model.compile(), train it with model.fit(), or use it for prediction with model.predict().

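CustomCallModel exercises a non-standard call signature: two positional inputs plus a fiddle_with_output keyword that scales the result. A minimal forward-pass sketch (input widths chosen arbitrarily):

import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import CustomCallModel

model = CustomCallModel()
first = tf.ones((4, 3))
second = tf.ones((4, 5))
combined = model(first, second)                          # Dense(1) + Dense(1): shape (4, 1)
scaled = model(first, second, fiddle_with_output='yes')  # 10x the combined output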


class NestedTestModel1 (num_classes=2)

A model subclass nested inside a model subclass.

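NestedTestModel1 routes its input through Dense(32), BatchNormalization, the nested testing_utils.SmallSubclassMLP, and a final Dense(num_classes). A minimal forward-pass sketch on random 2-D input:

import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import NestedTestModel1

model = NestedTestModel1(num_classes=2)
outputs = model(tf.random.uniform((8, 16)))  # final Dense(2) gives shape (8, 2)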


class NestedTestModel2 (num_classes=2)

A model subclass with a functional-API graph network inside.

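Here the nested component is the functional graph network built by get_functional_graph_model(32, 4); its Input expects 32 features, matching the 32 units of dense1. A minimal forward-pass sketch:

import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import NestedTestModel2

model = NestedTestModel2(num_classes=2)
outputs = model(tf.random.uniform((8, 10)))  # dense1 -> bn -> inner graph net -> Dense(2): shape (8, 2)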


Static methods

def get_functional_graph_model(input_dim, num_classes)
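
The static method can also be used on its own to build a small standalone graph network, for example:

# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import NestedTestModel2

# Input(32) -> Dense(32, relu) -> BatchNormalization -> Dense(4)
net = NestedTestModel2.get_functional_graph_model(input_dim=32, num_classes=4)
net.summary()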


class SimpleConvTestModel (num_classes=10)

Model groups layers into an object with training and inference features.

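SimpleConvTestModel expects 4-D image input (batch, height, width, channels) with spatial dimensions of at least 3x3 for the valid-padding Conv2D. A minimal compile-and-fit smoke test on random data:

import numpy as np
import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import SimpleConvTestModel

model = SimpleConvTestModel(num_classes=10)
model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
x = np.random.random((8, 28, 28, 1)).astype('float32')
y = tf.keras.utils.to_categorical(np.random.randint(10, size=(8,)), 10)
model.fit(x, y, epochs=1, batch_size=4, verbose=0)  # output is a (batch, 10) softmax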


class TrainingMaskingModel

Model groups layers into an object with training and inference features.

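TrainingMaskingModel only checks that call accepts training and mask keyword arguments; neither affects the Dense(1) output. A minimal sketch:

import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import TrainingMaskingModel

model = TrainingMaskingModel()
outputs = model(tf.ones((2, 3)), training=True)  # mask defaults to None; shape (2, 1)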


class TrainingNoDefaultModel

Model groups layers into an object with training and inference features.

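Because call declares training with no default value, callers are expected to pass the flag explicitly. A minimal sketch:

import tensorflow as tf
# Assumes the Keras test sources are importable in your environment.
from keras.tests.model_subclassing_test_util import TrainingNoDefaultModel

model = TrainingNoDefaultModel()
outputs = model(tf.ones((2, 3)), training=True)  # Dense(1) gives shape (2, 1)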
