Module keras.saving.saved_model.json_utils
Utils for creating and loading the Layer metadata for SavedModel.
These are required to retain the original format of the build input shape, since layers and models may have different build behaviors depending on whether the shape is a list, tuple, or TensorShape. For example, Network.build() will create separate inputs if the given input_shape is a list, and will create a single input if the given shape is a tuple.
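For illustration, a minimal round-trip sketch using this module's Encoder and decode. The import path below matches the module name above, but this is an internal utility, so the exact path may differ between TF/Keras releases:

import json

import tensorflow as tf
from keras.saving.saved_model import json_utils

# A tuple build shape is tagged on the way out and restored on the way back in.
serialized = json.dumps((None, 32), cls=json_utils.Encoder)
print(serialized)                      # {"class_name": "__tuple__", "items": [null, 32]}
print(json_utils.decode(serialized))   # (None, 32), still a tuple

# A TensorShape likewise comes back as a TensorShape rather than a plain list.
shape_json = json.dumps(tf.TensorShape([None, 32]), cls=json_utils.Encoder)
print(repr(json_utils.decode(shape_json)))  # TensorShape([None, 32])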
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utils for creating and loading the Layer metadata for SavedModel.
These are required to retain the original format of the build input shape, since
layers and models may have different build behaviors depending on if the shape
is a list, tuple, or TensorShape. For example, Network.build() will create
separate inputs if the given input_shape is a list, and will create a single
input if the given shape is a tuple.
"""
import tensorflow.compat.v2 as tf
import collections
import enum
import json
import numpy as np
import wrapt
from tensorflow.python.framework import type_spec
class Encoder(json.JSONEncoder):
  """JSON encoder and decoder that handles TensorShapes and tuples."""

  def default(self, obj):  # pylint: disable=method-hidden
    """Encodes objects for types that aren't handled by the default encoder."""
    if isinstance(obj, tf.TensorShape):
      items = obj.as_list() if obj.rank is not None else None
      return {'class_name': 'TensorShape', 'items': items}
    return get_json_type(obj)

  def encode(self, obj):
    return super(Encoder, self).encode(_encode_tuple(obj))


def _encode_tuple(x):
  if isinstance(x, tuple):
    return {'class_name': '__tuple__',
            'items': tuple(_encode_tuple(i) for i in x)}
  elif isinstance(x, list):
    return [_encode_tuple(i) for i in x]
  elif isinstance(x, dict):
    return {key: _encode_tuple(value) for key, value in x.items()}
  else:
    return x


def decode(json_string):
  return json.loads(json_string, object_hook=_decode_helper)


def _decode_helper(obj):
  """A decoding helper that is TF-object aware."""
  if isinstance(obj, dict) and 'class_name' in obj:
    if obj['class_name'] == 'TensorShape':
      return tf.TensorShape(obj['items'])
    elif obj['class_name'] == 'TypeSpec':
      return type_spec.lookup(obj['type_spec'])._deserialize(  # pylint: disable=protected-access
          _decode_helper(obj['serialized']))
    elif obj['class_name'] == '__tuple__':
      return tuple(_decode_helper(i) for i in obj['items'])
    elif obj['class_name'] == '__ellipsis__':
      return Ellipsis
  return obj


def get_json_type(obj):
  """Serializes any object to a JSON-serializable structure.

  Args:
    obj: the object to serialize

  Returns:
    JSON-serializable structure representing `obj`.

  Raises:
    TypeError: if `obj` cannot be serialized.
  """
  # if obj is a serializable Keras class instance
  # e.g. optimizer, layer
  if hasattr(obj, 'get_config'):
    return {'class_name': obj.__class__.__name__, 'config': obj.get_config()}

  # if obj is any numpy type
  if type(obj).__module__ == np.__name__:
    if isinstance(obj, np.ndarray):
      return obj.tolist()
    else:
      return obj.item()

  # misc functions (e.g. loss function)
  if callable(obj):
    return obj.__name__

  # if obj is a python 'type'
  if type(obj).__name__ == type.__name__:
    return obj.__name__

  if isinstance(obj, tf.compat.v1.Dimension):
    return obj.value

  if isinstance(obj, tf.TensorShape):
    return obj.as_list()

  if isinstance(obj, tf.DType):
    return obj.name

  if isinstance(obj, collections.abc.Mapping):
    return dict(obj)

  if obj is Ellipsis:
    return {'class_name': '__ellipsis__'}

  if isinstance(obj, wrapt.ObjectProxy):
    return obj.__wrapped__

  if isinstance(obj, tf.TypeSpec):
    try:
      type_spec_name = type_spec.get_name(type(obj))
      return {'class_name': 'TypeSpec', 'type_spec': type_spec_name,
              'serialized': obj._serialize()}  # pylint: disable=protected-access
    except ValueError:
      raise ValueError('Unable to serialize {} to JSON, because the TypeSpec '
                       'class {} has not been registered.'
                       .format(obj, type(obj)))

  if isinstance(obj, enum.Enum):
    return obj.value

  raise TypeError('Not JSON Serializable:', obj)
Functions
def decode(json_string)
-
Decodes a JSON string, converting the tagged structures produced by Encoder (tuples, TensorShapes, TypeSpecs, Ellipsis) back to their original types.
def decode(json_string):
  return json.loads(json_string, object_hook=_decode_helper)
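As an illustration, with hand-written JSON standing in for real SavedModel metadata (the keys below are made up, and the import path is assumed as above), decode converts tagged dicts wherever they appear, because _decode_helper runs as json.loads' object_hook on every nested JSON object:

from keras.saving.saved_model import json_utils

metadata = ('{"build_input_shape": {"class_name": "__tuple__", "items": [null, 4]},'
            ' "output_shape": {"class_name": "TensorShape", "items": [null, 8]}}')
decoded = json_utils.decode(metadata)
print(type(decoded['build_input_shape']))  # <class 'tuple'>
print(repr(decoded['output_shape']))       # TensorShape([None, 8])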
def get_json_type(obj)
-
Serializes any object to a JSON-serializable structure.
Args
obj
- the object to serialize
Returns
JSON-serializable structure representing obj.
Raises
TypeError
- if obj cannot be serialized.
def get_json_type(obj):
  """Serializes any object to a JSON-serializable structure.

  Args:
    obj: the object to serialize

  Returns:
    JSON-serializable structure representing `obj`.

  Raises:
    TypeError: if `obj` cannot be serialized.
  """
  # if obj is a serializable Keras class instance
  # e.g. optimizer, layer
  if hasattr(obj, 'get_config'):
    return {'class_name': obj.__class__.__name__, 'config': obj.get_config()}

  # if obj is any numpy type
  if type(obj).__module__ == np.__name__:
    if isinstance(obj, np.ndarray):
      return obj.tolist()
    else:
      return obj.item()

  # misc functions (e.g. loss function)
  if callable(obj):
    return obj.__name__

  # if obj is a python 'type'
  if type(obj).__name__ == type.__name__:
    return obj.__name__

  if isinstance(obj, tf.compat.v1.Dimension):
    return obj.value

  if isinstance(obj, tf.TensorShape):
    return obj.as_list()

  if isinstance(obj, tf.DType):
    return obj.name

  if isinstance(obj, collections.abc.Mapping):
    return dict(obj)

  if obj is Ellipsis:
    return {'class_name': '__ellipsis__'}

  if isinstance(obj, wrapt.ObjectProxy):
    return obj.__wrapped__

  if isinstance(obj, tf.TypeSpec):
    try:
      type_spec_name = type_spec.get_name(type(obj))
      return {'class_name': 'TypeSpec', 'type_spec': type_spec_name,
              'serialized': obj._serialize()}  # pylint: disable=protected-access
    except ValueError:
      raise ValueError('Unable to serialize {} to JSON, because the TypeSpec '
                       'class {} has not been registered.'
                       .format(obj, type(obj)))

  if isinstance(obj, enum.Enum):
    return obj.value

  raise TypeError('Not JSON Serializable:', obj)
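A few illustrative calls (import path assumed as above), showing the JSON-ready values get_json_type produces for objects the stock encoder cannot handle:

import numpy as np
import tensorflow as tf
from keras.saving.saved_model import json_utils

print(json_utils.get_json_type(np.float32(0.5)))            # 0.5 (a Python float)
print(json_utils.get_json_type(np.arange(3)))               # [0, 1, 2]
print(json_utils.get_json_type(tf.float32))                 # float32
print(json_utils.get_json_type(tf.TensorShape([None, 3])))  # [None, 3]
print(json_utils.get_json_type(tf.keras.layers.ReLU()))
# {'class_name': 'ReLU', 'config': {...}} via the layer's get_config()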
Classes
class Encoder (*, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, default=None)
-
JSON encoder and decoder that handles TensorShapes and tuples.
Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt encoding of keys that are not str, int, float or None. If skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str objects with all incoming non-ASCII characters escaped. If ensure_ascii is false, the output can contain non-ASCII characters.
If check_circular is true, then lists, dicts, and custom encoded objects will be checked for circular references during encoding to prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be encoded as such. This behavior is not JSON specification compliant, but is consistent with most JavaScript based encoders and decoders. Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be sorted by key; this is useful for regression tests to ensure that JSON serializations can be compared on a day-to-day basis.
If indent is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. None is the most compact representation.
If specified, separators should be an (item_separator, key_separator) tuple. The default is (', ', ': ') if indent is None and (',', ': ') otherwise. To get the most compact JSON representation, you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects that can't otherwise be serialized. It should return a JSON encodable version of the object or raise a TypeError.
class Encoder(json.JSONEncoder):
  """JSON encoder and decoder that handles TensorShapes and tuples."""

  def default(self, obj):  # pylint: disable=method-hidden
    """Encodes objects for types that aren't handled by the default encoder."""
    if isinstance(obj, tf.TensorShape):
      items = obj.as_list() if obj.rank is not None else None
      return {'class_name': 'TensorShape', 'items': items}
    return get_json_type(obj)

  def encode(self, obj):
    return super(Encoder, self).encode(_encode_tuple(obj))
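The constructor arguments described above work as usual. For example (import path assumed as above, as a sketch rather than canonical usage), metadata can be pretty-printed or emitted in its most compact form:

from keras.saving.saved_model import json_utils

enc = json_utils.Encoder(indent=2, sort_keys=True)
print(enc.encode({'build_input_shape': (None, 8)}))
# {
#   "build_input_shape": {
#     "class_name": "__tuple__",
#     "items": [
#       null,
#       8
#     ]
#   }
# }

compact = json_utils.Encoder(separators=(',', ':')).encode([1.5, (2, 3)])
print(compact)  # [1.5,{"class_name":"__tuple__","items":[2,3]}]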
Ancestors
- json.encoder.JSONEncoder
Methods
def default(self, obj)
-
Encodes objects for types that aren't handled by the default encoder.
def default(self, obj):  # pylint: disable=method-hidden
  """Encodes objects for types that aren't handled by the default encoder."""
  if isinstance(obj, tf.TensorShape):
    items = obj.as_list() if obj.rank is not None else None
    return {'class_name': 'TensorShape', 'items': items}
  return get_json_type(obj)
def encode(self, obj)
-
Return a JSON string representation of a Python data structure.
>>> from json.encoder import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
def encode(self, obj):
  return super(Encoder, self).encode(_encode_tuple(obj))
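A short sketch of why encode() pre-processes its argument with _encode_tuple (import path assumed as above): the stock JSONEncoder silently turns tuples into JSON arrays, so they come back as lists, whereas Encoder tags them so decode() can restore the original type:

import json

from keras.saving.saved_model import json_utils

shape = (None, 16)
plain = json.loads(json.dumps(shape))                                  # stock encoder
tagged = json_utils.decode(json.dumps(shape, cls=json_utils.Encoder))  # this module

print(type(plain), plain)    # <class 'list'> [None, 16]
print(type(tagged), tagged)  # <class 'tuple'> (None, 16)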