Issue
I'm following this tutorial https://www.tensorflow.org/guide/keras/train_and_evaluate#handling_losses_and_metrics_that_dont_fit_the_standard_signature.
The code in concern is this: (copied from the tutorial)
class LogisticEndpoint(keras.layers.Layer):
    """Training endpoint layer: computes the loss and an accuracy metric
    internally via `add_loss()` / `add_metric()`, so the model can be
    compiled without a `loss` argument.

    (Code quoted from the TensorFlow "train and evaluate" guide; the
    paste had lost its indentation, restored here.)
    """

    def __init__(self, name=None):
        super(LogisticEndpoint, self).__init__(name=name)
        # Loss/metric objects are created once here and reused per call.
        self.loss_fn = keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = keras.metrics.BinaryAccuracy()

    def call(self, targets, logits, sample_weights=None):
        # Compute the training-time loss value and add it
        # to the layer using `self.add_loss()`.
        loss = self.loss_fn(targets, logits, sample_weights)
        self.add_loss(loss)

        # Log accuracy as a metric and add it
        # to the layer using `self.add_metric()`.
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")

        # Return the inference-time prediction tensor (for `.predict()`).
        # NOTE(review): softmax over independent binary logits follows the
        # tutorial verbatim — sigmoid would be the usual pairing with
        # BinaryCrossentropy(from_logits=True); confirm against the guide.
        return tf.nn.softmax(logits)
import numpy as np  # NOTE(review): imported but never used in this snippet
# Functional-model wiring: the targets are a model *input*, because the loss
# is computed inside LogisticEndpoint at call time rather than in compile().
inputs = keras.Input(shape=(3,), name="inputs")
targets = keras.Input(shape=(10,), name="targets")
logits = keras.layers.Dense(10)(inputs)
# NOTE(review): the layer's signature is call(targets, logits, ...) but it is
# invoked here as (logits, targets) — the arguments look swapped relative to
# the signature; verify against the original tutorial.
predictions = LogisticEndpoint(name="predictions")(logits, targets)
model = keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")  # No loss argument!
What I need is to plot the model, so I called
# Render the model graph to m.png; show_shapes=True annotates tensor shapes.
# This is the call whose output is missing the targets -> predictions edge.
tf.keras.utils.plot_model(model, 'm.png', show_shapes=True)
Apparently, from the tutorial code, LogisticEndpoint takes two inputs: the output of the Dense layer and targets. However, in the plotted graph, the edge from targets (InputLayer) to predictions (LogisticEndpoint) is missing.
How would I revise the tutorial code so that plot is correct?
Solution
When a custom layer consumes two tensors, pass them to the layer as a single list/tuple argument rather than as two separate positional arguments; that way Keras tracks both as inputs of the layer and the plot shows both edges. Check the docs for more information. You could try something like this:
import tensorflow as tf
class LogisticEndpoint(tf.keras.layers.Layer):
    """Training endpoint layer, revised so `call()` takes a single
    list/tuple of tensors `[logits, targets]`.

    Passing both tensors through one structured `inputs` argument lets
    Keras track the targets InputLayer as a genuine layer input, so
    `plot_model` draws the edge from targets to this layer.
    (Paste had lost its indentation, restored here.)
    """

    def __init__(self, name=None):
        super(LogisticEndpoint, self).__init__(name=name)
        # Loss/metric objects are created once here and reused per call.
        self.loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = tf.keras.metrics.BinaryAccuracy()

    def call(self, inputs, sample_weights=None):
        # Unpack the structured input: built as [logits, targets] below.
        logits, targets = inputs

        # Compute the training-time loss value and add it
        # to the layer using `self.add_loss()`.
        loss = self.loss_fn(targets, logits, sample_weights)
        self.add_loss(loss)

        # Log accuracy as a metric and add it
        # to the layer using `self.add_metric()`.
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")

        # Return the inference-time prediction tensor (for `.predict()`).
        return tf.nn.softmax(logits)
# Build the model: targets are a model input because the endpoint layer
# computes the loss internally. Fixed the spacing typos from the original
# paste ("=tf. keras" -> "= tf.keras").
inputs = tf.keras.Input(shape=(3,), name="inputs")
targets = tf.keras.Input(shape=(10,), name="targets")
logits = tf.keras.layers.Dense(10)(inputs)
# A single list argument makes Keras register BOTH tensors as layer inputs,
# so the targets -> predictions edge appears in the plot.
predictions = LogisticEndpoint(name="predictions")([logits, targets])
model = tf.keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")  # No loss argument!
tf.keras.utils.plot_model(model, 'm.png', show_shapes=True)
Answered By - AloneTogether
0 comments:
Post a Comment
Note: Only a member of this blog may post a comment.