Issue
Hello, I am a beginner at computer vision and classification. I am trying to train a CNN model with TensorFlow and Keras, but I keep getting the error shown below this code. Could anyone help me, or at least give me a piece of advice?
from tensorflow import keras
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.image import ImageDataGenerator

model = keras.models.Sequential([
    keras.layers.Conv2D(filters=16, kernel_size=(3, 3), activation='relu', input_shape=(IMG_HEIGHT, IMG_WIDTH, channels)),
    keras.layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu'),
    keras.layers.MaxPool2D(pool_size=(2, 2)),
    keras.layers.BatchNormalization(axis=-1),
    keras.layers.Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),
    keras.layers.Conv2D(filters=128, kernel_size=(3, 3), activation='relu'),
    keras.layers.MaxPool2D(pool_size=(2, 2)),
    keras.layers.BatchNormalization(axis=-1),
    keras.layers.Flatten(),
    keras.layers.Dense(512, activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.Dropout(rate=0.5),
    keras.layers.Dense(3, activation='softmax')
])

learning_rate = 0.001
epochs = 30
opt = Adam(learning_rate=learning_rate, decay=learning_rate / (epochs * 0.5))
model.compile(loss='sparse_categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

aug = ImageDataGenerator(
    rotation_range=10,
    zoom_range=0.15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    shear_range=0.15,
    horizontal_flip=False,
    vertical_flip=False,
    fill_mode="nearest"
)

history = model.fit(aug.flow(X_train, y_train, batch_size=32), epochs=epochs, validation_data=(X_val, y_val))
InvalidArgumentError                      Traceback (most recent call last)
<ipython-input-15-15df12cd6846> in <module>()
     11
     12
---> 13 history = model.fit(aug.flow(X_train, y_train, batch_size=32), epochs=epochs, validation_data=(X_val, y_val))

1 frames
/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/execute.py in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name)
     53     ctx.ensure_initialized()
     54     tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
---> 55                                         inputs, attrs, num_outputs)
     56   except core._NotOkStatusException as e:
     57     if name is not None:

InvalidArgumentError: Graph execution error:

Detected at node 'sparse_categorical_crossentropy/SparseSoftmaxCrossEntropyWithLogits/SparseSoftmaxCrossEntropyWithLogits' defined at (most recent call last):
  File "/usr/lib/python3.7/runpy.py", line 193, in _run_module_as_main
    "__main__", mod_spec)
Solution
You just have to make sure your labels are zero-based, i.e. integers in the range 0 to 2, since your output layer has 3 nodes with a softmax activation function and you are using the sparse_categorical_crossentropy loss. Here is a working example:
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(filters=16, kernel_size=(3, 3), activation='relu', input_shape=(256, 256, 3)),
    tf.keras.layers.Conv2D(filters=32, kernel_size=(3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
    tf.keras.layers.BatchNormalization(axis=-1),
    tf.keras.layers.Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),
    tf.keras.layers.Conv2D(filters=128, kernel_size=(3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(pool_size=(2, 2)),
    tf.keras.layers.BatchNormalization(axis=-1),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(512, activation='relu'),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.Dropout(rate=0.5),
    tf.keras.layers.Dense(3, activation='softmax')
])

learning_rate = 0.001
epochs = 2
opt = tf.keras.optimizers.Adam(learning_rate=learning_rate, decay=learning_rate / (epochs * 0.5))
model.compile(loss='sparse_categorical_crossentropy', optimizer=opt, metrics=['accuracy'])

aug = tf.keras.preprocessing.image.ImageDataGenerator(
    rotation_range=10,
    zoom_range=0.15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    shear_range=0.15,
    horizontal_flip=False,
    vertical_flip=False,
    fill_mode="nearest"
)

# Dummy data: labels are drawn from {0, 1, 2}, matching the 3-node softmax output.
X_train = tf.random.normal((50, 256, 256, 3))
y_train = tf.random.uniform((50,), maxval=3, dtype=tf.int32)

history = model.fit(aug.flow(X_train, y_train, batch_size=2), epochs=epochs)
Use the dummy data as a guide for the shapes and label range of your real data.
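If you are not sure what your own labels look like, a minimal check-and-remap sketch along the following lines may help. It assumes y_train and y_val are NumPy arrays and that scikit-learn is available; LabelEncoder is just one convenient way to get a zero-based integer mapping:

import numpy as np
from sklearn.preprocessing import LabelEncoder

# With sparse_categorical_crossentropy and a 3-node softmax, the distinct
# label values must be exactly 0, 1 and 2.
print(np.unique(y_train))

# If they are not (e.g. 1..3, or strings), remap them to 0..n_classes-1.
# Fit on the training labels and apply the same mapping to y_val, since
# validation_data=(X_val, y_val) is scored with the same loss.
le = LabelEncoder()
y_train = le.fit_transform(y_train)
y_val = le.transform(y_val)

Alternatively, if your labels are already one-hot encoded with shape (n, 3), keep them that way and switch the loss to categorical_crossentropy (integer labels can be converted with tf.keras.utils.to_categorical(y_train, num_classes=3)).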
Answered By - AloneTogether