I am using TensorFlow with Google Colab for my project. When I fit the model, I get this error. Can you please help me solve this problem?
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# rescaling
train_datagen_augmented = ImageDataGenerator(rescale=1/255.,
                                             rotation_range=20,
                                             width_shift_range=0.2,
                                             height_shift_range=0.2,
                                             zoom_range=0.2,
                                             horizontal_flip=True)
train_data_augmented = train_datagen_augmented.flow_from_directory(train_dir,
                                                                   target_size=(224, 224),
                                                                   batch_size=32,
                                                                   class_mode="categorical")

val_datagen = ImageDataGenerator(rescale=1/255.)
test_datagen = ImageDataGenerator(rescale=1/255.)
val_data = val_datagen.flow_from_directory(val_dir,
                                           target_size=(224, 224),
                                           batch_size=32,
                                           class_mode="categorical")
test_data = test_datagen.flow_from_directory(test_dir,
                                             target_size=(224, 224),
                                             batch_size=32,
                                             class_mode="categorical")
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPool2D, Flatten, Dense
from tensorflow.keras.optimizers.legacy import Adam

model_2 = Sequential([
    Conv2D(10, 3, activation="relu", input_shape=(224, 224, 3)),
    Conv2D(10, 3, activation="relu"),
    MaxPool2D(),
    Conv2D(10, 3, activation="relu"),
    Conv2D(10, 3, activation="relu"),
    MaxPool2D(),
    Conv2D(10, 3, activation="relu"),
    Conv2D(10, 3, activation="relu"),
    MaxPool2D(),
    Conv2D(10, 3, activation="relu"),
    Conv2D(10, 3, activation="relu"),
    MaxPool2D(),
    Flatten(),
    Dense(49, activation="softmax")
])

model_2.compile(loss="categorical_crossentropy",
                optimizer=Adam(),
                metrics=["accuracy"])

history_2 = model_2.fit(train_data_augmented,
                        epochs=5,
                        steps_per_epoch=len(train_data_augmented),
                        validation_data=val_data,
                        validation_steps=len(val_data))
I got this error:
InvalidArgumentError: Graph execution error:

Detected at node categorical_crossentropy/softmax_cross_entropy_with_logits defined at (most recent call last):
  ...
  File "<ipython-input-32-58037f05cff1>", line 26, in <module>
    history_2=model_2.fit(train_data,
  File "/usr/local/lib/python3.9/dist-packages/keras/utils/traceback_utils.py", line 65, in error_handler
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1694, in fit
    val_logs = self.evaluate(
  File "/usr/local/lib/python3.9/dist-packages/keras/utils/traceback_utils.py", line 65, in error_handler
    return fn(*args, **kwargs)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 2040, in evaluate
    tmp_logs = self.test_function(iterator)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1820, in test_function
    return step_function(self, iterator)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1804, in step_function
    outputs = model.distribute_strategy.run(run_step, args=(data,))
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1792, in run_step
    outputs = model.test_step(data)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1758, in test_step
    self.compute_loss(x, y, y_pred, sample_weight)
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/training.py", line 1082, in compute_loss
    return self.compiled_loss(
  File "/usr/local/lib/python3.9/dist-packages/keras/engine/compile_utils.py", line 265, in __call__
    loss_value = loss_obj(y_t, y_p, sample_weight=sw)
  File "/usr/local/lib/python3.9/dist-packages/keras/losses.py", line 152, in __call__
    losses = call_fn(y_true, y_pred)
  File "/usr/local/lib/python3.9/dist-packages/keras/losses.py", line 284, in call
    return ag_fn(y_true, y_pred, **self._fn_kwargs)
  File "/usr/local/lib/python3.9/dist-packages/keras/losses.py", line 2004, in categorical_crossentropy
    return backend.categorical_crossentropy(
  File "/usr/local/lib/python3.9/dist-packages/keras/backend.py", line 5538, in categorical_crossentropy
    return tf.nn.softmax_cross_entropy_with_logits(

Node: categorical_crossentropy/softmax_cross_entropy_with_logits
logits and labels must be broadcastable: logits_size=[32,49] labels_size=[32,42]
    [[{{node categorical_crossentropy/softmax_cross_entropy_with_logits}}]] [Op:__inference_test_function_31195]
When I use TensorFlow, I get this error. I don't know how to solve it. Please help me.
I just got a similar error while using the ImageDataGenerator, so you should look at this part of the error your code is throwing: (...) "logits and labels must be broadcastable: logits_size=[32,49] labels_size=[32,42]". What is going wrong is that you defined your model with 49 outputs in the final Dense layer, but the label size actually seems to be 42. Note that the traceback goes through evaluate/test_step, so that batch of labels comes from your validation generator: val_dir apparently contains 42 class folders, while the model outputs 49. Make the last Dense layer match the number of classes your generators report (the "Found N images belonging to M classes" message printed by flow_from_directory), and make sure train_dir, val_dir and test_dir all contain the same class folders.
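As a rough, untested sketch (reusing the variable names from your question; num_classes and class_indices are attributes of the iterators returned by flow_from_directory), you can print how many class folders each generator actually found and size the output layer from the data instead of hard-coding 49:

# Check how many class folders each generator picked up
print(train_data_augmented.num_classes, train_data_augmented.class_indices)
print(val_data.num_classes, val_data.class_indices)
print(test_data.num_classes, test_data.class_indices)

# Size the final layer from the training generator,
# assuming train/val/test all contain the same class folders
num_classes = train_data_augmented.num_classes

# ...then in model_2, replace Dense(49, activation="softmax") with:
# Dense(num_classes, activation="softmax")

If those three prints disagree (for example 49 vs 42), the split directories themselves are out of sync, and you need to give every split the same subfolders before training again.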