Localisation CNN in Keras - python

I have a machine learning CNN which is supposed to classify and localise objects present in an image. The input is an RGB image of dimensions (448, 448, 3), and the output is 24 values for the bounding box coordinates (there are multiple objects in an image, a maximum of 6) plus 6 ids of the objects present. Here is how I defined the model:
def define_model():
    """Build a two-headed CNN for joint classification and localisation.

    Input: RGB image of shape (448, 448, 3).
    Returns a Keras Model with two outputs:
      - 'class_of_objs': 6 sigmoid units (one per possible object id).
      - 'bounding_box': 24 sigmoid units (up to 6 boxes x 4 coordinates).
    """
    # Fixed: the original line was missing the closing parenthesis on Input(...).
    # Renamed from `input` to avoid shadowing the builtin.
    img_input = Input(shape=(448, 448, 3))
    x = Conv2D(32, (3, 3), activation='relu')(img_input)
    x = MaxPooling2D((3, 3))(x)
    x = Conv2D(64, (3, 3), activation='relu')(x)
    x = MaxPooling2D((3, 3))(x)
    x = Conv2D(64, (3, 3), activation='relu')(x)
    x = Conv2D(32, (3, 3), activation='relu')(x)
    # GlobalAveragePooling2D already produces a flat (batch, channels) tensor,
    # so the Flatten() calls the original applied to it were no-ops; both heads
    # can branch directly from `x`.
    x = GlobalAveragePooling2D()(x)

    # Classification head: light dropout, then one sigmoid per object id.
    classification_output_head = Dropout(0.1)(x)
    classification_output_head = Dense(
        6, activation='sigmoid', name='class_of_objs')(classification_output_head)

    # Regression head: small MLP ending in 24 sigmoid-bounded coordinates
    # (assumes box coordinates are normalised to [0, 1] — TODO confirm).
    regression_for_bounding_box_output_head = Dense(64, activation='relu')(x)
    regression_for_bounding_box_output_head = Dense(
        32, activation='relu')(regression_for_bounding_box_output_head)
    regression_for_bounding_box_output_head = Dense(
        24, activation='sigmoid', name='bounding_box')(regression_for_bounding_box_output_head)

    return Model(inputs=[img_input],
                 outputs=[classification_output_head, regression_for_bounding_box_output_head])
I then define the model:
model = define_model()
Heres the rest:
losses = {'class_of_objs': 'sparse_categorical_crossentropy',
'bounding_box': 'mse'}
model.compile('adam', loss=losses, metrics=['mae'])
tloader = tf.data.Dataset.from_tensor_slices((x, y_class, y_bbox))
model.fit(tloader,epochs=100)
When I run model.fit I get this error:
Epoch 1/100
WARNING:tensorflow:Model was constructed with shape (None, 448, 448, 3) for input Tensor("input_25:0", shape=(None, 448, 448, 3), dtype=float32), but it was called on an input with incompatible shape (448, 448, 3).
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) <ipython-input-308-70dc0cc6919c> in <module>
----> 1 model.fit(tloader,epochs=100)
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing) 1096 batch_size=batch_size): 1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator) 1099 if data_handler.should_sync: 1100 context.async_wait()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected(
# pylint: disable=protected-access
697 *args, **kwds))
698
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args,
**kwargs) 2853 args, kwargs = None, None 2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs) 2856 return graph_function 2857
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs) 3211 3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs) 3214 self._function_cache.primary[cache_key] = graph_function 3215 return graph_function, args, kwargs
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes) 3063 arg_names = base_arg_names + missing_arg_names 3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func( 3066 self._name, 3067 self._python_function,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
How do I fix it?

Before calling model.fit, you have to compile the model with model.compile, on which you set the loss and the optimizer, like this: model.compile(optimizer="Adam", loss="mse", metrics=["mae"])

Related

ValueError: No gradients provided for any variable while doing regression for integer values, which include negatives using keras

I have a problem where I need to predict some integers from an image. The problem is that this includes some negative integers too. I have done some research and came across Poisson, which does count regression; however, this does not work because I also need to predict some negative integers, resulting in Poisson outputting nan as its loss. I was thinking of using Lambda to round the output of my model, however this resulted in this error:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/var/folders/nc/c4mgwn897qbg8g52tp3mhbjr0000gp/T/ipykernel_8618/1788039059.py in <module>
----> 1 model.fit(x_train, y_train,callbacks=[callback], epochs = 999)
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1181 _r=1):
1182 callbacks.on_train_batch_begin(step)
-> 1183 tmp_logs = self.train_function(iterator)
1184 if data_handler.should_sync:
1185 context.async_wait()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:855 train_function *
return step_function(self, iterator)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:845 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
return fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:838 run_step **
outputs = model.train_step(data)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:799 train_step
self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:530 minimize
return self.apply_gradients(grads_and_vars, name=name)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:630 apply_gradients
grads_and_vars = optimizer_utils.filter_empty_gradients(grads_and_vars)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/utils.py:75 filter_empty_gradients
raise ValueError("No gradients provided for any variable: %s." %
ValueError: No gradients provided for any variable: ['conv2d_2/kernel:0', 'conv2d_2/bias:0', 'conv2d_3/kernel:0', 'conv2d_3/bias:0', 'dense_3/kernel:0', 'dense_3/bias:0', 'dense_4/kernel:0', 'dense_4/bias:0', 'dense_5/kernel:0', 'dense_5/bias:0'].
Here is my implimentation of the Lambda layer thing:
filter_size = (3,3)
filters = 32
pool = 2
input_layer = keras.Input(shape=(100,300,1))
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(input_layer)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(conv_extractor)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
#conv_extractor = layers.Reshape(target_shape=(100 // (pool ** 2), (100 // (pool ** 2)) * filters))(conv_extractor)
shape = ((100 // 4), (300 // 4) * 32)
#conv_extractor = layers.Dense(512, activation='relu')(conv_extractor)
conv_extractor = layers.Reshape(target_shape=(23,2336))(conv_extractor)
gru_1 = GRU(512, return_sequences=True)(conv_extractor)
gru_1b = GRU(512, return_sequences=True, go_backwards=True)(conv_extractor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = GRU(512, return_sequences=True)(gru1_merged)
gru_2b = GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
x = layers.concatenate([gru_2, gru_2b]) # move concatenate layer aside
x = layers.Flatten()(x)
inner = layers.Dense(30, activation='LeakyReLU')(x)
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
inner = layers.Lambda(keras.backend.round)(inner)
model = Model(input_layer,inner)
model.compile(loss = "MeanSquaredError", optimizer = optimizers.Adam(2e-4), metrics=['accuracy'])
model.fit(x_train, y_train, epochs = 999)
Why did I get this error? And how can I fix it? If it's not fixable, is there another way of solving my problem (e.g by modifying the poisson loss function)?
Add the absolute value of the smallest value (which in this case is negative) so that everything is >= 0. Then use Poisson.

ValueError: Dimensions must be equal keras

I have a model of this structure:
filter_size = (3,3)
filters = 32
pool = 2
input_layer = keras.Input(shape=(100,300,1))
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(input_layer)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(conv_extractor)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
#conv_extractor = layers.Reshape(target_shape=(100 // (pool ** 2), (100 // (pool ** 2)) * filters))(conv_extractor)
shape = ((100 // 4), (300 // 4) * 32)
#conv_extractor = layers.Dense(512, activation='relu')(conv_extractor)
conv_extractor = layers.Reshape(target_shape=(23,2336))(conv_extractor)
gru_1 = GRU(512, return_sequences=True)(conv_extractor)
gru_1b = GRU(512, return_sequences=True, go_backwards=True)(conv_extractor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = GRU(512, return_sequences=True)(gru1_merged)
gru_2b = GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
inner = layers.Dense(30, activation='LeakyReLU')(concatenate([gru_2, gru_2b]))
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
model = Model(input_layer,inner)
model.compile(loss = "poisson", optimizer = optimizers.Adam(2e-4), metrics=['accuracy'])
All of the above seems to work, when trying to train using model.fit(x_train, y_train,epochs=3) I get the following error:
ValueError Traceback (most recent call last)
/var/folders/nc/c4mgwn897qbg8g52tp3mhbjr0000gp/T/ipykernel_3907/1977739458.py in <module>
----> 1 model.fit(x_train, y_train,epochs=3)
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1181 _r=1):
1182 callbacks.on_train_batch_begin(step)
-> 1183 tmp_logs = self.train_function(iterator)
1184 if data_handler.should_sync:
1185 context.async_wait()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:855 train_function *
return step_function(self, iterator)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:845 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
return fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:838 run_step **
outputs = model.train_step(data)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:800 train_step
self.compiled_metrics.update_state(y, y_pred, sample_weight)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/compile_utils.py:460 update_state
metric_obj.update_state(y_t, y_p, sample_weight=mask)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/utils/metrics_utils.py:86 decorated
update_op = update_state_fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:177 update_state_fn
return ag_update_state(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:664 update_state **
matches = ag_fn(y_true, y_pred, **self._fn_kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
return target(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:3485 sparse_categorical_accuracy
return math_ops.cast(math_ops.equal(y_true, y_pred), backend.floatx())
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
return target(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/math_ops.py:1729 equal
return gen_math_ops.equal(x, y, name=name)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/gen_math_ops.py:3228 equal
_, _, _op, _outputs = _op_def_library._apply_op_helper(
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/op_def_library.py:748 _apply_op_helper
op = g._create_op_internal(op_type_name, inputs, dtypes=None,
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py:599 _create_op_internal
return super(FuncGraph, self)._create_op_internal( # pylint: disable=protected-access
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:3557 _create_op_internal
ret = Operation(
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:2041 __init__
self._c_op = _create_c_op(self._graph, node_def, inputs,
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:1883 _create_c_op
raise ValueError(str(e))
ValueError: Dimensions must be equal, but are 3 and 23 for '{{node Equal}} = Equal[T=DT_FLOAT, incompatible_shape_error=true](Cast_1, Cast_2)' with input shapes: [?,3], [?,23].
FYI: the shape of x_train is 2000,100,300,1 and y_train is 2000,3
Your model output is (None, 23, 3), while it should be (None, 3) to match with your target variable (y_train) which is (2000,3).
Since your Dense layers' input is 3-dimensional (after the concatenate layer), their output will also be 3D (None, 23, 3). Simply add a Flatten layer before the Dense layers:
gru_2b = layers.GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
x = layers.concatenate([gru_2, gru_2b]) # move concatenate layer aside
x = layers.Flatten()(x) # add this
inner = layers.Dense(30, activation='LeakyReLU')(x)
Or you can remove return_sequence=True from your last GRU layers like this:
gru_2 = layers.GRU(512)(gru1_merged) # remove return_sequence=True
gru_2b = layers.GRU(512, go_backwards=True)(gru1_merged) # remove return_sequence=True
inner = layers.Dense(30, activation='LeakyReLU')(concatenate([gru_2, gru_2b]))

Tensorflow Keras error when calling .fit() method - ValueError: 'outputs' must be defined before the loop

I was trying to design a model for probing (if sentence length is encoded in encoder representations). However, I am getting an error after I execute the fit() method. I used a pre-trained model to make a new one (probe_model). So that I can check if any information about the sentence is encoded in the representations output by the earlier model.
I have searched on how to resolve this problem before and removing the steps_per_execution=32 parameter doesn't solve this issue.
It gives the following error:
ValueError: Layer model_2 expects 2 input(s), but it received 1 input tensors. Inputs received: [<tf.Tensor 'replicated_input_1:0' shape=(None, 15) dtype=int64>]
Probe model summary
Base model summary
def get_model():
    """Build a frozen probing classifier on top of a pre-trained NLI model's encoder.

    Loads the saved model from disk, truncates it at the layer named
    'encoder_layer', freezes those weights, and stacks a small trainable
    softmax head (5 classes) on top for the probing task.
    """
    # Pre-trained model restored from a saved .h5 file; its architecture is
    # not visible here.
    model = tf.keras.models.load_model('../input/sentlen/NLI_model.030-0.793.h5')
    layer_name = "encoder_layer"
    # Sub-model spanning the original model's inputs up to the encoder output.
    # NOTE(review): model.input may be a *list* of 2 inputs (the reported error
    # says "Layer model_2 expects 2 input(s)"); feeding the single tensor
    # `probe_input` below would then fail — verify the base model's signature.
    probe_intermediate_layer = tf.keras.Model(inputs=model.input, outputs=model.get_layer(layer_name).output)
    # Probing only: the encoder's weights must not be updated.
    probe_intermediate_layer.trainable = False
    # Variable-length sequences of int64 token ids — TODO confirm dtype/shape
    # against the base model's expected input.
    probe_input = tf.keras.layers.Input(shape=(None,), dtype="int64")
    p = probe_intermediate_layer(probe_input)
    # Trainable probe head: one hidden layer + dropout, then 5-way softmax.
    p = tf.keras.layers.Dense(200, activation = 'relu')(p)
    p = tf.keras.layers.Dropout(0.3)(p)
    outputs = tf.keras.layers.Dense(5, activation = 'softmax')(p)
    probe_model = tf.keras.Model(inputs = probe_input, outputs = outputs)
    return probe_model
# detect and init the TPU
tpu = tf.distribute.cluster_resolver.TPUClusterResolver.connect()
# instantiate a distribution strategy
tpu_strategy = tf.distribute.experimental.TPUStrategy(tpu)
# instantiating the model in the strategy scope creates the model on the TPU
with tpu_strategy.scope():
probe_model = get_model()
probe_model.compile(optimizer='rmsprop',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'],steps_per_execution=32)
probe_model.fit(task_train_1,task_train_labels, epochs = 30, batch_size = 1024)
Error stack:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-20-af206ebfdc18> in <module>
----> 1 probe_model.fit(task_train_1,task_train_labels, epochs = 30, batch_size = 1024)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1098 _r=1):
1099 callbacks.on_train_batch_begin(step)
-> 1100 tmp_logs = self.train_function(iterator)
1101 if data_handler.should_sync:
1102 context.async_wait()
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
826 tracing_count = self.experimental_get_tracing_count()
827 with trace.Trace(self._name) as tm:
--> 828 result = self._call(*args, **kwds)
829 compiler = "xla" if self._experimental_compile else "nonXla"
830 new_tracing_count = self.experimental_get_tracing_count()
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
869 # This is the first call of __call__, so we have to initialize.
870 initializers = []
--> 871 self._initialize(args, kwds, add_initializers_to=initializers)
872 finally:
873 # At this point we know that the initialization is complete (or less
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
724 self._concrete_stateful_fn = (
725 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 726 *args, **kwds))
727
728 def invalid_creator_scope(*unused_args, **unused_kwds):
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2967 args, kwargs = None, None
2968 with self._lock:
-> 2969 graph_function, _ = self._maybe_define_function(args, kwargs)
2970 return graph_function
2971
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3359
3360 self._function_cache.missed.add(call_context_key)
-> 3361 graph_function = self._create_graph_function(args, kwargs)
3362 self._function_cache.primary[cache_key] = graph_function
3363
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3204 arg_names=arg_names,
3205 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3206 capture_by_value=self._capture_by_value),
3207 self._function_attributes,
3208 function_spec=self.function_spec,
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
988 _, original_func = tf_decorator.unwrap(python_func)
989
--> 990 func_outputs = python_func(*func_args, **func_kwargs)
991
992 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
632 xla_context.Exit()
633 else:
--> 634 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
635 return out
636
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
975 except Exception as e: # pylint:disable=broad-except
976 if hasattr(e, "ag_error_metadata"):
--> 977 raise e.ag_error_metadata.to_exception(e)
978 else:
979 raise
ValueError: in user code:
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:811 train_function *
for _ in math_ops.range(self._steps_per_execution):
/opt/conda/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py:414 for_stmt
symbol_names, opts)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py:629 _tf_range_for_stmt
opts)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py:1059 _tf_while_stmt
body, get_state, set_state, init_vars, nulls, symbol_names)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py:1032 _try_handling_undefineds
_verify_loop_init_vars(init_vars, symbol_names, first_iter_vars)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/autograph/operators/control_flow.py:193 _verify_loop_init_vars
raise ValueError(error_msg)
ValueError: 'outputs' must be defined before the loop.

How to use 'Recall' as metric in keras classifier?

How can use use the 'Recall' and other metrics in keras classifier. The following code only works for accuracy but if I change the metric to recall it fails.
Versions
"""
[('numpy', '1.19.1'),
('pandas', '1.1.1'),
('sklearn', '0.23.2'),
('tensorflow', '2.3.0'),
('keras', '2.4.3')]
"""
MWE
# MWE: binary classification where metrics=['Recall'] raises a shape error.
import numpy as np
import pandas as pd
import sklearn
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers.core import Dense

SEED = 100
np.random.seed(SEED)  # use the named constant instead of repeating the literal

# 20 samples, 3 random features, binary integer labels (0/1), shape (20,).
X = np.random.random((20, 3))
y = np.random.randint(0, 2, size=20)
print(X.shape, y.shape)  # (20, 3) (20,) -- was `x`, an undefined name (NameError)

n_inputs = X.shape[1]
model = Sequential([
    Dense(n_inputs, input_shape=(n_inputs, ), activation='relu'),
    Dense(32, activation='relu'),
    # NOTE(review): a 2-unit softmax head emits predictions of shape (None, 2)
    # while the integer labels are (None, 1); tf.keras.metrics.Recall asserts
    # the two shapes are compatible, which is why 'Recall' fails here while
    # 'accuracy' (which argmaxes) works.
    Dense(2, activation='softmax')
])

METRIC = 'Recall' # Recall fails
# METRIC = 'accuracy' # accuracy works
model.compile('adam', loss='binary_crossentropy', metrics=[METRIC])
model.fit(X, y, validation_split=0.2, epochs=1)
Error
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-17-b372311b0ed4> in <module>
38 epochs=10,
39 shuffle=True,
---> 40 verbose=5)
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1096 batch_size=batch_size):
1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator)
1099 if data_handler.should_sync:
1100 context.async_wait()
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
695 self._concrete_stateful_fn = (
696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 697 *args, **kwds))
698
699 def invalid_creator_scope(*unused_args, **unused_kwds):
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:806 train_function *
return step_function(self, iterator)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:796 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:1211 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2945 _call_for_each_replica
return fn(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:789 run_step **
outputs = model.train_step(data)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:759 train_step
self.compiled_metrics.update_state(y, y_pred, sample_weight)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/compile_utils.py:409 update_state
metric_obj.update_state(y_t, y_p, sample_weight=mask)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/utils/metrics_utils.py:90 decorated
update_op = update_state_fn(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/metrics.py:176 update_state_fn
return ag_update_state(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/metrics.py:1410 update_state **
sample_weight=sample_weight)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/utils/metrics_utils.py:353 update_confusion_matrix_variables
y_pred.shape.assert_is_compatible_with(y_true.shape)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/tensor_shape.py:1134 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (None, 2) and (None, 1) are incompatible
Change the final layer's output from 2 units to 1, so the model's prediction shape (None, 1) matches the (None, 1) shape of the integer labels that the Recall metric compares against.
# Fixed MWE: a single sigmoid output unit yields predictions of shape
# (None, 1), matching the (None, 1) integer labels that
# tf.keras.metrics.Recall expects.
import numpy as np
import pandas as pd
import sklearn
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers.core import Dense

SEED = 100
np.random.seed(SEED)

# 20 samples, 3 random features, binary integer labels (0/1).
X = np.random.random((20, 3))
y = np.random.randint(0, 2, size=20)
print(X.shape, y.shape) # (20, 3) (20,)

n_inputs = X.shape[1]
model = Sequential([
    Dense(n_inputs, input_shape=(n_inputs, ), activation='relu'),
    Dense(32, activation='relu'),
    # Was activation='relu': ReLU output is unbounded above, which breaks
    # binary_crossentropy (log of predictions > 1) and makes the 0.5
    # decision threshold used by Recall meaningless. Sigmoid bounds the
    # prediction to (0, 1) as a proper probability.
    Dense(1, activation='sigmoid')
])

METRIC = 'Recall' # Recall now works with a (None, 1) sigmoid output
# METRIC = 'accuracy' # accuracy works too
model.compile('adam', loss='binary_crossentropy', metrics=[METRIC])
model.fit(X, y, validation_split=0.2, epochs=1)

ValueError: Shapes (1, 4) and (1, 3) are incompatible

I am using a simple neural network to classify iris.csv in a Jupyter notebook.
I set one hidden layer with 10 hidden units.
'iris.csv' has 4 features and 1 label column, and the label takes 3 Species values.
So I set input_shape to 4 and used 3 output units in the final Dense layer.
Code:
# Iris classification: 4 numeric features -> 3 one-hot species classes.
import seaborn
import numpy
import pandas
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.utils import np_utils

data = pandas.read_csv('./data/iris.csv')
data.head()

# One-hot encode the species label and append the 3 indicator columns
# to the end of the frame.
class_as_one_hot_encoding = pandas.get_dummies(data['Species'])
del data['Species']
data = pandas.concat([data, class_as_one_hot_encoding], axis=1)
data.head()

# Slice features/labels relative to the label columns instead of hard-coding
# column index 4: if the CSV carries an extra leading column (e.g. an 'Id'
# column), the old `values[:, 4:]` slice swept a feature column into y,
# producing a 4-wide target and the
# "Shapes (1, 4) and (1, 3) are incompatible" error at fit time.
n_classes = class_as_one_hot_encoding.shape[1]  # 3 species
x = data.values[:, :-n_classes]                 # every feature column
y = data.values[:, -n_classes:]                 # exactly the 3 one-hot labels

train_x, test_x, train_y, test_y = train_test_split(x, y, train_size=0.7)

model = Sequential()
# Input width follows the actual feature count rather than a hard-coded 4.
model.add(Dense(10, input_shape=(x.shape[1],)))
model.add(Activation('sigmoid'))
model.add(Dense(n_classes))
model.add(Activation('softmax'))
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=["accuracy"])
model_info = model.fit(train_x, train_y, epochs=100, batch_size=1)
The full error message for "ValueError: Shapes (1, 4) and (1, 3) are incompatible" is below.
I do not know how to fix it.
Error:
Epoch 1/100
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-23-b5e5622d976c> in <module>
8 model.add(Activation('softmax'))
9 model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=["accuracy"])
---> 10 model_info = model.fit(train_x, train_y, epochs=100, batch_size=1 )
~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1096 batch_size=batch_size):
1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator)
1099 if data_handler.should_sync:
1100 context.async_wait()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:806 train_function *
return step_function(self, iterator)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:796 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:789 run_step **
outputs = model.train_step(data)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:748 train_step
loss = self.compiled_loss(
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:204 __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:149 __call__
losses = ag_call(y_true, y_pred)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:253 call **
return ag_fn(y_true, y_pred, **self._fn_kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:1535 categorical_crossentropy
return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\backend.py:4687 categorical_crossentropy
target.shape.assert_is_compatible_with(output.shape)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1134 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (1, 4) and (1, 3) are incompatible

Categories

Resources