ValueError: Shapes (1, 4) and (1, 3) are incompatible - python

I am using a simple neural network to classify iris.csv in a Jupyter notebook.
I set one hidden layer with 10 hidden units.
'iris.csv' has 4 features and 1 label column, and the label has 3 species.
So I set input_shape=(4,) and a 3-unit output Dense layer.
Code:
import seaborn
import numpy
import pandas
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.utils import np_utils
data = pandas.read_csv('./data/iris.csv')
data.head()
class_as_one_hot_encoding = pandas.get_dummies(data['Species'])
del data['Species']
data = pandas.concat([data, class_as_one_hot_encoding], axis=1)
data.head()
x = data.values[:,:4]
y = data.values[:,4:]
train_x, test_x, train_y, test_y = train_test_split(x, y, train_size=0.7)
model = Sequential()
model.add(Dense(10, input_shape=(4,)))
model.add(Activation('sigmoid'))
model.add(Dense(3))
model.add(Activation('softmax'))
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=["accuracy"])
model_info = model.fit(train_x, train_y, epochs=100, batch_size=1)
This raises ValueError: Shapes (1, 4) and (1, 3) are incompatible; the full error message is below.
I do not know how to fix it.
Error:
Epoch 1/100
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-23-b5e5622d976c> in <module>
8 model.add(Activation('softmax'))
9 model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=["accuracy"])
---> 10 model_info = model.fit(train_x, train_y, epochs=100, batch_size=1 )
~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1096 batch_size=batch_size):
1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator)
1099 if data_handler.should_sync:
1100 context.async_wait()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
694 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
695 self._concrete_stateful_fn = (
--> 696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
697 *args, **kwds))
698
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3063 arg_names = base_arg_names + missing_arg_names
3064 graph_function = ConcreteFunction(
-> 3065 func_graph_module.func_graph_from_py_func(
3066 self._name,
3067 self._python_function,
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:806 train_function *
return step_function(self, iterator)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:796 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:789 run_step **
outputs = model.train_step(data)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:748 train_step
loss = self.compiled_loss(
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:204 __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:149 __call__
losses = ag_call(y_true, y_pred)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:253 call **
return ag_fn(y_true, y_pred, **self._fn_kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:1535 categorical_crossentropy
return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\keras\backend.py:4687 categorical_crossentropy
target.shape.assert_is_compatible_with(output.shape)
C:\Users\Admin\anaconda3\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1134 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (1, 4) and (1, 3) are incompatible
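A hedged debugging sketch (not from the original post): the traceback says the targets have 4 columns while the softmax layer outputs 3, which usually means data.values[:, 4:] picked up an extra column, e.g. an Id column in iris.csv. The column names below are assumptions based on the common Kaggle iris.csv layout:
print(data.columns)      # is there an extra column such as 'Id'?
print(x.shape, y.shape)  # y should be (n_samples, 3); the error suggests it is (n, 4)
# If an 'Id' column is present, select columns by name instead of position
# (these names are hypothetical, taken from the Kaggle iris.csv):
feature_cols = ['SepalLengthCm', 'SepalWidthCm', 'PetalLengthCm', 'PetalWidthCm']
label_cols = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']
x = data[feature_cols].values
y = data[label_cols].values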

Related

ValueError: No gradients provided for any variable while doing regression for integer values, which include negatives using keras

I have a problem where I need to predict some integers from an image, and this includes some negative integers too. I have done some research and came across the Poisson loss, which does count regression; however, this does not work for me because I also need to predict negative integers, which makes the Poisson loss output nan. I was thinking of using a Lambda layer to round the output of my model, but this resulted in this error:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/var/folders/nc/c4mgwn897qbg8g52tp3mhbjr0000gp/T/ipykernel_8618/1788039059.py in <module>
----> 1 model.fit(x_train, y_train,callbacks=[callback], epochs = 999)
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1181 _r=1):
1182 callbacks.on_train_batch_begin(step)
-> 1183 tmp_logs = self.train_function(iterator)
1184 if data_handler.should_sync:
1185 context.async_wait()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:855 train_function *
return step_function(self, iterator)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:845 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
return fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:838 run_step **
outputs = model.train_step(data)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:799 train_step
self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:530 minimize
return self.apply_gradients(grads_and_vars, name=name)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:630 apply_gradients
grads_and_vars = optimizer_utils.filter_empty_gradients(grads_and_vars)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/optimizer_v2/utils.py:75 filter_empty_gradients
raise ValueError("No gradients provided for any variable: %s." %
ValueError: No gradients provided for any variable: ['conv2d_2/kernel:0', 'conv2d_2/bias:0', 'conv2d_3/kernel:0', 'conv2d_3/bias:0', 'dense_3/kernel:0', 'dense_3/bias:0', 'dense_4/kernel:0', 'dense_4/bias:0', 'dense_5/kernel:0', 'dense_5/bias:0'].
Here is my implementation of the Lambda layer approach:
filter_size = (3,3)
filters = 32
pool = 2
input_layer = keras.Input(shape=(100,300,1))
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(input_layer)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(conv_extractor)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
#conv_extractor = layers.Reshape(target_shape=(100 // (pool ** 2), (100 // (pool ** 2)) * filters))(conv_extractor)
shape = ((100 // 4), (300 // 4) * 32)
#conv_extractor = layers.Dense(512, activation='relu')(conv_extractor)
conv_extractor = layers.Reshape(target_shape=(23,2336))(conv_extractor)
gru_1 = GRU(512, return_sequences=True)(conv_extractor)
gru_1b = GRU(512, return_sequences=True, go_backwards=True)(conv_extractor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = GRU(512, return_sequences=True)(gru1_merged)
gru_2b = GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
x = layers.concatenate([gru_2, gru_2b]) # move concatenate layer aside
x = layers.Flatten()(x)
inner = layers.Dense(30, activation='LeakyReLU')(x)
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
inner = layers.Lambda(keras.backend.round)(inner)
model = Model(input_layer,inner)
model.compile(loss = "MeanSquaredError", optimizer = optimizers.Adam(2e-4), metrics=['accuracy'])
model.fit(x_train, y_train, epochs = 999)
Why did I get this error, and how can I fix it? If it's not fixable, is there another way of solving my problem (e.g. by modifying the Poisson loss function)?
keras.backend.round is not differentiable, so the rounding Lambda cuts the gradient flow and the optimizer receives no gradients. Drop the rounding layer and instead shift the targets by the smallest value (which is negative here) so that everything is >= 0. Then use Poisson.
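A minimal sketch of that shift, assuming x_train/y_train hold the training data (variable names are mine, not from the question):
import numpy as np
# Shift targets so the smallest (negative) value maps to 0, making them
# valid for the Poisson loss; undo the shift on predictions afterwards.
shift = np.min(y_train)            # most negative target value
y_train_shifted = y_train - shift  # all values now >= 0
model.compile(loss="poisson", optimizer=optimizers.Adam(2e-4))
model.fit(x_train, y_train_shifted, epochs=999)
# predictions = model.predict(x_test) + shift  # map back to the original range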

ValueError: Dimensions must be equal keras

I have a model of this structure:
filter_size = (3,3)
filters = 32
pool = 2
input_layer = keras.Input(shape=(100,300,1))
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(input_layer)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
conv_extractor = layers.Conv2D(filters,filter_size, activation='relu')(conv_extractor)
conv_extractor = layers.MaxPooling2D(pool_size=(pool, pool))(conv_extractor)
#conv_extractor = layers.Reshape(target_shape=(100 // (pool ** 2), (100 // (pool ** 2)) * filters))(conv_extractor)
shape = ((100 // 4), (300 // 4) * 32)
#conv_extractor = layers.Dense(512, activation='relu')(conv_extractor)
conv_extractor = layers.Reshape(target_shape=(23,2336))(conv_extractor)
gru_1 = GRU(512, return_sequences=True)(conv_extractor)
gru_1b = GRU(512, return_sequences=True, go_backwards=True)(conv_extractor)
gru1_merged = add([gru_1, gru_1b])
gru_2 = GRU(512, return_sequences=True)(gru1_merged)
gru_2b = GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
inner = layers.Dense(30, activation='LeakyReLU')(concatenate([gru_2, gru_2b]))
inner = layers.Dense(10, activation='LeakyReLU')(inner)
inner = layers.Dense(3, activation='LeakyReLU')(inner)
model = Model(input_layer,inner)
model.compile(loss = "poisson", optimizer = optimizers.Adam(2e-4), metrics=['accuracy'])
All of the above seems to work, but when trying to train using model.fit(x_train, y_train, epochs=3) I get the following error:
ValueError Traceback (most recent call last)
/var/folders/nc/c4mgwn897qbg8g52tp3mhbjr0000gp/T/ipykernel_3907/1977739458.py in <module>
----> 1 model.fit(x_train, y_train,epochs=3)
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1181 _r=1):
1182 callbacks.on_train_batch_begin(step)
-> 1183 tmp_logs = self.train_function(iterator)
1184 if data_handler.should_sync:
1185 context.async_wait()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:855 train_function *
return step_function(self, iterator)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:845 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
return fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:838 run_step **
outputs = model.train_step(data)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/training.py:800 train_step
self.compiled_metrics.update_state(y, y_pred, sample_weight)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/engine/compile_utils.py:460 update_state
metric_obj.update_state(y_t, y_p, sample_weight=mask)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/utils/metrics_utils.py:86 decorated
update_op = update_state_fn(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:177 update_state_fn
return ag_update_state(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:664 update_state **
matches = ag_fn(y_true, y_pred, **self._fn_kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
return target(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/keras/metrics.py:3485 sparse_categorical_accuracy
return math_ops.cast(math_ops.equal(y_true, y_pred), backend.floatx())
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/util/dispatch.py:206 wrapper
return target(*args, **kwargs)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/math_ops.py:1729 equal
return gen_math_ops.equal(x, y, name=name)
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/ops/gen_math_ops.py:3228 equal
_, _, _op, _outputs = _op_def_library._apply_op_helper(
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/op_def_library.py:748 _apply_op_helper
op = g._create_op_internal(op_type_name, inputs, dtypes=None,
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/func_graph.py:599 _create_op_internal
return super(FuncGraph, self)._create_op_internal( # pylint: disable=protected-access
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:3557 _create_op_internal
ret = Operation(
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:2041 __init__
self._c_op = _create_c_op(self._graph, node_def, inputs,
/Users/jr123456jr987654321/opt/anaconda3/lib/python3.8/site-packages/tensorflow/python/framework/ops.py:1883 _create_c_op
raise ValueError(str(e))
ValueError: Dimensions must be equal, but are 3 and 23 for '{{node Equal}} = Equal[T=DT_FLOAT, incompatible_shape_error=true](Cast_1, Cast_2)' with input shapes: [?,3], [?,23].
FYI: the shape of x_train is (2000, 100, 300, 1) and y_train is (2000, 3).
Your model output is (None, 23, 3), while it should be (None, 3) to match your target variable (y_train), which is (2000, 3).
Since the input to your Dense layers is 3-dimensional (after the concatenate layer), their output will also be 3D, i.e. (None, 23, 3). Simply add a Flatten layer before the Dense layers:
gru_2b = layers.GRU(512, return_sequences=True, go_backwards=True)(gru1_merged)
x = layers.concatenate([gru_2, gru_2b]) # move concatenate layer aside
x = layers.Flatten()(x) # add this
inner = layers.Dense(30, activation='LeakyReLU')(x)
Or you can remove return_sequences=True from your last GRU layers like this:
gru_2 = layers.GRU(512)(gru1_merged) # remove return_sequences=True
gru_2b = layers.GRU(512, go_backwards=True)(gru1_merged) # remove return_sequences=True
inner = layers.Dense(30, activation='LeakyReLU')(concatenate([gru_2, gru_2b]))
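Either way, after rebuilding the remaining Dense(10) and Dense(3) layers, a quick model.summary() check confirms the fix before training:
model = Model(input_layer, inner)
model.summary()  # the final layer should now report output shape (None, 3)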

Why does my program keep on crashing after I write "epoch" even when I'm following a tutorial?

So I'm using Python for some basic stuff, and I was following a tutorial on handwritten digits. But whenever I do:
model.fit(x_train, y_train, epochs=3)
it always crashes....
This is my piece of code from the tutorial:
import tensorflow as tf
mnist= tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test)= mnist.load_data()
x_train= tf.keras.utils.normalize(x_train, axis=1)
x_test= tf.keras.utils.normalize(x_test, axis=1)
model= tf.keras.models.Sequential()
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(128, activation= tf.nn.relu))
model.add(tf.keras.layers.Dense(128, activation= tf.nn.relu))
model.add(tf.keras.layers.Dense(10, activation= tf.nn.softmax))
#always trying to minimize "loss"
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train, epochs=3)
and this is the error that I get:
Epoch 1/3
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-22-67c2d7c319b1> in <module>
19 loss='categorical_crossentropy',
20 metrics=['accuracy'])
---> 21 model.fit(x_train, y_train, epochs=3)
~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1181 _r=1):
1182 callbacks.on_train_batch_begin(step)
-> 1183 tmp_logs = self.train_function(iterator)
1184 if data_handler.should_sync:
1185 context.async_wait()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
887
888 with OptionalXlaContext(self._jit_compile):
--> 889 result = self._call(*args, **kwds)
890
891 new_tracing_count = self.experimental_get_tracing_count()
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
761 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
762 self._concrete_stateful_fn = (
--> 763 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
764 *args, **kwds))
765
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3048 args, kwargs = None, None
3049 with self._lock:
-> 3050 graph_function, _ = self._maybe_define_function(args, kwargs)
3051 return graph_function
3052
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3442
3443 self._function_cache.missed.add(call_context_key)
-> 3444 graph_function = self._create_graph_function(args, kwargs)
3445 self._function_cache.primary[cache_key] = graph_function
3446
~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3277 arg_names = base_arg_names + missing_arg_names
3278 graph_function = ConcreteFunction(
-> 3279 func_graph_module.func_graph_from_py_func(
3280 self._name,
3281 self._python_function,
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
997 _, original_func = tf_decorator.unwrap(python_func)
998
--> 999 func_outputs = python_func(*func_args, **func_kwargs)
1000
1001 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
670 # the function a weak reference to itself to avoid a reference cycle.
671 with OptionalXlaContext(compile_with_xla):
--> 672 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
673 return out
674
~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
984 except Exception as e: # pylint:disable=broad-except
985 if hasattr(e, "ag_error_metadata"):
--> 986 raise e.ag_error_metadata.to_exception(e)
987 else:
988 raise
ValueError: in user code:
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:855 train_function *
return step_function(self, iterator)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:845 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1285 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2833 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:3608 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:838 run_step **
outputs = model.train_step(data)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:796 train_step
loss = self.compiled_loss(
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:204 __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:155 __call__
losses = call_fn(y_true, y_pred)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:259 call **
return ag_fn(y_true, y_pred, **self._fn_kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:206 wrapper
return target(*args, **kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\losses.py:1643 categorical_crossentropy
return backend.categorical_crossentropy(
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\util\dispatch.py:206 wrapper
return target(*args, **kwargs)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\keras\backend.py:4862 categorical_crossentropy
target.shape.assert_is_compatible_with(output.shape)
C:\Users\binju\anaconda3\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1161 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (32, 1) and (32, 10) are incompatible
I've tried other tutorials too, but whenever I reach the model.fit part with epochs, it always seems to crash.
Any help would be appreciated :)
The MNIST dataset has integer labels as y values, as you can read here:
https://www.tensorflow.org/api_docs/python/tf/keras/datasets/mnist/load_data
You are trying to classify the data points into 10 classes with one-hot classification, so you have a 10-dimensional neural network output, which does not correspond to your one-dimensional integer labels.
The solution is to convert your labels into one-hot encoded labels as well; you can find the solution for this problem here:
Convert array of indices to 1-hot encoded numpy array
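A minimal sketch of either fix, assuming TensorFlow 2.x:
# Fix 1: one-hot encode the labels to match the 10-unit softmax output
y_train = tf.keras.utils.to_categorical(y_train, num_classes=10)
y_test = tf.keras.utils.to_categorical(y_test, num_classes=10)
# Fix 2 (alternative): keep the integer labels and use the sparse loss instead
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])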

ValueError: Input 0 of layer conv2d is incompatible with the layer: : expected min_ndim=4, found ndim=3. Full shape received: (2240, 70, 3)

I am working on a CNN-LSTM model, and my dataset contains 25760 images with dimensions (70, 70, 3). During training I came across this error: "ValueError: Input 0 of layer conv2d is incompatible with the layer: : expected min_ndim=4, found ndim=3. Full shape received: (2240, 70, 3)".
Can anyone tell me what it means and how to solve it?
Shape of train data: (25760, 70, 70, 3)
Code:
import numpy as np
import tensorflow as tf
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D, LSTM, TimeDistributed
print(np.shape(X))
u=np.array(X)
v=np.array(y)
model=Sequential()
model.add(TimeDistributed(Conv2D(32,(5,5),padding='same',input_shape=(70,70,3))))
model.add(TimeDistributed(Activation('relu')))
model.add(TimeDistributed(MaxPooling2D(pool_size=(2,2))))
model.add(TimeDistributed(Conv2D(32,(5,5),padding='same')))
model.add(TimeDistributed(Activation('relu')))
model.add(TimeDistributed(MaxPooling2D(pool_size=(2,2))))
model.add(TimeDistributed(Conv2D(64,(5,5),padding='same')))
model.add(TimeDistributed(Activation('relu')))
model.add(TimeDistributed(MaxPooling2D(pool_size=(2,2))))
model.add(TimeDistributed(Conv2D(64,(5,5),padding='same')))
model.add(TimeDistributed(Activation('relu')))
model.add(TimeDistributed(MaxPooling2D(pool_size=(2,2))))
model.add(TimeDistributed(Conv2D(128,(5,5),padding='same')))
model.add(TimeDistributed(Activation('relu')))
model.add(TimeDistributed(MaxPooling2D(pool_size=(2,2))))
model.add(Flatten())
model.add(LSTM(100))
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dense(8))
model.add(Activation('softmax'))
model.compile(loss="sparse_categorical_crossentropy",optimizer="adam",metrics=['accuracy'])
model.fit(u,v,batch_size=32,epochs=20,validation_split=0.2)
model.save('cnn_lstm_1.h5')
Traceback:
ValueError Traceback (most recent call last)
<ipython-input-5-c86147b70dfb> in <module>
46 model.compile(loss="sparse_categorical_crossentropy",optimizer="adam",metrics=['accuracy'])
47
---> 48 model.fit(u,v,batch_size=32,epochs=20,validation_split=0.2)
49
50 model.save('cnn_lstm_1.h5')
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1098 _r=1):
1099 callbacks.on_train_batch_begin(step)
-> 1100 tmp_logs = self.train_function(iterator)
1101 if data_handler.should_sync:
1102 context.async_wait()
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
826 tracing_count = self.experimental_get_tracing_count()
827 with trace.Trace(self._name) as tm:
--> 828 result = self._call(*args, **kwds)
829 compiler = "xla" if self._experimental_compile else "nonXla"
830 new_tracing_count = self.experimental_get_tracing_count()
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
869 # This is the first call of __call__, so we have to initialize.
870 initializers = []
--> 871 self._initialize(args, kwds, add_initializers_to=initializers)
872 finally:
873 # At this point we know that the initialization is complete (or less
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
724 self._concrete_stateful_fn = (
725 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 726 *args, **kwds))
727
728 def invalid_creator_scope(*unused_args, **unused_kwds):
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2967 args, kwargs = None, None
2968 with self._lock:
-> 2969 graph_function, _ = self._maybe_define_function(args, kwargs)
2970 return graph_function
2971
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3359
3360 self._function_cache.missed.add(call_context_key)
-> 3361 graph_function = self._create_graph_function(args, kwargs)
3362 self._function_cache.primary[cache_key] = graph_function
3363
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3204 arg_names=arg_names,
3205 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3206 capture_by_value=self._capture_by_value),
3207 self._function_attributes,
3208 function_spec=self.function_spec,
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
988 _, original_func = tf_decorator.unwrap(python_func)
989
--> 990 func_outputs = python_func(*func_args, **func_kwargs)
991
992 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
632 xla_context.Exit()
633 else:
--> 634 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
635 return out
636
/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
975 except Exception as e: # pylint:disable=broad-except
976 if hasattr(e, "ag_error_metadata"):
--> 977 raise e.ag_error_metadata.to_exception(e)
978 else:
979 raise
ValueError: in user code:
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:805 train_function *
return step_function(self, iterator)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:795 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:1259 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2730 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:3417 _call_for_each_replica
return fn(*args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:788 run_step **
outputs = model.train_step(data)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:754 train_step
y_pred = self(x, training=True)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:1012 __call__
outputs = call_fn(inputs, *args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/sequential.py:389 call
outputs = layer(inputs, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:1012 __call__
outputs = call_fn(inputs, *args, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/layers/wrappers.py:241 call
y = self.layer(inputs, **kwargs)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/base_layer.py:998 __call__
input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
/opt/conda/lib/python3.7/site-packages/tensorflow/python/keras/engine/input_spec.py:239 assert_input_compatibility
str(tuple(shape)))
ValueError: Input 0 of layer conv2d is incompatible with the layer: : expected min_ndim=4, found ndim=3. Full shape received: (2240, 70, 3)
You are feeding a sequence of images, right?
If so, then your dataset should have the following dimensions:
batch_size x sequence_length x img_width x img_height x channels
For example, a typical batch of size 32 with a sequence length of 5 would be 32x5x70x70x3.
You can convert your dataset using the pad_sequences() function so that its shape matches the training batch shape. Refer to Keras pad_sequences.
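As an illustration, a hedged sketch that groups consecutive frames into fixed-length sequences (seq_len = 5 and the per-sequence label choice are assumptions, not from the question):
import numpy as np
seq_len = 5                            # assumed sequence length; 25760 frames / 5 = 5152 sequences
u = np.array(X)                        # (25760, 70, 70, 3)
u = u.reshape(-1, seq_len, 70, 70, 3)  # (5152, 5, 70, 70, 3)
# One label per sequence; using the last frame's label is an assumption.
v = np.array(y).reshape(-1, seq_len)[:, -1]
# The first TimeDistributed layer then needs the per-frame input shape:
# model.add(TimeDistributed(Conv2D(32, (5, 5), padding='same'),
#                           input_shape=(seq_len, 70, 70, 3)))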

How to use 'Recall' as metric in keras classifier?

How can I use 'Recall' and other metrics in a Keras classifier? The following code only works for accuracy, but if I change the metric to recall it fails.
Versions
"""
[('numpy', '1.19.1'),
('pandas', '1.1.1'),
('sklearn', '0.23.2'),
('tensorflow', '2.3.0'),
('keras', '2.4.3')]
"""
MWE
import numpy as np
import pandas as pd
import sklearn
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers.core import Dense
SEED = 100
np.random.seed(100)
X = np.random.random((20, 3))
y = np.random.randint(0,2,size=20)
print(X.shape, y.shape) # (20, 3) (20,)
n_inputs = X.shape[1]
model = Sequential([
    Dense(n_inputs, input_shape=(n_inputs, ), activation='relu'),
    Dense(32, activation='relu'),
    Dense(2, activation='softmax')
])
METRIC = 'Recall' # Recall fails
# METRIC = 'accuracy' # accuracy works
model.compile('adam',loss='binary_crossentropy',metrics=[METRIC])
model.fit(X, y,validation_split=0.2,epochs=1)
Error
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-17-b372311b0ed4> in <module>
38 epochs=10,
39 shuffle=True,
---> 40 verbose=5)
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1096 batch_size=batch_size):
1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator)
1099 if data_handler.should_sync:
1100 context.async_wait()
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
695 self._concrete_stateful_fn = (
696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 697 *args, **kwds))
698
699 def invalid_creator_scope(*unused_args, **unused_kwds):
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:806 train_function *
return step_function(self, iterator)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:796 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:1211 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/distribute/distribute_lib.py:2945 _call_for_each_replica
return fn(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:789 run_step **
outputs = model.train_step(data)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:759 train_step
self.compiled_metrics.update_state(y, y_pred, sample_weight)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/engine/compile_utils.py:409 update_state
metric_obj.update_state(y_t, y_p, sample_weight=mask)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/utils/metrics_utils.py:90 decorated
update_op = update_state_fn(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/metrics.py:176 update_state_fn
return ag_update_state(*args, **kwargs)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/metrics.py:1410 update_state **
sample_weight=sample_weight)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/keras/utils/metrics_utils.py:353 update_confusion_matrix_variables
y_pred.shape.assert_is_compatible_with(y_true.shape)
/Users/poudel/opt/miniconda3/envs/tf2/lib/python3.7/site-packages/tensorflow/python/framework/tensor_shape.py:1134 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (None, 2) and (None, 1) are incompatible
Change the final layer's output from 2 units to 1. The built-in Recall metric compares y_pred with y_true element-wise, so a 2-column softmax output cannot be matched against the (None, 1) integer labels; a single-unit output fixes the shape mismatch.
import numpy as np
import pandas as pd
import sklearn
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers.core import Dense
SEED = 100
np.random.seed(100)
X = np.random.random((20, 3))
y = np.random.randint(0,2,size=20)
print(X.shape, y.shape) # (20, 3) (20,)
n_inputs = X.shape[1]
model = Sequential([
    Dense(n_inputs, input_shape=(n_inputs, ), activation='relu'),
    Dense(32, activation='relu'),
    Dense(1, activation='sigmoid')  # sigmoid keeps the output in [0, 1] for binary_crossentropy
])
METRIC = 'Recall' # Recall now works with a 1-unit output
# METRIC = 'accuracy' # accuracy works too
model.compile('adam',loss='binary_crossentropy',metrics=[METRIC])
model.fit(X, y,validation_split=0.2,epochs=1)
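An alternative (my suggestion, not part of the original answer) is to keep the 2-unit softmax and one-hot encode the labels instead, pointing Recall at the positive class:
from keras.utils import to_categorical
y_onehot = to_categorical(y, num_classes=2)  # (20, 2)
model2 = Sequential([
    Dense(n_inputs, input_shape=(n_inputs,), activation='relu'),
    Dense(32, activation='relu'),
    Dense(2, activation='softmax')
])
model2.compile('adam', loss='categorical_crossentropy',
               metrics=[keras.metrics.Recall(class_id=1)])
model2.fit(X, y_onehot, validation_split=0.2, epochs=1)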
