I am getting a ValueError when calling model.fit(), and I can't figure out what the mistake is. I think I have done all the steps correctly.
This is my model:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout

model = Sequential()
model.add(Dense(42,activation='relu')) # Input layer
model.add(Dropout(0.25))
model.add(Dense(21,activation='relu')) # Hidden layer
model.add(Dropout(0.25))
model.add(Dense(10,activation='relu')) # Hidden layer
model.add(Dropout(0.25))
model.add(Dense(5,activation='relu')) # Hidden layer
model.add(Dropout(0.25))
model.add(Dense(11,activation='softmax')) # Output layer
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
Epoch 1/100
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-26-9dd45f56d29e> in <module>
----> 1 model.fit(x=scaled_x_train, y=y_train, validation_data=(scaled_x_test, y_test), epochs=100)
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
106 def _method_wrapper(self, *args, **kwargs):
107 if not self._in_multi_worker_mode(): # pylint: disable=protected-access
--> 108 return method(self, *args, **kwargs)
109
110 # Running inside `run_distribute_coordinator` already.
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1096 batch_size=batch_size):
1097 callbacks.on_train_batch_begin(step)
-> 1098 tmp_logs = train_function(iterator)
1099 if data_handler.should_sync:
1100 context.async_wait()
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
778 else:
779 compiler = "nonXla"
--> 780 result = self._call(*args, **kwds)
781
782 new_tracing_count = self._get_tracing_count()
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
821 # This is the first call of __call__, so we have to initialize.
822 initializers = []
--> 823 self._initialize(args, kwds, add_initializers_to=initializers)
824 finally:
825 # At this point we know that the initialization is complete (or less
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
695 self._concrete_stateful_fn = (
696 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
--> 697 *args, **kwds))
698
699 def invalid_creator_scope(*unused_args, **unused_kwds):
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
2853 args, kwargs = None, None
2854 with self._lock:
-> 2855 graph_function, _, _ = self._maybe_define_function(args, kwargs)
2856 return graph_function
2857
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3211
3212 self._function_cache.missed.add(call_context_key)
-> 3213 graph_function = self._create_graph_function(args, kwargs)
3214 self._function_cache.primary[cache_key] = graph_function
3215 return graph_function, args, kwargs
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3073 arg_names=arg_names,
3074 override_flat_arg_shapes=override_flat_arg_shapes,
-> 3075 capture_by_value=self._capture_by_value),
3076 self._function_attributes,
3077 function_spec=self.function_spec,
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
984 _, original_func = tf_decorator.unwrap(python_func)
985
--> 986 func_outputs = python_func(*func_args, **func_kwargs)
987
988 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
598 # __wrapped__ allows AutoGraph to swap in a converted function. We give
599 # the function a weak reference to itself to avoid a reference cycle.
--> 600 return weak_wrapped_fn().__wrapped__(*args, **kwds)
601 weak_wrapped_fn = weakref.ref(wrapped_fn)
602
~\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
971 except Exception as e: # pylint:disable=broad-except
972 if hasattr(e, "ag_error_metadata"):
--> 973 raise e.ag_error_metadata.to_exception(e)
974 else:
975 raise
ValueError: in user code:
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py:806 train_function *
return step_function(self, iterator)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py:796 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py:789 run_step **
outputs = model.train_step(data)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\training.py:749 train_step
y, y_pred, sample_weight, regularization_losses=self.losses)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:204 __call__
loss_value = loss_obj(y_t, y_p, sample_weight=sw)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\losses.py:149 __call__
losses = ag_call(y_true, y_pred)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\losses.py:253 call **
return ag_fn(y_true, y_pred, **self._fn_kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\losses.py:1535 categorical_crossentropy
return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\util\dispatch.py:201 wrapper
return target(*args, **kwargs)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\keras\backend.py:4687 categorical_crossentropy
target.shape.assert_is_compatible_with(output.shape)
C:\Users\psiva\anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1134 assert_is_compatible_with
raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (None, 1) and (None, 11) are incompatible
Please help me with this issue. I wrote this code for TensorFlow 2.0 and Python 3.7. What fixes do I need to make? I have 42 features, and the output target variable has 11 classes.
Did you specify the input shape anywhere? You can specify it by adding the code below as the first layer in the model. Documentation is here.
import tensorflow as tf

# Full signature: tf.keras.Input(shape=None, batch_size=None, name=None,
#     dtype=None, sparse=False, tensor=None, ragged=False, **kwargs)
model.add(tf.keras.Input(shape=(42,)))  # 42 input features

# Alternatively, you can specify it in the first Dense layer:
model.add(Dense(42, activation='relu', input_shape=(42,)))
Also check the training and test data labels: their dimensions have to match the number of neurons (11) in your final layer. Since you are using categorical cross-entropy, these labels need to be one-hot encoded. If your labels are integer-encoded, use sparse categorical cross-entropy instead, as sketched below. Documentation is here.
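A minimal sketch of the integer-label alternative, assuming y_train and y_test hold integer class IDs from 0 to 10 (with this loss, no one-hot encoding is needed):
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
model.fit(x=scaled_x_train, y=y_train,
          validation_data=(scaled_x_test, y_test), epochs=100)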
You need to make sure your labels are one-hot encoded. Try:
import tensorflow as tf

y_train = tf.keras.utils.to_categorical(y_train, 11)
y_test = tf.keras.utils.to_categorical(y_test, 11)
Be absolutely sure that the number of neurons in your final layer matches the number of columns in your labels:
assert model.layers[-1].units == y_train.shape[-1] == y_test.shape[-1]  # all should be 11
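To see the shape change this produces, here is a quick check on toy labels (the array y below is hypothetical, not from the question):
import numpy as np
import tensorflow as tf

y = np.array([0, 3, 10])                     # integer class IDs
y_oh = tf.keras.utils.to_categorical(y, 11)  # one row of 11 per label
print(y_oh.shape)                            # (3, 11)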
I think the error is caused by the shape of the labels: (None, 1) should change to (None, 11). I believe the code below might help you.
import numpy as np
from sklearn.preprocessing import OneHotEncoder

onehot_encoder = OneHotEncoder(sparse=False)
labels_i = onehot_encoder.fit_transform(np.reshape(labels, (-1, 1)))
This code one-hot encodes your labels: with 11 different classes, it converts the labels' shape to (_, 11). Applied to the arrays in the question, it would look like the sketch below.
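A sketch assuming y_train and y_test are integer-encoded; the encoder is fitted on the training labels only, then reused for the test labels:
y_train = onehot_encoder.fit_transform(np.reshape(y_train, (-1, 1)))
y_test = onehot_encoder.transform(np.reshape(y_test, (-1, 1)))
model.fit(x=scaled_x_train, y=y_train,
          validation_data=(scaled_x_test, y_test), epochs=100)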