[英] Model fit / TypeError: 'NoneType' object is not callable
我正在嘗試使用 ResNet50 和我自己的分類器在分類任務上進行遷移學習,我有 28 個類要預測,但我被困在 model.fit() 這一步。
我正在使用數據生成器為 model 提供數據。
this is my model:
# Training-set generator: only rescales pixels from [0, 255] to [0, 1].
train_datagen = ImageDataGenerator(
rescale=1./255,
)
# Validation/test generator uses the same rescaling (no augmentation).
test_datagen = ImageDataGenerator(rescale=1./255)
train_dir = "/content/arabic/training"
# One-hot labels (class_mode='categorical') for the 28-class softmax head.
train_generator = train_datagen.flow_from_directory(
train_dir,
color_mode = "rgb",
target_size=(32, 32),
batch_size=20,
class_mode='categorical')
validation_dir="/content/arabic/validation"
validation_generator = test_datagen.flow_from_directory(
validation_dir,
target_size=(32, 32),
batch_size=20,
class_mode='categorical')
# Frozen-feature base: ResNet50 with ImageNet weights, classifier head removed.
# NOTE(review): 32x32 is far below ResNet50's native 224x224 input; the base
# emits a 1x1x2048 feature map here (see the summary below) — confirm this is intended.
base_model = keras.applications.ResNet50(
weights='imagenet', # Load weights pre-trained on ImageNet.
input_shape=(32, 32, 3),
include_top=False) # Do not include the ImageNet classifier at the top.
def modelFunctionTransfer(l2 = 0.001, lr=0.001, drop = 0.2, modelName = None):
    """Build and compile a transfer-learning classifier on top of a base model.

    Args:
        l2: L2 regularization factor for the hidden Dense layer.
        lr: Learning rate for the Adam optimizer.
        drop: Dropout rate applied after the hidden Dense layer.
        modelName: Optional pre-trained base model (e.g. ResNet50 with
            include_top=False) added as the first layer when provided.

    Returns:
        A compiled Sequential model ending in a 28-way softmax, using
        categorical cross-entropy loss and accuracy as the metric.
    """
    model = models.Sequential()
    # Identity comparison with None is the idiomatic (and safer) check.
    if modelName is not None:
        model.add(modelName)
    model.add(layers.Flatten())
    model.add(layers.Dense(512, kernel_regularizer=regularizers.l2(l2), activation='relu'))
    # Bug fix: `drop` was accepted but never used — the caller passes
    # drop=0.5 expecting regularization, so apply the Dropout layer here.
    model.add(layers.Dropout(drop))
    model.add(layers.Dense(28, activation='softmax'))
    opt = keras.optimizers.Adam(learning_rate=lr)
    model.compile(loss="categorical_crossentropy",
                  optimizer=opt,
                  metrics=['acc'])
    return model
# Assemble the full model (frozen ResNet50 base + custom head) and inspect it.
model = modelFunctionTransfer(l2 = 0.001, lr = 0.001, drop = 0.5, modelName=base_model)
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
resnet50 (Functional) (None, 1, 1, 2048) 23587712
_________________________________________________________________
flatten_1 (Flatten) (None, 2048) 0
_________________________________________________________________
dense_2 (Dense) (None, 512) 1049088
_________________________________________________________________
dense_3 (Dense) (None, 28) 14364
=================================================================
Total params: 24,651,164
Trainable params: 1,063,452
Non-trainable params: 23,587,712
這是我得到錯誤的地方:
# Train for 30 epochs straight from the directory generators.
# NOTE(review): the traceback below shows steps_per_epoch=100, not 672 —
# the posted snippet and the executed cell differ; confirm which was run.
history = model.fit(train_generator,
steps_per_epoch = 672,
epochs = 30,
validation_data = validation_generator,
validation_steps = 30,
)
作為參考,這是我收到的完整錯誤消息:
Epoch 1/30
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-46-1dfe32a08202> in <module>()
10 history = model.fit(train_generator,
11 steps_per_epoch = 100,
---> 12 epochs = 30,
13
14 )
2 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
1098 _r=1):
1099 callbacks.on_train_batch_begin(step)
-> 1100 tmp_logs = self.train_function(iterator)
1101 if data_handler.should_sync:
1102 context.async_wait()
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
826 tracing_count = self.experimental_get_tracing_count()
827 with trace.Trace(self._name) as tm:
--> 828 result = self._call(*args, **kwds)
829 compiler = "xla" if self._experimental_compile else "nonXla"
830 new_tracing_count = self.experimental_get_tracing_count()
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
853 # In this case we have created variables on the first call, so we run the
854 # defunned version which is guaranteed to never create variables.
--> 855 return self._stateless_fn(*args, **kwds) # pylint: disable=not-callable
856 elif self._stateful_fn is not None:
857 # Release the lock early so that multiple threads can perform the call
TypeError: 'NoneType' object is not callable
我復制了你的代碼,稍作改動。 我沒有你的數據集,所以我使用了只有 2 個類的數據集。 我使用的代碼在下面並且運行沒有錯誤。
# Answer's reproduction: identical pipeline, local 2-class dataset instead
# of the original 28-class one. Pixels rescaled to [0, 1]; no augmentation.
train_datagen = ImageDataGenerator(
rescale=1./255,
)
test_datagen = ImageDataGenerator(rescale=1./255)
train_dir = r'c:\temp\people\train'
train_generator = train_datagen.flow_from_directory(
train_dir,
color_mode = "rgb",
target_size=(32, 32),
batch_size=20,
class_mode='categorical')
validation_dir=r'c:\temp\people\test'
validation_generator = test_datagen.flow_from_directory(
validation_dir,
target_size=(32, 32),
batch_size=20,
class_mode='categorical')
# Same frozen ResNet50 feature extractor as in the question.
base_model = keras.applications.ResNet50(
weights='imagenet', # Load weights pre-trained on ImageNet.
input_shape=(32, 32, 3),
include_top=False) # Do not include the ImageNet classifier at the top.
def modelFunctionTransfer(l2 = 0.001, lr=0.001, drop = 0.2, modelName = None):
    """Build and compile a 2-class transfer-learning classifier.

    Answer's variant of the question's builder, adapted to a 2-class dataset.

    Args:
        l2: L2 regularization factor for the hidden Dense layer.
        lr: Learning rate for the Adam optimizer.
        drop: Dropout rate applied after the hidden Dense layer.
        modelName: Optional pre-trained base model added as the first layer.

    Returns:
        A compiled Sequential model ending in a 2-way softmax.
    """
    model = Sequential()
    # Identity comparison with None is the idiomatic check.
    if modelName is not None:
        model.add(modelName)
    model.add(tf.keras.layers.Flatten())
    model.add(tf.keras.layers.Dense(512, kernel_regularizer=regularizers.l2(l2),
                                    activation='relu'))
    # Bug fix: `drop` was accepted but never used; apply it before the head.
    model.add(tf.keras.layers.Dropout(drop))
    model.add(tf.keras.layers.Dense(2, activation='softmax'))
    # Bug fix: `lr` was accepted but the learning rate was hard-coded to .001,
    # silently ignoring the caller's value.
    opt = keras.optimizers.Adam(learning_rate=lr)
    model.compile(loss="categorical_crossentropy",
                  optimizer=opt,
                  metrics=['acc'])
    return model
# Build and train the answer's model. steps_per_epoch / validation_steps are
# omitted on purpose so model.fit derives them from the generators' lengths.
model = modelFunctionTransfer(l2 = 0.001, lr = 0.001, drop = 0.5, modelName=base_model)
model.summary()
history = model.fit(train_generator, epochs = 30, validation_data = validation_generator)
在 model.fit 中,我通常不使用 steps_per_epoch 和 validation_steps,讓 model.fit 在內部計算這些值。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.