I have created a Keras Sequential model and compiled it with the Adam optimizer. I want to get the learning rate after every epoch. This stackoverflow question seems to answer my question. However, when I follow the solution mentioned there, I get the following error:
set_model() missing 1 required positional argument: 'model'
Here is the code with which I create the model:
model = Sequential()
model.add(Conv2D(64, (5, 5), input_shape=(IMG_HEIGHT, IMG_WIDTH, 3), activation='relu'))
model.add(Conv2D(64, (5, 5), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(Dropout(0.2))
model.add(Conv2D(128, (5, 5), activation='relu'))
model.add(Conv2D(128, (5, 5), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(Dropout(0.2))
model.add(Conv2D(256, (5, 5), activation='relu'))
model.add(Conv2D(256, (5, 5), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(BatchNormalization(axis=3))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
learning_rate_reduction = ReduceLROnPlateau(monitor='val_acc',
                                            patience=3,
                                            verbose=1,
                                            factor=0.4,
                                            min_lr=0.0001)
csvlogger = CSVLogger("solution.csv", separator='\t')
checkpoint = ModelCheckpoint("models/best_model5.h5", monitor="val_acc", save_best_only=True, mode='max')
learning_rate_reduction = ReduceLROnPlateau(monitor='val_acc',
                                            patience=3,
                                            verbose=1,
                                            factor=0.4,
                                            min_lr=0.00001)
class MyCallback(keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        lr = self.model.optimizer.lr
        decay = self.model.optimizer.decay
        iterations = self.model.optimizer.iterations
        lr_with_decay = lr / (1. + decay * K.cast(iterations, K.dtype(decay)))
        print(K.eval(lr_with_decay))
model.fit_generator(datagen.flow(x_train, y_train, batch_size=75),
                    epochs=10, validation_data=(x_validation, y_test), verbose=1,
                    steps_per_epoch=x_train.shape[0], callbacks=[csvlogger, checkpoint, MyCallback])
How do I get past this error "set_model() missing 1 required positional argument: 'model'"?
Below is the stack trace:
TypeError Traceback (most recent call last)
<ipython-input-12-1826a19039cd> in <module>()
128 model.fit_generator(datagen.flow(x_train, y_train, batch_size=75),
129 epochs=10, validation_data=(x_validation, y_test),verbose=1,
--> 130 steps_per_epoch=x_train.shape[0], callbacks=[csvlogger, checkpoint, MyCallback])
131 model.save('trained_model5.h5')
132
/usr/local/lib/python3.6/dist-packages/keras/legacy/interfaces.py in wrapper(*args, **kwargs)
89 warnings.warn('Update your `' + object_name +
90 '` call to the Keras 2 API: ' + signature, stacklevel=2)
---> 91 return func(*args, **kwargs)
92 wrapper._original_function = func
93 return wrapper
/usr/local/lib/python3.6/dist-packages/keras/models.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
1274 use_multiprocessing=use_multiprocessing,
1275 shuffle=shuffle,
-> 1276 initial_epoch=initial_epoch)
1277
1278 @interfaces.legacy_generator_methods_support
/usr/local/lib/python3.6/dist-packages/keras/legacy/interfaces.py in wrapper(*args, **kwargs)
89 warnings.warn('Update your `' + object_name +
90 '` call to the Keras 2 API: ' + signature, stacklevel=2)
---> 91 return func(*args, **kwargs)
92 wrapper._original_function = func
93 return wrapper
/usr/local/lib/python3.6/dist-packages/keras/engine/training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
2131 else:
2132 callback_model = self
-> 2133 callbacks.set_model(callback_model)
2134 callbacks.set_params({
2135 'epochs': epochs,
/usr/local/lib/python3.6/dist-packages/keras/callbacks.py in set_model(self, model)
50 def set_model(self, model):
51 for callback in self.callbacks:
---> 52 callback.set_model(model)
53
54 def on_epoch_begin(self, epoch, logs=None):
TypeError: set_model() missing 1 required positional argument: 'model'
Also, my other question is whether the above solution is even correct. This tensorflow link about Adam Optimizer suggests the learning rate is computed as follows:
lr_t <- learning_rate * sqrt(1 - beta2^t) / (1 - beta1^t)
This seems completely different from the solution mentioned in the other link. What am I missing?
Best answer: In the callbacks argument of the model.fit_generator method you are passing the class itself rather than an object (instance) of that class.
It should be:
my_callback_object = MyCallback()  # create an instance of the MyCallback class
model.fit_generator(datagen.flow(x_train, y_train, batch_size=75),
                    epochs=10, validation_data=(x_validation, y_test),
                    verbose=1, steps_per_epoch=x_train.shape[0],
                    callbacks=[csvlogger, checkpoint, my_callback_object])
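As for the second question: the value MyCallback prints is only the base learning rate after the optional time-based decay, while the expression from the TensorFlow documentation describes Adam's bias-corrected step size, which additionally depends on beta_1, beta_2 and the iteration count, so the two formulas are reporting different quantities. Below is a minimal sketch of how that bias-corrected value could be read out as well; it assumes Keras 2.x with the TensorFlow backend, and the callback name AdamLearningRateTracker is only an illustrative choice, not an official API:
import keras
from keras import backend as K

class AdamLearningRateTracker(keras.callbacks.Callback):
    # Sketch: prints Adam's bias-corrected step size after each epoch,
    # assuming the model was compiled with keras.optimizers.Adam.
    def on_epoch_end(self, epoch, logs=None):
        opt = self.model.optimizer
        lr = K.eval(opt.lr)          # base learning rate variable
        beta_1 = K.eval(opt.beta_1)
        beta_2 = K.eval(opt.beta_2)
        t = K.eval(opt.iterations)   # number of parameter updates performed so far (t in the Adam paper)
        # lr_t = learning_rate * sqrt(1 - beta2^t) / (1 - beta1^t)
        lr_t = lr * ((1. - beta_2 ** t) ** 0.5) / (1. - beta_1 ** t)
        print('Adam effective step size: %f' % lr_t)
Note that with the default decay=0, the MyCallback in your question would simply print the constant base learning rate every epoch, so seeing an unchanging value there is expected.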