Model architecture hyperparameters - Hyperparameters that control the model's underlying mathematical function, such as the number and width of its layers
Model training hyperparameters - Hyperparameters that control the training loop and how the optimizer behaves, such as the learning rate
The example below searches over both kinds with Keras Tuner:
import keras_tuner as kt
from tensorflow import keras

def build_model(hp):
    # Model architecture hyperparameters: the width of each hidden layer
    model = keras.Sequential([
        keras.layers.Flatten(input_shape=(28, 28)),
        keras.layers.Dense(
            units=hp.Int('first_hidden', min_value=32, max_value=256, step=32),
            activation='relu'),
        keras.layers.Dense(
            units=hp.Int('second_hidden', min_value=32, max_value=256, step=32),
            activation='relu'),
        keras.layers.Dense(units=10, activation='softmax')
    ])
    # Model training hyperparameter: the learning rate, sampled on a log scale
    model.compile(
        optimizer=keras.optimizers.Adam(
            hp.Float('learning_rate', min_value=0.005, max_value=0.01, sampling='log')),
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'])
    return model

# Bayesian optimization picks each new trial based on the results of earlier trials
tuner = kt.BayesianOptimization(
    build_model,
    objective='val_accuracy',
    max_trials=10,
)

# x_train and y_train are assumed to be 28x28 grayscale images with integer labels 0-9
tuner.search(x_train, y_train, validation_split=0.1, epochs=10)

# Retrieve the hyperparameter values from the best-performing trial
best_hps = tuner.get_best_hyperparameters(num_trials=1)[0]
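Once the search finishes, best_hps holds the winning values. A minimal sketch of the usual next step, assuming the same x_train and y_train as above: rebuild the model with those values via tuner.hypermodel.build and retrain it from scratch (the retraining epoch count here is an assumption, not part of the original example).

# Inspect the chosen values by name
print(best_hps.get('first_hidden'))
print(best_hps.get('second_hidden'))
print(best_hps.get('learning_rate'))

# Rebuild the model with the best hyperparameters and retrain it
best_model = tuner.hypermodel.build(best_hps)
best_model.fit(x_train, y_train, validation_split=0.1, epochs=10)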