Skip to content

find_hyperparameters

airt.keras.experiments.find_hyperparameters(dataset_name: str, *, monotonicity_indicator: Dict[str, int], final_activation: Union[str, Callable[[TensorLike, TensorLike], TensorLike]], loss: Union[str, Callable[[TensorLike, TensorLike], TensorLike]], metrics: Union[str, Callable[[TensorLike, TensorLike], TensorLike]], hp_params_f: Optional[Callable[[HyperParameters], Dict[str, Any]]] = None, max_trials: int = 100, max_epochs: int = 50, batch_size: int = 8, objective: Union[str, Objective], direction: str, dir_root: Union[Path, str] = 'tuner', seed: int = 42, executions_per_trial: int = 3, max_consecutive_failed_trials: int = 5, patience: int = 10) -> Tuner ยค

Search for optimal hyperparameters

Parameters:

Name Type Description Default
dataset_name str

name of the dataset, one of "auto", "heart", "compas", "blog", "loan"

required
monotonicity_indicator Dict[str, int]

monotonicity indicator as used in MonoDense.__init__

required
final_activation Union[str, Callable[[TensorLike, TensorLike], TensorLike]]

final activation of the neural network

required
loss Union[str, Callable[[TensorLike, TensorLike], TensorLike]]

Tensorflow loss function

required
metrics Union[str, Callable[[TensorLike, TensorLike], TensorLike]]

Tensorflow metrics function

required
hp_params_f Optional[Callable[[HyperParameters], Dict[str, Any]]]

a function that constructs the hyperparameter samples using Keras Tuner

None
max_trials int

maximum number of trials

100
max_epochs int

maximum number of epochs in each trial

50
batch_size int

batch size

8
objective Union[str, Objective]

objective, typically f"val_{metrics}"

required
direction str

direction of the objective, either "min" or "max"

required
dir_root Union[Path, str]

root directory for storing Keras Tuner data

'tuner'
seed int

random seed used to guarantee reproducibility of results

42
executions_per_trial int

number of executions per trial. Set it to a number higher than zero for small datasets

3
max_consecutive_failed_trials int

maximum number of failed trials as used in Keras Tuner

5
patience int

number of epochs with a worse objective before stopping a trial early

10

Returns:

Type Description
Tuner

An instance of Keras Tuner

Source code in airt/keras/experiments.py
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
def find_hyperparameters(
    dataset_name: str,
    *,
    monotonicity_indicator: Dict[str, int],
    final_activation: Union[str, Callable[[TensorLike, TensorLike], TensorLike]],
    loss: Union[str, Callable[[TensorLike, TensorLike], TensorLike]],
    metrics: Union[str, Callable[[TensorLike, TensorLike], TensorLike]],
    hp_params_f: Optional[Callable[[HyperParameters], Dict[str, Any]]] = None,
    max_trials: int = 100,
    max_epochs: int = 50,
    batch_size: int = 8,
    objective: Union[str, Objective],
    direction: str,
    dir_root: Union[Path, str] = "tuner",
    seed: int = 42,
    executions_per_trial: int = 3,
    max_consecutive_failed_trials: int = 5,
    patience: int = 10,
) -> Tuner:
    """Run a Bayesian-optimization search for the best hyperparameters.

    Args:
        dataset_name: name of the dataset, one of "auto", "heart", "compas", "blog", "loan"
        monotonicity_indicator: monotonicity indicator as used in `MonoDense.__init__`
        final_activation: final activation of the neural network
        loss: Tensorflow loss function
        metrics: Tensorflow metrics function
        hp_params_f: a function that constructs the hyperparameter samples using Keras Tuner
        max_trials: maximum number of trials
        max_epochs: maximum number of epochs in each trial
        batch_size: batch size
        objective: objective, typically f"val_{metrics}"
        direction: direction of the objective, either "min" or "max"
        dir_root: root directory for storing Keras Tuner data
        seed: random seed used to guarantee reproducibility of results
        executions_per_trial: number of executions per trial. Set it to a number
            higher than zero for small datasets
        max_consecutive_failed_trials: maximum number of failed trials as used
            in Keras Tuner
        patience: number of epochs with a worse objective before stopping a
            trial early

    Returns:
        An instance of Keras Tuner

    """
    # Seed all TensorFlow/Keras RNGs so repeated searches are reproducible.
    tf.keras.utils.set_random_seed(seed)

    # Load the named dataset and turn both splits into tf.data pipelines.
    train_df, test_df = get_train_n_test_data(dataset_name)
    train_ds = df2ds(train_df)
    test_ds = df2ds(test_df)

    # The hypermodel knows how to build/compile a candidate network for a
    # given set of sampled hyperparameters.
    hyper_model = _TestHyperModel(
        monotonicity_indicator=monotonicity_indicator,
        hp_params_f=hp_params_f,
        final_activation=final_activation,
        loss=loss,
        metrics=metrics,
        train_ds=train_ds,
        batch_size=batch_size,
    )

    # Bayesian-optimization tuner persisting its state under
    # <dir_root>/<dataset_name>.
    tuner = BayesianOptimization(
        hyper_model,
        objective=Objective(objective, direction),
        max_trials=max_trials,
        seed=seed,
        directory=Path(dir_root),
        project_name=dataset_name,
        executions_per_trial=executions_per_trial,
        max_consecutive_failed_trials=max_consecutive_failed_trials,
    )

    # Abort a trial once the validation loss has not improved for `patience`
    # consecutive epochs.
    early_stopping = tf.keras.callbacks.EarlyStopping(
        monitor="val_loss", patience=patience
    )

    # Full shuffle each epoch (buffer covers the whole training set), then
    # batch and prefetch for throughput.
    shuffled_train = train_ds.shuffle(len(train_ds)).batch(batch_size).prefetch(2)
    tuner.search(
        shuffled_train,
        validation_data=test_ds.batch(256),
        callbacks=[early_stopping],
        epochs=max_epochs,
    )

    return tuner