tests/tuning/test_update_config.py


import pytest

try:
    import optuna
except ImportError:
    optuna = None

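# Skip the whole module when optuna is unavailable: every test below needs it.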
if optuna is None:
    pytest.skip("optuna not installed", allow_module_level=True)

from edsnlp.tune import update_config


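# A tiny config with placeholder values for update_config to fill in.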
@pytest.fixture
def minimal_config():
    return {
        "train": {
            "layers": None,
        },
        ".lr": {
            "learning_rate": None,
        },
    }


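# Search space: keys are dotted paths into the config; ".lr" is quoted in its
# key because that section's name itself contains a dot.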
@pytest.fixture
def hyperparameters():
    return {
        "train.layers": {
            "type": "int",
            "low": 2,
            "high": 8,
            "step": 2,
        },
        "'.lr'.learning_rate": {
            "alias": "learning_rate",
            "type": "float",
            "low": 0.001,
            "high": 0.1,
            "log": True,
        },
        "train.batch_size": {
            "alias": "batch_size",
            "type": "categorical",
            "choices": [32, 64, 128],
        },
    }


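# "string" is not a recognized parameter type, so update_config should raise.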
@pytest.fixture
def hyperparameters_with_invalid_type():
    return {
        "train.optimizer": {
            "type": "string",
            "choices": ["adam", "sgd"],
        }
    }


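# "model" is not a key of minimal_config, so the path lookup should fail.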
@pytest.fixture
def hyperparameters_with_invalid_path():
    return {
        "model.layers": {
            "type": "int",
            "low": 2,
            "high": 8,
            "step": 2,
        },
    }


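# A fresh trial drawn from an in-memory Optuna study.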
@pytest.fixture
def trial():
    study = optuna.create_study(direction="maximize")
    trial = study.ask()
    return trial


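# Explicit values are keyed by alias when one is declared ("learning_rate",
# "batch_size") and by the full dotted path otherwise ("train.layers").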
def test_update_config_with_values(minimal_config, hyperparameters):
    values = {"learning_rate": 0.05, "train.layers": 6, "batch_size": 64}
    _, updated_config = update_config(minimal_config, hyperparameters, values=values)
    assert updated_config[".lr"]["learning_rate"] == values["learning_rate"]
    assert updated_config["train"]["layers"] == values["train.layers"]
    assert updated_config["train"]["batch_size"] == values["batch_size"]


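# Values suggested by an Optuna trial must respect each parameter's bounds,
# step and choices.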
def test_update_config_with_trial(minimal_config, hyperparameters, trial):
    _, updated_config = update_config(minimal_config, hyperparameters, trial=trial)
    learning_rate = updated_config[".lr"]["learning_rate"]
    layers = updated_config["train"]["layers"]
    batch_size = updated_config["train"]["batch_size"]
    assert (
        hyperparameters["'.lr'.learning_rate"]["low"]
        <= learning_rate
        <= hyperparameters["'.lr'.learning_rate"]["high"]
    )
    assert (
        hyperparameters["train.layers"]["low"]
        <= layers
        <= hyperparameters["train.layers"]["high"]
    )
    assert layers % hyperparameters["train.layers"]["step"] == 0
    assert batch_size in hyperparameters["train.batch_size"]["choices"]


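# An unsupported parameter type should be rejected with an explicit error.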
def test_update_config_raises_error_on_unknown_parameter_type(
    minimal_config, hyperparameters_with_invalid_type, trial
):
    with pytest.raises(
        ValueError,
        match="Unknown parameter type 'string' for hyperparameter 'train.optimizer'.",
    ):
        update_config(minimal_config, hyperparameters_with_invalid_type, trial=trial)


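# A hyperparameter path that does not exist in the config should raise a KeyError.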
def test_update_config_raises_error_on_wrong_path(
    minimal_config, hyperparameters_with_invalid_path, trial
):
    with pytest.raises(KeyError, match="Path 'model' not found in config."):
        update_config(minimal_config, hyperparameters_with_invalid_path, trial=trial)