-
-
Notifications
You must be signed in to change notification settings - Fork 26
/
Copy pathhyperparameters.jl
48 lines (34 loc) · 1.14 KB
/
hyperparameters.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
# Hyperparameter interface
"""
HyperParameter{T}
A hyperparameter is any state that influences the
training and is not a parameter of the model.
Hyperparameters can be scheduled using the [`Scheduler`](#)
callback.
"""
abstract type HyperParameter{T} end
"""
sethyperparameter!(learner, H, value)
Sets hyperparameter `H` to `value` on `learner`.
"""
function sethyperparameter! end
"""
stateaccess(::Type{HyperParameter})
Defines what `Learner` state is accessed when calling
`sethyperparameter!` and `gethyperparameter`. This is needed
so that [`Scheduler`](#) can access the state.
"""
stateaccess(::Type{HyperParameter}) = ()
# Implementations
"""
abstract type LearningRate <: HyperParameter
Hyperparameter for the optimizer's learning rate.
See [`Scheduler`](#) and [hyperparameter scheduling](./docs/tutorials/hyperparameters.md).
"""
abstract type LearningRate <: HyperParameter{Float64} end
# `<:LearningRate` so that user-defined subtypes of `LearningRate` also
# get write access to the optimizer instead of falling through to the
# empty `HyperParameter` fallback.
stateaccess(::Type{<:LearningRate}) = (optimizer = Write(),)
# Delegate to `setlearningrate!`, which mutates the optimizer in place.
# `<:LearningRate` covers user-defined subtypes as well (consistent with
# the corresponding `stateaccess` method).
sethyperparameter!(learner, ::Type{<:LearningRate}, value) =
    setlearningrate!(learner.optimizer, value)
"""
    setlearningrate!(optimizer, value)

Set the learning rate — the `eta` field — of `optimizer` to `value`.
Returns `value`.
"""
setlearningrate!(optimizer, value) = setproperty!(optimizer, :eta, value)