Merge pull request #1 from PumasAI-Labs/juanjose/indent-fix
Change indent to 4
storopoli authored Apr 9, 2024
2 parents 9896a2b + 022059e commit 3541bf3
Showing 5 changed files with 23 additions and 26 deletions.
2 changes: 1 addition & 1 deletion .JuliaFormatter.toml
@@ -1,2 +1,2 @@
-indent = 2
+indent = 4
format_docstrings = true
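
With the config updated, rerunning the formatter applies the new indentation repo-wide. A minimal sketch using JuliaFormatter.jl (assuming it is installed in the active environment):

```julia
using JuliaFormatter

# `format` picks up the nearest .JuliaFormatter.toml, so `indent = 4` and
# `format_docstrings = true` apply without being passed explicitly.
format(".")  # returns true when every file was already formatted
```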
8 changes: 4 additions & 4 deletions 01-linear_regression.jl
@@ -24,8 +24,8 @@ x = rand(uniform, 1, num_samples) # samples stored columnwise
y = true_function.(x) + σ * ϵ

fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
-lines!(-1..1, true_function; color = :gray, label = "true");
-axislegend(; position=:rb);
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
+axislegend(; position = :rb);
fig

# 1.2. Model `true_function` with a linear regression model
@@ -41,6 +41,6 @@ ŷ = fitted_linreg(x)

fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
scatter!(vec(x), vec(ŷ); label = "prediction");
-lines!(-1..1, true_function; color = :gray, label = "true");
-axislegend(; position=:rb);
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
+axislegend(; position = :rb);
fig
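
The `-1..1` to `-1 .. 1` rewrites in this PR are purely cosmetic: `..` is a binary operator constructing the interval that `lines!` samples `true_function` over, and the spacing does not change the parse. A quick check, assuming IntervalSets.jl (the package that defines `..` for use with Makie) is available:

```julia
using IntervalSets

iv = -1 .. 1  # identical parse to -1..1; JuliaFormatter only adds spaces
@assert iv == ClosedInterval(-1, 1)
@assert 0.0 in iv && !(1.5 in iv)
```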
6 changes: 3 additions & 3 deletions 02-complex_relationships.jl
@@ -16,7 +16,7 @@ x = rand(uniform, 1, num_samples)
y = true_function.(x) + σ * ϵ

fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig

@@ -31,7 +31,7 @@ ŷ_ex22_50iter = fitted_linreg(x)

fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
scatter!(vec(x), vec(ŷ_ex22_50iter); label = "prediction");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig

@@ -46,6 +46,6 @@ ŷ = fitted_nn(x)

fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
scatter!(vec(x), vec(ŷ), label = "prediction");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig
12 changes: 4 additions & 8 deletions 03-bias-variance_tradeoff.jl
@@ -26,7 +26,7 @@ fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data
scatter!(vec(x), vec(ŷ_underfit), label = "prediction (5 iterations)");
scatter!(vec(x), vec(ŷ), label = "prediction (50 iterations)");
scatter!(vec(x), vec(ŷ_overfit), label = "prediction (1000 iterations)");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig

@@ -46,19 +46,15 @@ ŷ_linreg = fitted_linreg(x)
fig = scatter(vec(x), vec(y); axis = (xlabel = "x", ylabel = "y"), label = "data");
scatter!(vec(x), vec(ŷ_linreg), label = "$max_iterations iterations");
scatter!(vec(x), vec(ŷ_ex22_50iter), label = "50 iterations");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig

# 3.3. The impact of the NN size

nn = MLPDomain(1, (32, tanh), (32, tanh), (1, identity); bias = true)
-fitted_nn = fit(
-  nn,
-  target;
-  optim_alg = DeepPumas.BFGS(),
-  optim_options = (; iterations = 1_000)
-)
+fitted_nn =
+    fit(nn, target; optim_alg = DeepPumas.BFGS(), optim_options = (; iterations = 1_000))

ŷ = fitted_nn(x)

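The `fit` rewrite above is the formatter deciding the call now fits the line budget: with `indent = 4` the whole right-hand side fits on a single indented continuation line, so the argument-per-line form is collapsed. A sketch reproducing this in isolation, assuming JuliaFormatter.jl (the expected output is inferred from the diff itself; the converse also holds, with over-long one-liners such as the `scatter` call in 04-generalization.jl below getting split):

```julia
using JuliaFormatter

src = """
fitted_nn = fit(
    nn,
    target;
    optim_alg = DeepPumas.BFGS(),
    optim_options = (; iterations = 1_000)
)
"""
# With the default 92-character margin, this should collapse to the
# two-line `fitted_nn =` / indented `fit(...)` form shown in the diff.
print(format_text(src; indent = 4))
```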
21 changes: 11 additions & 10 deletions 04-generalization.jl
@@ -20,22 +20,23 @@ x_valid = rand(uniform, 1, num_samples)
y_valid = true_function.(x_valid) + σ * ϵ_valid
target_valid = preprocess(x_valid, y_valid)

-fig = scatter(vec(x_train), vec(y_train); axis = (xlabel = "x", ylabel = "y"), label = "training data");
+fig = scatter(
+    vec(x_train),
+    vec(y_train);
+    axis = (xlabel = "x", ylabel = "y"),
+    label = "training data",
+);
scatter!(vec(x_valid), vec(y_valid); label = "validation data");
-lines!(-1..1, true_function; color = :gray, label = "true");
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
axislegend();
fig

# 4.2. Validation loss as a proxy for generalization performance

loss_train_l, loss_valid_l = [], []

-fitted_nn = fit(
-  nn,
-  target_train;
-  optim_alg = DeepPumas.BFGS(),
-  optim_options = (; iterations = 10),
-)
+fitted_nn =
+    fit(nn, target_train; optim_alg = DeepPumas.BFGS(), optim_options = (; iterations = 10))
push!(loss_train_l, sum((fitted_nn(x_train) .- y_train) .^ 2))
push!(loss_valid_l, sum((fitted_nn(x_valid) .- y_valid) .^ 2))

@@ -112,6 +113,6 @@ ŷ_ho = nn_ho(x_valid)

fig = scatter(vec(x_valid), vec(y_valid); label = "validation data");
scatter!(vec(x_valid), vec(ŷ_ho), label = "prediction (hyperparam opt.)");
-lines!(-1..1, true_function; color = :gray, label = "true");
-axislegend(; position=:ct);
+lines!(-1 .. 1, true_function; color = :gray, label = "true");
+axislegend(; position = :ct);
fig
