Skip to content
Snippets Groups Projects
Commit d1c652b8 authored by Pat Alt's avatar Pat Alt
Browse files

:cry:

parent 5c2e5e36
No related branches found
No related tags found
1 merge request!7669 initial run including fmnist lenet and new method
......@@ -91,7 +91,7 @@ function run_experiment(exper::Experiment; save_output::Bool=true, only_models::
# Model tuning:
if TUNE_MODEL
mach = tune_model(exper)
mach = tune_mlp(exper)
return mach
end
......
......@@ -2,10 +2,10 @@
#SBATCH --job-name="Grid-search Tabular (ECCCo)"
#SBATCH --time=06:00:00
#SBATCH --ntasks=1000
#SBATCH --ntasks=100
#SBATCH --cpus-per-task=1
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --mem-per-cpu=8GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.
......
......@@ -28,7 +28,7 @@ add_models = Dict(
# Parameter choices:
params = (
n_individuals=N_IND_SPECIFIED ? N_IND : 10,
builder=default_builder(n_hidden=128, n_layers=2, activation=Flux.swish),
builder=default_builder(n_hidden=128, n_layers=1, activation=Flux.swish),
𝒟x=Uniform(-1.0, 1.0),
α=[1.0, 1.0, 1e-2],
sampling_batch_size=10,
......
......@@ -6,11 +6,11 @@ Output path for tuned model.
function tuned_model_path(exper::Experiment)
    # Tuned-model artifacts live in a fixed subdirectory of the experiment's output path.
    return joinpath(exper.output_path, "tuned_model")
end
"""
tune_model(exper::Experiment; kwargs...)
tune_mlp(exper::Experiment; kwargs...)
Tunes MLP in place and saves the tuned model to disk.
"""
function tune_model(exper::Experiment; kwargs...)
function tune_mlp(exper::Experiment; kwargs...)
if !(is_multi_processed(exper) && MPI.Comm_rank(exper.parallelizer.comm) != 0)
@info "Tuning models."
# Output path:
......@@ -28,7 +28,7 @@ function tune_model(exper::Experiment; kwargs...)
X, y, _ = prepare_data(exper::Experiment)
# Tune model:
measure = collect(values(exper.model_measures))
mach = tune_model(model, X, y; tuning_params=exper.model_tuning_params, measure=measure, kwargs...)
mach = tune_mlp(model, X, y; tuning_params=exper.model_tuning_params, measure=measure, kwargs...)
# Machine is still on GPU, save CPU version of model:
best_results = fitted_params(mach)
Serialization.serialize(joinpath(model_tuning_path, "$(exper.save_name)_best_mlp.jls"), best_results)
......@@ -39,11 +39,11 @@ function tune_model(exper::Experiment; kwargs...)
end
"""
tune_model(mod::Supervised, X, y; tuning_params::NamedTuple, kwargs...)
tune_mlp(mod::Supervised, X, y; tuning_params::NamedTuple, kwargs...)
Tunes a model by performing a grid search over the parameters specified in `tuning_params`.
"""
function tune_model(
function tune_mlp(
model::Supervised, X, y;
tuning_params::NamedTuple,
measure::Vector=MODEL_MEASURE_VEC,
......
Loading, or the page failed to load.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.