Commit d1c652b8 authored by Pat Alt

:cry:

parent 5c2e5e36
1 merge request: !7669 initial run including fmnist lenet and new method
@@ -91,7 +91,7 @@ function run_experiment(exper::Experiment; save_output::Bool=true, only_models::
     # Model tuning:
     if TUNE_MODEL
-        mach = tune_model(exper)
+        mach = tune_mlp(exper)
         return mach
     end
@@ -2,10 +2,10 @@
 #SBATCH --job-name="Grid-search Tabular (ECCCo)"
 #SBATCH --time=06:00:00
-#SBATCH --ntasks=1000
+#SBATCH --ntasks=100
 #SBATCH --cpus-per-task=1
 #SBATCH --partition=compute
-#SBATCH --mem-per-cpu=4GB
+#SBATCH --mem-per-cpu=8GB
 #SBATCH --account=research-eemcs-insy
 #SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.
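The batch script above cuts the requested task count from 1000 to 100 and doubles the memory per CPU to 8GB. Those tasks presumably become the MPI ranks that the Julia experiment runs on (the tuning code later in this commit guards on MPI.Comm_rank). A minimal sketch of that rank-0-only pattern, assuming the job launches the experiment via MPI.jl; the experiment plumbing itself is omitted:

using MPI

MPI.Init()
comm = MPI.COMM_WORLD
rank = MPI.Comm_rank(comm)   # 0-based rank within the SLURM --ntasks allocation

# Only the root rank performs model tuning; the other ranks skip it and wait.
if rank == 0
    @info "Tuning models on rank 0 of $(MPI.Comm_size(comm))."
    # ... tuning work goes here ...
end
MPI.Barrier(comm)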
@@ -28,7 +28,7 @@ add_models = Dict(
 # Parameter choices:
 params = (
     n_individuals=N_IND_SPECIFIED ? N_IND : 10,
-    builder=default_builder(n_hidden=128, n_layers=2, activation=Flux.swish),
+    builder=default_builder(n_hidden=128, n_layers=1, activation=Flux.swish),
     𝒟x=Uniform(-1.0, 1.0),
     α=[1.0, 1.0, 1e-2],
     sampling_batch_size=10,
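This hunk shrinks the MLP builder from two hidden layers to one, keeping the width of 128 and the swish activation. `default_builder` is a helper defined elsewhere in the ECCCo code base; as a rough illustration only (an assumption, not the project's actual implementation), such a builder could be written on top of MLJFlux like this:

using Flux, MLJFlux

# Hypothetical stand-in for `default_builder`: an MLP with `n_layers` hidden
# layers of width `n_hidden`, each using `activation`.
default_builder(; n_hidden::Int=32, n_layers::Int=1, activation=Flux.relu) =
    MLJFlux.MLP(hidden=ntuple(_ -> n_hidden, n_layers), σ=activation)

builder = default_builder(n_hidden=128, n_layers=1, activation=Flux.swish)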
@@ -6,11 +6,11 @@ Output path for tuned model.
 tuned_model_path(exper::Experiment) = joinpath(exper.output_path, "tuned_model")

 """
-    tune_model(exper::Experiment; kwargs...)
+    tune_mlp(exper::Experiment; kwargs...)

 Tunes MLP in place and saves the tuned model to disk.
 """
-function tune_model(exper::Experiment; kwargs...)
+function tune_mlp(exper::Experiment; kwargs...)
     if !(is_multi_processed(exper) && MPI.Comm_rank(exper.parallelizer.comm) != 0)
         @info "Tuning models."
         # Output path:
@@ -28,7 +28,7 @@ function tune_model(exper::Experiment; kwargs...)
     X, y, _ = prepare_data(exper::Experiment)
     # Tune model:
     measure = collect(values(exper.model_measures))
-    mach = tune_model(model, X, y; tuning_params=exper.model_tuning_params, measure=measure, kwargs...)
+    mach = tune_mlp(model, X, y; tuning_params=exper.model_tuning_params, measure=measure, kwargs...)
     # Machine is still on GPU, save CPU version of model:
     best_results = fitted_params(mach)
     Serialization.serialize(joinpath(model_tuning_path, "$(exper.save_name)_best_mlp.jls"), best_results)
@@ -39,11 +39,11 @@ function tune_model(exper::Experiment; kwargs...)
 end

 """
-    tune_model(mod::Supervised, X, y; tuning_params::NamedTuple, kwargs...)
+    tune_mlp(mod::Supervised, X, y; tuning_params::NamedTuple, kwargs...)

 Tunes a model by performing a grid search over the parameters specified in `tuning_params`.
 """
-function tune_model(
+function tune_mlp(
     model::Supervised, X, y;
     tuning_params::NamedTuple,
     measure::Vector=MODEL_MEASURE_VEC,
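According to its docstring, the renamed `tune_mlp(model, X, y; tuning_params, measure)` grid-searches the hyperparameters listed in `tuning_params` and returns the fitted machine. A minimal sketch of what such a routine could wrap using MLJ's built-in tuning machinery (an assumption about the implementation; the actual ranges and resampling strategy come from the experiment configuration):

using MLJ

function tune_mlp_sketch(model, X, y; tuning_params::NamedTuple, measure::Vector)
    # Turn each `hyperparameter => candidate values` entry into an MLJ range:
    ranges = [range(model, name; values=collect(vals)) for (name, vals) in pairs(tuning_params)]
    tuned = TunedModel(
        model=model,
        tuning=Grid(),           # exhaustive grid over the supplied ranges
        resampling=CV(nfolds=5),
        range=ranges,
        measure=measure,
    )
    mach = machine(tuned, X, y)
    fit!(mach)
    return mach                  # fitted_params(mach) exposes the best model
end

The real `tune_mlp` additionally serializes the best result to disk, as shown in the hunk above.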