Skip to content
Snippets Groups Projects
Commit ca4957e9 authored by Pat Alt's avatar Pat Alt
Browse files

set up for tuning

parent 025485a6
No related branches found
No related tags found
1 merge request!7669 initial run including fmnist lenet and new method
Showing
with 174 additions and 99 deletions
# --- California Housing experiment setup ---
# Data: split into counterfactual (train) and test portions.
dataname = "California Housing"
counterfactual_data, test_data = train_test_split(load_california_housing(nothing); test_size=TEST_SIZE)
nobs = size(counterfactual_data.X, 2)

# Default builder: two-hidden-layer MLP (`n_in`/`n_out` are interpolated by
# `MLJFlux.@builder` at model-build time).
n_hidden = 32
activation = Flux.relu
builder = MLJFlux.@builder Flux.Chain(
    Dense(n_in, n_hidden, activation),
    Dense(n_hidden, n_hidden, activation),
    Dense(n_hidden, n_out),
)

# Model tuning:
model_tuning_params = DEFAULT_MODEL_TUNING_LARGE

# Generator tuning parameters:
tuning_params = DEFAULT_GENERATOR_TUNING

# Number of individuals (fall back to 100 when not specified):
n_ind = N_IND_SPECIFIED ? N_IND : 100

# Parameter choices:
# NOTE: the `@builder` call is parenthesised. In the unparenthesised space form
# (`builder=MLJFlux.@builder Flux.Chain(...), α=...`) the macro call greedily
# consumes the remaining comma-separated tuple entries as extra macro
# arguments, so `params` would silently lose `α`, `sampling_batch_size`, etc.
params = (
    n_hidden=32,
    activation=Flux.relu,
    builder=MLJFlux.@builder(Flux.Chain(
        Dense(n_in, n_hidden, activation),
        Dense(n_hidden, n_hidden, activation),
        Dense(n_hidden, n_out),
    )),
    α=[1.0, 1.0, 1e-1],
    sampling_batch_size=10,
    sampling_steps=30,
    use_ensembling=true,
    opt=Flux.Optimise.Descent(0.05),
)
# NOTE(review): legacy direct invocation with hard-coded keyword arguments.
# The commit introduces a `params` named tuple that is splatted into
# `run_experiment` instead; this call appears to be the pre-change revision
# retained by the diff view — confirm only one of the two forms survives.
# Also note it passes `n_individuals`, `min_batch_size`, `use_variants`, `Λ`,
# `nsamples` and `niter_eccco`, which the new `params` tuple above does not —
# verify those now come from defaults inside `run_experiment`.
run_experiment(
counterfactual_data, test_data;
dataname="California Housing",
epochs=100,
builder=builder,
α=[1.0, 1.0, 1e-1],
sampling_batch_size=10,
sampling_steps=30,
use_ensembling=true,
opt=Flux.Optimise.Descent(0.05),
n_individuals=n_ind,
min_batch_size=250,
use_variants=true,
Λ=[0.1, 0.2, 0.2],
nsamples=100,
niter_eccco=100
)
\ No newline at end of file
# Dispatch on the global flag: either perform a grid search over the generator
# tuning grid, or run a single experiment with the parameter choices above.
if GRID_SEARCH
    grid_search(
        counterfactual_data, test_data;
        dataname=dataname,
        tuning_params=tuning_params,
    )
else
    run_experiment(
        counterfactual_data, test_data;
        dataname=dataname,
        params...,
    )
end
\ No newline at end of file
# --- Credit Default experiment setup ---
# Data: split into counterfactual (train) and test portions.
dataname = "Credit Default"
counterfactual_data, test_data = train_test_split(load_credit_default(nothing); test_size=TEST_SIZE)
# Default builder: two-hidden-layer MLP (`n_in`/`n_out` are interpolated by
# `MLJFlux.@builder` at model-build time).
n_hidden = 32
activation = Flux.relu
builder = MLJFlux.@builder Flux.Chain(
Dense(n_in, n_hidden, activation),
Dense(n_hidden, n_hidden, activation),
Dense(n_hidden, n_out),
)
# Model tuning:
model_tuning_params = DEFAULT_MODEL_TUNING_LARGE
# Number of individuals (fall back to 100 when not specified on the command
# line — presumably; confirm against the experiment driver):
n_ind = N_IND_SPECIFIED ? N_IND : 100
# Generator tuning parameters:
tuning_params = DEFAULT_GENERATOR_TUNING
# NOTE(review): this span interleaves two revisions from the diff view — the
# opening of the old direct `run_experiment` call and the new `params` named
# tuple spliced into its keyword list. As rendered it is not syntactically
# coherent; resolve to one form (define `params` first, then splat it into
# `run_experiment`).
run_experiment(
counterfactual_data, test_data;
dataname="Credit Default",
builder=builder,
# Parameter choices:
params = (
n_hidden = 32,
activation = Flux.relu,
# NOTE(review): inside a tuple the space-form `@builder` macro call greedily
# consumes the following comma-separated entries as extra macro arguments;
# wrap it in parentheses, e.g. `builder = MLJFlux.@builder(Flux.Chain(...))`.
builder = MLJFlux.@builder Flux.Chain(
Dense(n_in, n_hidden, activation),
Dense(n_hidden, n_hidden, activation),
Dense(n_hidden, n_out),
),
α=[1.0, 1.0, 1e-1],
sampling_batch_size=10,
sampling_steps=30,
use_ensembling=true,
opt=Flux.Optimise.Descent(0.05),
n_individuals=n_ind,
use_variants=true,
Λ=[0.1, 0.2, 0.2],
nsamples=100,
niter_eccco=100
)
\ No newline at end of file
# NOTE(review): orphaned tail of the old `run_experiment` call, left over from
# the diff view — remove once the old/new revisions above are reconciled.
opt=Flux.Optimise.Descent(0.05)
)
# Dispatch on the global flag: either perform a grid search over the generator
# tuning grid, or run a single experiment with the parameter choices above.
if GRID_SEARCH
    grid_search(
        counterfactual_data, test_data;
        dataname=dataname,
        tuning_params=tuning_params,
    )
else
    run_experiment(
        counterfactual_data, test_data;
        dataname=dataname,
        params...,
    )
end
\ No newline at end of file
# --- German Credit experiment setup ---
# Data: split into counterfactual (train) and test portions.
dataname = "German Credit"
counterfactual_data, test_data = train_test_split(load_german_credit(nothing); test_size=TEST_SIZE)

# Default builder: two-hidden-layer MLP (`n_in`/`n_out` are interpolated by
# `MLJFlux.@builder` at model-build time).
n_hidden = 32
activation = Flux.relu
builder = MLJFlux.@builder Flux.Chain(
    Dense(n_in, n_hidden, activation),
    Dense(n_hidden, n_hidden, activation),
    Dense(n_hidden, n_out),
)

# Model tuning:
model_tuning_params = DEFAULT_MODEL_TUNING_LARGE

# Generator tuning parameters:
tuning_params = DEFAULT_GENERATOR_TUNING

# Number of individuals (fall back to 100 when not specified):
n_ind = N_IND_SPECIFIED ? N_IND : 100

# Parameter choices:
# NOTE: the `@builder` call is parenthesised. In the unparenthesised space form
# (`builder=MLJFlux.@builder Flux.Chain(...), α=...`) the macro call greedily
# consumes the remaining comma-separated tuple entries as extra macro
# arguments, so `params` would silently lose `α`, `sampling_batch_size`, etc.
params = (
    n_hidden=32,
    activation=Flux.relu,
    builder=MLJFlux.@builder(Flux.Chain(
        Dense(n_in, n_hidden, activation),
        Dense(n_hidden, n_hidden, activation),
        Dense(n_hidden, n_out),
    )),
    α=[1.0, 1.0, 1e-1],
    sampling_batch_size=10,
    sampling_steps=30,
    use_ensembling=true,
    opt=Flux.Optimise.Descent(0.05),
)
# NOTE(review): legacy direct invocation with hard-coded keyword arguments.
# The commit introduces a `params` named tuple that is splatted into
# `run_experiment` instead; this call appears to be the pre-change revision
# retained by the diff view — confirm only one of the two forms survives.
# It also passes `n_individuals`, `use_variants`, `Λ`, `nsamples` and
# `niter_eccco`, which the new `params` tuple above does not — verify those
# now come from defaults inside `run_experiment`.
run_experiment(
counterfactual_data, test_data;
dataname="German Credit",
builder=builder,
α=[1.0, 1.0, 1e-1],
sampling_batch_size=10,
sampling_steps=30,
use_ensembling=true,
opt=Flux.Optimise.Descent(0.05),
n_individuals=n_ind,
use_variants=true,
Λ=[0.1, 0.2, 0.2],
nsamples=100,
niter_eccco=100
)
\ No newline at end of file
# Dispatch on the global flag: either perform a grid search over the generator
# tuning grid, or run a single experiment with the parameter choices above.
if GRID_SEARCH
    grid_search(
        counterfactual_data, test_data;
        dataname=dataname,
        tuning_params=tuning_params,
    )
else
    run_experiment(
        counterfactual_data, test_data;
        dataname=dataname,
        params...,
    )
end
\ No newline at end of file
# --- GMSC ("Give Me Some Credit") experiment setup ---
# Data: split into counterfactual (train) and test portions.
dataname = "GMSC"
counterfactual_data, test_data = train_test_split(load_gmsc(nothing); test_size=TEST_SIZE)
# Number of observations (features are stored column-wise in `X`).
nobs = size(counterfactual_data.X, 2)
# Default builder: two-hidden-layer MLP (`n_in`/`n_out` are interpolated by
# `MLJFlux.@builder` at model-build time).
n_hidden = 32
activation = Flux.relu
builder = MLJFlux.@builder Flux.Chain(
Dense(n_in, n_hidden, activation),
Dense(n_hidden, n_hidden, activation),
Dense(n_hidden, n_out),
)
# Model tuning:
model_tuning_params = DEFAULT_MODEL_TUNING_LARGE
# Number of individuals (fall back to 100 when not specified):
n_ind = N_IND_SPECIFIED ? N_IND : 100
# Generator tuning parameters:
tuning_params = DEFAULT_GENERATOR_TUNING
# NOTE(review): this span interleaves two revisions from the diff view — the
# opening of the old direct `run_experiment` call and the new `params` named
# tuple spliced into its keyword list. As rendered it is not syntactically
# coherent; resolve to one form (define `params` first, then splat it into
# `run_experiment`).
run_experiment(
counterfactual_data, test_data;
dataname="GMSC",
epochs=100,
builder = builder,
α=[1.0, 1.0, 1e-1],
sampling_batch_size=10,
# Parameter choices:
params = (
n_hidden=32,
activation=Flux.relu,
# NOTE(review): inside a tuple the space-form `@builder` macro call greedily
# consumes the following comma-separated entries as extra macro arguments;
# wrap it in parentheses, e.g. `builder = MLJFlux.@builder(Flux.Chain(...))`.
builder=MLJFlux.@builder Flux.Chain(
Dense(n_in, n_hidden, activation),
Dense(n_hidden, n_hidden, activation),
Dense(n_hidden, n_out),
),
α = [1.0, 1.0, 1e-1],
sampling_batch_size = 10,
sampling_steps = 30,
use_ensembling = true,
opt = Flux.Optimise.Descent(0.05),
n_individuals = n_ind,
min_batch_size = 250,
use_variants=true,
Λ=[0.1, 0.2, 0.2],
nsamples = 100,
niter_eccco = 100,
)
\ No newline at end of file
# NOTE(review): orphaned tail of the old `run_experiment` call, left over from
# the diff view — remove once the old/new revisions above are reconciled.
opt = Flux.Optimise.Descent(0.05)
)
# Dispatch on the global flag: either perform a grid search over the generator
# tuning grid, or run a single experiment with the parameter choices above.
if GRID_SEARCH
    grid_search(
        counterfactual_data, test_data;
        dataname=dataname,
        tuning_params=tuning_params,
    )
else
    run_experiment(
        counterfactual_data, test_data;
        dataname=dataname,
        params...,
    )
end
#!/bin/bash
#SBATCH --job-name="Grid-search Tabular (ECCCo)"
#SBATCH --time=06:00:00
#SBATCH --ntasks=100
#SBATCH --cpus-per-task=1
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

module load 2023r1 openmpi

# Grid search over the four tabular datasets under MPI.
# Fix: the log previously went to experiments/synthetic.log (copy-paste from
# the synthetic job); write to a tabular-specific log instead.
srun julia --project=experiments experiments/run_experiments.jl -- data=gmsc,german_credit,credit_default,california_housing output_path=results mpi grid_search > experiments/grid_search_tabular.log
\ No newline at end of file
#!/bin/bash
# SLURM job: tune the classifier for the three synthetic datasets on one GPU.
#SBATCH --job-name="Tune Synthetic Model (ECCCo)"
#SBATCH --time=03:00:00
#SBATCH --ntasks=1
#SBATCH --gpus-per-task=1
#SBATCH --cpus-per-task=1
#SBATCH --partition=gpu
#SBATCH --mem-per-cpu=8GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

# `tune_model` switches the experiment driver into model-tuning mode.
srun julia --project=experiments experiments/run_experiments.jl -- data=linearly_separable,moons,circles output_path=results tune_model
\ No newline at end of file
#!/bin/bash
# SLURM job: tune the classifier for the four tabular datasets on one GPU.
#SBATCH --job-name="Tune Tabular Model (ECCCo)"
#SBATCH --time=03:00:00
#SBATCH --ntasks=1
#SBATCH --gpus-per-task=1
#SBATCH --cpus-per-task=1
#SBATCH --partition=gpu
#SBATCH --mem-per-cpu=8GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

# `tune_model` switches the experiment driver into model-tuning mode.
srun julia --project=experiments experiments/run_experiments.jl -- data=gmsc,german_credit,credit_default,california_housing output_path=results tune_model
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment