Skip to content
Snippets Groups Projects
Commit 7f6e9525 authored by pat-alt's avatar pat-alt
Browse files

final changes to grids and added mnist for daic

parent beb93007
No related branches found
No related tags found
1 merge request!8985 overshooting
Showing
with 42 additions and 68 deletions
#!/bin/bash

# SLURM job: ECCCo grid search on MNIST (DAIC cluster).
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="MNIST Grid-search (ECCCo)"
#SBATCH --time=02:00:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=general
#SBATCH --mem-per-cpu=4GB
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

module use /opt/insy/modulefiles # Use DAIC INSY software collection
module load openmpi

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the grid search; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=mnist output_path=results mpi grid_search threaded n_individuals=10 > experiments/logs/grid_search_mnist.log
#!/bin/bash

# SLURM job: ECCCo experiment on the California Housing dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="California Housing (ECCCo)"
#SBATCH --time=01:30:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=california_housing output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/california_housing.log
#!/bin/bash

# SLURM job: ECCCo experiment on the synthetic Circles dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="Circles (ECCCo)"
#SBATCH --time=00:45:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=2GB

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=circles output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/circles.log
#!/bin/bash
# SLURM job: ECCCo experiment on the California Housing dataset (pure-MPI variant).
# 1000 single-CPU tasks, one MPI rank per task.
#SBATCH --job-name="California Housing (ECCCo)"
#SBATCH --time=3:00:00
#SBATCH --ntasks=1000
#SBATCH --cpus-per-task=1
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=8GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.
# 2023r1 software stack supplies Open MPI for the srun launch below.
module load 2023r1 openmpi
# Run the experiment under MPI; stdout is captured to a log file.
srun julia --project=experiments experiments/run_experiments.jl -- data=california_housing output_path=results mpi > experiments/california_housing.log
#!/bin/bash

# SLURM job: ECCCo experiment on the German Credit dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="German Credit (ECCCo)"
#SBATCH --time=01:00:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=german_credit output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/german_credit.log
#!/bin/bash

# SLURM job: ECCCo experiment on the Give Me Some Credit (GMSC) dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="GMSC (ECCCo)"
#SBATCH --time=01:30:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=gmsc output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/gmsc.log
#!/bin/bash

# SLURM job: ECCCo experiment on the synthetic Linearly Separable dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="Linearly Separable (ECCCo)"
#SBATCH --time=00:45:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=2GB

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=linearly_separable output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/linearly_separable.log
#!/bin/bash

# SLURM job: ECCCo experiment on the synthetic Moons dataset.
# 30 MPI tasks x 10 CPUs each; hybrid MPI + Julia threading.
#SBATCH --job-name="Moons (ECCCo)"
#SBATCH --time=00:45:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=2GB

# Shared environment setup for all experiment jobs.
source experiments/slurm_header.sh

# Launch the experiment; stdout goes to a per-experiment log file.
srun julia --project=experiments --threads $SLURM_CPUS_PER_TASK experiments/run_experiments.jl -- data=moons output_path=results mpi threaded n_individuals=100 n_runs=50 > experiments/logs/moons.log
#!/bin/bash
# SLURM job: ECCCo experiments on all three synthetic datasets in one run
# (linearly_separable, moons, circles). 1000 single-CPU MPI tasks.
#SBATCH --job-name="Synthetic (ECCCo)"
#SBATCH --time=02:00:00
#SBATCH --ntasks=1000
#SBATCH --cpus-per-task=1
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.
# 2023r1 software stack supplies Open MPI for the srun launch below.
module load 2023r1 openmpi
# Run all synthetic datasets under MPI; stdout is captured to a log file.
srun julia --project=experiments experiments/run_experiments.jl -- data=linearly_separable,moons,circles output_path=results mpi > experiments/synthetic.log
#!/bin/bash

# SLURM job header: ECCCo grid search on the California Housing dataset.
# (Remainder of the script continues below this header.)
#SBATCH --job-name="Grid-search California Housing (ECCCo)"
#SBATCH --time=01:40:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#!/bin/bash

# SLURM job header: ECCCo grid search on the German Credit dataset.
# (Remainder of the script continues below this header.)
#SBATCH --job-name="Grid-search German Credit (ECCCo)"
#SBATCH --time=01:30:00
#SBATCH --ntasks=15
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#SBATCH --mem-per-cpu=4GB
#SBATCH --account=research-eemcs-insy
#SBATCH --mail-type=END # Set mail type to 'END' to receive a mail when the job finishes.
#!/bin/bash

# SLURM job header: ECCCo grid search on the GMSC dataset.
# (Remainder of the script continues below this header.)
#SBATCH --job-name="Grid-search GMSC (ECCCo)"
#SBATCH --time=01:40:00
#SBATCH --ntasks=30
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
#!/bin/bash

# SLURM job header: ECCCo grid search on the Linearly Separable dataset.
# (Remainder of the script continues below this header.)
#SBATCH --job-name="Grid-search Linearly Separable (ECCCo)"
#SBATCH --time=01:30:00
#SBATCH --ntasks=15
#SBATCH --cpus-per-task=10
#SBATCH --partition=compute
"Generator tuning parameters."
DEFAULT_GENERATOR_TUNING = (
    # Penalty strengths for the three ECCCo loss components.
    Λ=[[0.1, 0.1, 0.1], [0.1, 0.1, 0.2], [0.1, 0.1, 0.5],],
    reg_strength=[0.0, 0.1, 0.5],
    # Candidate optimisers; gradient clipping guards against overshooting.
    # NOTE(review): reconstructed from a merged diff — confirm the clipped
    # optimisers (not plain Descent) are the intended post-commit state.
    opt=[
        Optimiser(ClipValue(0.01), Descent(0.01)),
        Optimiser(ClipValue(0.05), Descent(0.05)),
    ],
    # Learning-rate decay candidates as (rate, step) pairs; (0.0, 1) disables decay.
    decay=[(0.0, 1), (0.01, 1), (0.1, 1)],
)

"Generator tuning parameters for large datasets."
DEFAULT_GENERATOR_TUNING_LARGE = (
    # Same grid as DEFAULT_GENERATOR_TUNING; kept separate so large-dataset
    # runs can be narrowed independently.
    Λ=[[0.1, 0.1, 0.1], [0.1, 0.1, 0.2], [0.1, 0.1, 0.5],],
    reg_strength=[0.0, 0.1, 0.5],
    opt=[
        Optimiser(ClipValue(0.01), Descent(0.01)),
        Optimiser(ClipValue(0.05), Descent(0.05)),
    ],
    decay=[(0.0, 1), (0.01, 1), (0.1, 1)],
)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment