Commit 802fc322 authored by leo.bouscarrat's avatar leo.bouscarrat
Browse files

Results for california

parent 5f15d2a5
...@@ -17,9 +17,7 @@ ...@@ -17,9 +17,7 @@
2, 2,
3, 3,
4, 4,
5, 5
6,
7
], ],
"subsets_used": "train+dev,train+dev", "subsets_used": "train+dev,train+dev",
"normalize_weights": false, "normalize_weights": false,
...@@ -31,7 +29,7 @@ ...@@ -31,7 +29,7 @@
], ],
"job_number": -1, "job_number": -1,
"extraction_strategy": "none", "extraction_strategy": "none",
"overwrite": false, "overwrite": true,
"extracted_forest_size": [ "extracted_forest_size": [
33, 33,
67, 67,
......
...@@ -17,9 +17,7 @@ ...@@ -17,9 +17,7 @@
2, 2,
3, 3,
4, 4,
5, 5
6,
7
], ],
"subsets_used": "train+dev,train+dev", "subsets_used": "train+dev,train+dev",
"normalize_weights": false, "normalize_weights": false,
...@@ -31,7 +29,7 @@ ...@@ -31,7 +29,7 @@
], ],
"job_number": -1, "job_number": -1,
"extraction_strategy": "omp", "extraction_strategy": "omp",
"overwrite": false, "overwrite": true,
"extracted_forest_size": [ "extracted_forest_size": [
33, 33,
67, 67,
......
...@@ -17,9 +17,7 @@ ...@@ -17,9 +17,7 @@
2, 2,
3, 3,
4, 4,
5, 5
6,
7
], ],
"subsets_used": "train+dev,train+dev", "subsets_used": "train+dev,train+dev",
"normalize_weights": false, "normalize_weights": false,
...@@ -31,7 +29,7 @@ ...@@ -31,7 +29,7 @@
], ],
"job_number": -1, "job_number": -1,
"extraction_strategy": "random", "extraction_strategy": "random",
"overwrite": false, "overwrite": true,
"extracted_forest_size": [ "extracted_forest_size": [
33, 33,
67, 67,
......
#!/bin/bash
# Submit stage-4 training jobs to the OAR batch scheduler, one job per dataset.
# Each job activates the conda env and runs code/train.py with the stage-4
# experiment configuration; results land in models/<dataset>/stage4.
#
# NOTE(review): core_number and walltime are defined below but the active
# oarsub call hard-codes /core=1 and walltime=1:00 — confirm whether these
# variables were meant to be used instead.
core_number=5
walltime=1:00
seeds='1 2 3 4 5'
for dataset in california_housing
#for dataset in kin8nm
#for dataset in gamma
#for dataset in breast_cancer diabetes diamonds california_housing boston linnerud steel-plates kr-vs-kp digits iris kin8nm lfw_pairs linnerud olivetti_faces wine spambase gamma
do
  # Baseline run (no forest extraction); --overwrite replaces earlier results.
  oarsub -p "(gpu is null)" -n "$dataset base" -l /core=1,walltime=1:00 "conda activate test_env && python code/train.py --dataset_name=$dataset --seeds $seeds --extraction_strategy=none --save_experiment_configuration 4 none_with_params --extracted_forest_size_stop=1 --extracted_forest_size_samples=30 --experiment_id=1 --models_dir=models/$dataset/stage4 --subsets_used train+dev,train+dev --overwrite"
  # The random and omp extraction strategies are disabled for this run:
  #oarsub -p "(gpu is null)" -n "$dataset random" -l /core=$core_number,walltime=1:00 "conda activate test_env && python code/train.py --dataset_name=$dataset --seeds $seeds --extraction_strategy=random --save_experiment_configuration 4 random_with_params --extracted_forest_size_stop=1 --extracted_forest_size_samples=30 --experiment_id=2 --models_dir=models/$dataset/stage4 --subsets_used train+dev,train+dev --overwrite"
  #oarsub -p "(gpu is null)" -n "$dataset omp" -l /core=$core_number,walltime=1:00 "conda activate test_env && python code/train.py --dataset_name=$dataset --seeds $seeds --extraction_strategy=omp --save_experiment_configuration 4 omp_with_params --extracted_forest_size_stop=1 --extracted_forest_size_samples=30 --experiment_id=3 --models_dir=models/$dataset/stage4 --subsets_used train+dev,train+dev --overwrite"
done
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment