From f3d8416b8313d95a5eb0280f9ff9bd671ba67c62 Mon Sep 17 00:00:00 2001
From: "valentin.emiya" <valentin.emiya@lif.univ-mrs.fr>
Date: Fri, 4 Dec 2020 08:51:45 +0100
Subject: [PATCH] fix test keep eigen vec

---
 python/tffpy/experiments/exp_variance.py             |  3 ++-
 .../tffpy/experiments/tests/test_exp_solve_tff.py    |  2 +-
 python/tffpy/experiments/tests/test_exp_variance.py  |  2 +-
 python/tffpy/experiments/yafe_slurm.py                | 13 +++++--------
 4 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/python/tffpy/experiments/exp_variance.py b/python/tffpy/experiments/exp_variance.py
index 20ff950..749fadc 100644
--- a/python/tffpy/experiments/exp_variance.py
+++ b/python/tffpy/experiments/exp_variance.py
@@ -65,7 +65,8 @@ class VarianceExperiment(SolveTffExperiment):
     def __init__(self, force_reset=False, suffix=''):
         SolveTffExperiment.__init__(self,
                                     force_reset=force_reset,
-                                    suffix='Variance' + suffix)
+                                    suffix='Variance' + suffix,
+                                    keep_eigenvectors=[])
 
     def display_results(self):
         res = self.load_results(array_type='xarray')
diff --git a/python/tffpy/experiments/tests/test_exp_solve_tff.py b/python/tffpy/experiments/tests/test_exp_solve_tff.py
index 9c165e3..204caaf 100644
--- a/python/tffpy/experiments/tests/test_exp_solve_tff.py
+++ b/python/tffpy/experiments/tests/test_exp_solve_tff.py
@@ -71,7 +71,7 @@ class TestSolveTffExperiment(unittest.TestCase):
         light_exp = SolveTffExperiment.get_experiment(
             setting='light', force_reset=False)
 
-        for idt in range(light_exp.n_tasks):
+        for idt in light_exp.keep_eigenvectors:
             light_exp.plot_task(idt=idt, fontsize=16)
             plt.close('all')
         light_exp.plot_results()
diff --git a/python/tffpy/experiments/tests/test_exp_variance.py b/python/tffpy/experiments/tests/test_exp_variance.py
index 1a2d7ce..fbb8c6e 100644
--- a/python/tffpy/experiments/tests/test_exp_variance.py
+++ b/python/tffpy/experiments/tests/test_exp_variance.py
@@ -71,7 +71,7 @@ class TestVarianceExperiment(unittest.TestCase):
         light_exp = VarianceExperiment.get_experiment(
             setting='light', force_reset=False)
 
-        for idt in range(light_exp.n_tasks):
+        for idt in light_exp.keep_eigenvectors:
             light_exp.plot_task(idt=idt, fontsize=16)
             plt.close('all')
         light_exp.plot_results()
diff --git a/python/tffpy/experiments/yafe_slurm.py b/python/tffpy/experiments/yafe_slurm.py
index 0ecb018..6a4bed5 100644
--- a/python/tffpy/experiments/yafe_slurm.py
+++ b/python/tffpy/experiments/yafe_slurm.py
@@ -61,7 +61,7 @@ import sys
 
 def generate_slurm_script(script_file_path, xp_var_name, task_ids=None,
                           n_simultaneous_jobs=10, slurm_walltime='02:00:00',
-                          activate_env_command=None, use_gpu=False):
+                          activate_env_command=None, use_cpu_gpu='cpu'):
     """Generate a script to launch an experiment using Slurm.
 
     Tasks are divided into batches that are executed by oar jobs.
@@ -95,9 +95,8 @@
         ``source activate some_conda_env`` when using conda.
         If ``activate_env_command`` is ``None``, no virtual environment
         is activated.
-    use_gpu : bool
-        Flag specifying if a gpu ressource is needed when running the
-        experiment. This has not been implemented yet.
+    use_cpu_gpu : {'all', 'cpu', 'gpu'}
+        Whether to run on CPU ('cpu'), GPU ('gpu') or both ('all').
""" script_file_path = Path(script_file_path) script_dir = script_file_path.parent @@ -127,10 +126,8 @@ def generate_slurm_script(script_file_path, xp_var_name, task_ids=None, script += '#SBATCH --output={}/stdout_%A_%a.slurm\n'.format(log_dir) script += '#SBATCH --error={}/stderr_%A_%a.slurm\n'.format(log_dir) script += '#SBATCH --time={}\n'.format(slurm_walltime) - # if use_gpu: - # script += '#SBATCH -p gpu IS NOT NULL\n' - # else: - # script += '#SBATCH -p gpu IS NULL\n' + if use_cpu_gpu in ('cpu', 'gpu'): + script += '#SBATCH --partition={}\n'.format(use_cpu_gpu) script += 'srun -N1 -n1 {}/run_$SLURM_ARRAY_TASK_ID.sh'.format(script_dir) -- GitLab