Skip to content
Snippets Groups Projects
Commit f3d8416b authored by valentin.emiya's avatar valentin.emiya
Browse files

fix test keep eigen vec

parent 42e53ce7
No related branches found
No related tags found
No related merge requests found
Pipeline #6091 passed
...@@ -65,7 +65,8 @@ class VarianceExperiment(SolveTffExperiment): ...@@ -65,7 +65,8 @@ class VarianceExperiment(SolveTffExperiment):
def __init__(self, force_reset=False, suffix=''): def __init__(self, force_reset=False, suffix=''):
SolveTffExperiment.__init__(self, SolveTffExperiment.__init__(self,
force_reset=force_reset, force_reset=force_reset,
suffix='Variance' + suffix) suffix='Variance' + suffix,
keep_eigenvectors=[])
def display_results(self): def display_results(self):
res = self.load_results(array_type='xarray') res = self.load_results(array_type='xarray')
......
...@@ -71,7 +71,7 @@ class TestSolveTffExperiment(unittest.TestCase): ...@@ -71,7 +71,7 @@ class TestSolveTffExperiment(unittest.TestCase):
light_exp = SolveTffExperiment.get_experiment( light_exp = SolveTffExperiment.get_experiment(
setting='light', force_reset=False) setting='light', force_reset=False)
for idt in range(light_exp.n_tasks): for idt in light_exp.keep_eigenvectors:
light_exp.plot_task(idt=idt, fontsize=16) light_exp.plot_task(idt=idt, fontsize=16)
plt.close('all') plt.close('all')
light_exp.plot_results() light_exp.plot_results()
......
...@@ -71,7 +71,7 @@ class TestVarianceExperiment(unittest.TestCase): ...@@ -71,7 +71,7 @@ class TestVarianceExperiment(unittest.TestCase):
light_exp = VarianceExperiment.get_experiment( light_exp = VarianceExperiment.get_experiment(
setting='light', force_reset=False) setting='light', force_reset=False)
for idt in range(light_exp.n_tasks): for idt in light_exp.keep_eigenvectors:
light_exp.plot_task(idt=idt, fontsize=16) light_exp.plot_task(idt=idt, fontsize=16)
plt.close('all') plt.close('all')
light_exp.plot_results() light_exp.plot_results()
......
...@@ -61,7 +61,7 @@ import sys ...@@ -61,7 +61,7 @@ import sys
def generate_slurm_script(script_file_path, xp_var_name, task_ids=None, def generate_slurm_script(script_file_path, xp_var_name, task_ids=None,
n_simultaneous_jobs=10, slurm_walltime='02:00:00', n_simultaneous_jobs=10, slurm_walltime='02:00:00',
activate_env_command=None, use_gpu=False): activate_env_command=None, use_cpu_gpu='cpu'):
"""Generate a script to launch an experiment using Slurm. """Generate a script to launch an experiment using Slurm.
Tasks are divided into batches that are executed by oar jobs. Tasks are divided into batches that are executed by oar jobs.
...@@ -95,9 +95,8 @@ def generate_slurm_script(script_file_path, xp_var_name, task_ids=None, ...@@ -95,9 +95,8 @@ def generate_slurm_script(script_file_path, xp_var_name, task_ids=None,
``source activate some_conda_env`` when using conda. ``source activate some_conda_env`` when using conda.
If ``activate_env_command`` is ``None``, no virtual environment is If ``activate_env_command`` is ``None``, no virtual environment is
activated. activated.
use_gpu : bool use_cpu_gpu : {'all', 'cpu', 'gpu'}
Flag specifying if a gpu ressource is needed when running the Parameter to choose using CPU, GPU or both.
experiment. This has not been implemented yet.
""" """
script_file_path = Path(script_file_path) script_file_path = Path(script_file_path)
script_dir = script_file_path.parent script_dir = script_file_path.parent
...@@ -127,10 +126,8 @@ def generate_slurm_script(script_file_path, xp_var_name, task_ids=None, ...@@ -127,10 +126,8 @@ def generate_slurm_script(script_file_path, xp_var_name, task_ids=None,
script += '#SBATCH --output={}/stdout_%A_%a.slurm\n'.format(log_dir) script += '#SBATCH --output={}/stdout_%A_%a.slurm\n'.format(log_dir)
script += '#SBATCH --error={}/stderr_%A_%a.slurm\n'.format(log_dir) script += '#SBATCH --error={}/stderr_%A_%a.slurm\n'.format(log_dir)
script += '#SBATCH --time={}\n'.format(slurm_walltime) script += '#SBATCH --time={}\n'.format(slurm_walltime)
# if use_gpu: if use_cpu_gpu in ('cpu', 'gpu'):
# script += '#SBATCH -p gpu IS NOT NULL\n' script += '#SBATCH --partition={}\n'.format(use_cpu_gpu)
# else:
# script += '#SBATCH -p gpu IS NULL\n'
script += 'srun -N1 -n1 {}/run_$SLURM_ARRAY_TASK_ID.sh'.format(script_dir) script += 'srun -N1 -n1 {}/run_$SLURM_ARRAY_TASK_ID.sh'.format(script_dir)
......
0% Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment