diff --git a/snakemake_executor_plugin_slurm/__init__.py b/snakemake_executor_plugin_slurm/__init__.py
index e17d72b0..54fedab2 100644
--- a/snakemake_executor_plugin_slurm/__init__.py
+++ b/snakemake_executor_plugin_slurm/__init__.py
@@ -6,6 +6,7 @@
 import atexit
 import csv
 from io import StringIO
+from itertools import groupby
 import os
 from pathlib import Path
 import re
@@ -168,6 +169,36 @@ def warn_on_jobcontext(self, done=None):
     def additional_general_args(self):
         return "--executor slurm-jobstep --jobs 1"
 
+    def run_jobs(self, jobs: List[JobExecutorInterface]):
+        """Submit the given jobs, grouping jobs of the same rule together.
+
+        Jobs sharing a rule are candidates for a single SLURM array job.
+        Until array submission is implemented, every job is submitted
+        individually via run_job().
+        """
+
+        def by_rule(job):
+            return job.rule.name
+
+        # itertools.groupby only merges *adjacent* items with equal keys, so
+        # the jobs must be sorted by rule name first; otherwise same-rule
+        # jobs that are not contiguous in the input would land in separate
+        # groups and never be batched into one array job.
+        for rule_name, group in groupby(sorted(jobs, key=by_rule), key=by_rule):
+            same_rule_jobs = list(group)  # Materialize the generator
+            if len(same_rule_jobs) == 1:
+                self.run_job(same_rule_jobs[0])
+            else:
+                # TODO submit as array
+                # share code with run_job
+
+                # TODO in the future: give a hint to the scheduler to select preferably
+                # many jobs from the same rule if possible, in order to have
+                # more efficient array jobs. This should be somehow tunable, because
+                # it might contradict other efficiency goals.
+
+                # Until array submission lands, fall back to individual
+                # submission so that no jobs are silently dropped.
+                for single_job in same_rule_jobs:
+                    self.run_job(single_job)
     def run_job(self, job: JobExecutorInterface):
         # Implement here how to run a job.
         # You can access the job's resources, etc.