diff --git a/AUTHORS.md b/AUTHORS.md index 88903bc..2308b35 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -3,6 +3,7 @@ The list of contributors in alphabetical order: - [Agisilaos Kounelis](https://orcid.org/0000-0001-9312-3189) +- [Alp Tuna](https://orcid.org/0009-0001-1915-3993) - [Audrius Mecionis](https://orcid.org/0000-0002-3759-1663) - [Camila Diaz](https://orcid.org/0000-0001-5543-797X) - [Giuseppe Steduto](https://orcid.org/0009-0002-1258-8553) diff --git a/Dockerfile b/Dockerfile index cac853d..05e0bd7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -89,22 +89,36 @@ RUN chmod +x /usr/local/bin/magick # Are we debugging? ARG DEBUG=0 # hadolint ignore=DL3013 -RUN if [ "${DEBUG}" -gt 0 ]; then pip install --no-cache-dir -e ".[debug,xrootd]"; else pip install --no-cache-dir ".[xrootd]"; fi; +RUN if [ "${DEBUG}" -gt 0 ]; then pip install --no-cache-dir -e ".[debug]"; else pip install --no-cache-dir .; fi; # Are we building with locally-checked-out shared modules? # hadolint ignore=DL3008,DL3013 RUN apt-get update -y && \ apt-get install -y --no-install-recommends \ - git && \ + cmake \ + g++ \ + gcc \ + git \ + libssl-dev \ + make \ + python3.12-dev \ + uuid-dev && \ if test -e modules/reana-commons; then \ if [ "${DEBUG}" -gt 0 ]; then \ - pip install --no-cache-dir -e "modules/reana-commons[snakemake_reports]" --upgrade; \ + pip install --no-cache-dir -e "modules/reana-commons[snakemake,snakemake-xrootd]" --upgrade; \ else \ - pip install --no-cache-dir "modules/reana-commons[snakemake_reports]" --upgrade; \ + pip install --no-cache-dir "modules/reana-commons[snakemake,snakemake-xrootd]" --upgrade; \ fi \ fi && \ apt-get remove -y \ - git && \ + cmake \ + g++ \ + gcc \ + git \ + libssl-dev \ + make \ + python3.12-dev \ + uuid-dev && \ apt-get autoremove -y && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* diff --git a/reana_workflow_engine_snakemake/cli.py b/reana_workflow_engine_snakemake/cli.py index f608ea4..1baf672 100644 --- a/reana_workflow_engine_snakemake/cli.py +++ b/reana_workflow_engine_snakemake/cli.py @@ -20,7 +20,7 @@ from reana_commons.workflow_engine import create_workflow_engine_command from reana_workflow_engine_snakemake.config import LOGGING_MODULE -from reana_workflow_engine_snakemake.executor import run_jobs +from reana_workflow_engine_snakemake.runner import run_jobs logging.basicConfig(level=REANA_LOG_LEVEL, format=REANA_LOG_FORMAT) @@ -49,8 +49,6 @@ def run_snakemake_workflow_engine_adapter( log.info(f"Workflow spec received: {workflow_file}") publisher.publish_workflow_status(workflow_uuid, running_status) success = run_jobs( - rjc_api_client, - publisher, workflow_workspace, workflow_file, workflow_parameters, diff --git a/reana_workflow_engine_snakemake/executor.py b/reana_workflow_engine_snakemake/executor.py index cf9fb52..191866d 100644 --- a/reana_workflow_engine_snakemake/executor.py +++ b/reana_workflow_engine_snakemake/executor.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # This file is part of REANA. -# Copyright (C) 2021, 2022, 2023, 2024 CERN. +# Copyright (C) 2024 CERN. # # REANA is free software; you can redistribute it and/or modify it # under the terms of the MIT License; see LICENSE file for more details. 
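With the cli.py hunk above, `run_jobs` moves from the `executor` module to the new `runner` module and no longer receives `rjc_api_client` or `publisher` — the executor plugin now obtains those connections itself (note the new `JobControllerAPIClient` and `WorkflowStatusPublisher` imports in executor.py below). A minimal sketch of the resulting call site, using only names visible in this diff (the surrounding adapter code is abbreviated):

```python
from reana_workflow_engine_snakemake.runner import run_jobs

# Inside run_snakemake_workflow_engine_adapter(), after the "running"
# status has been published, the engine hands over workspace and spec:
success = run_jobs(
    workflow_workspace,    # absolute path to the workflow workspace
    workflow_file,         # Snakefile path relative to the workspace
    workflow_parameters,   # dict of user-supplied workflow parameters
    operational_options,   # e.g. {"report": "report.html"}; defaults to {}
)
```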
@@ -10,27 +10,31 @@ import os import logging -import asyncio -from collections import namedtuple -from typing import Callable +from dataclasses import dataclass, field +from typing import List, Generator, Optional from bravado.exception import HTTPNotFound from reana_commons.config import REANA_DEFAULT_SNAKEMAKE_ENV_IMAGE from reana_commons.utils import build_progress_message -from snakemake import snakemake -from snakemake.common import async_lock -from snakemake.executors import ClusterExecutor, GenericClusterExecutor -from snakemake.jobs import Job -from snakemake.resources import DefaultResources -from snakemake import scheduler # for monkeypatch + +from reana_commons.api_client import JobControllerAPIClient +from reana_commons.publisher import WorkflowStatusPublisher + +from snakemake_interface_executor_plugins.executors.base import SubmittedJobInfo +from snakemake_interface_executor_plugins.executors.remote import RemoteExecutor +from snakemake_interface_executor_plugins.settings import ( + CommonSettings, +) +from snakemake_interface_executor_plugins.jobs import ( + JobExecutorInterface, +) +from snakemake_interface_common.exceptions import WorkflowError from reana_workflow_engine_snakemake.config import ( - DEFAULT_SNAKEMAKE_REPORT_FILENAME, LOGGING_MODULE, MOUNT_CVMFS, - SNAKEMAKE_MAX_PARALLEL_JOBS, - POLL_JOBS_STATUS_SLEEP_IN_SECONDS, WORKFLOW_KERBEROS, + POLL_JOBS_STATUS_SLEEP_IN_SECONDS, JobStatus, RunStatus, ) @@ -42,21 +46,73 @@ log = logging.getLogger(LOGGING_MODULE) -REANAClusterJob = namedtuple("REANAClusterJob", "job callback error_callback") - +# Required: +# Specify common settings shared by various executors. + +common_settings = CommonSettings( + # define whether your executor plugin executes locally + # or remotely. In virtually all cases, it will be remote execution + # (cluster, cloud, etc.). Only Snakemake's standard execution + # plugins (snakemake-executor-plugin-dryrun, snakemake-executor-plugin-local) + # are expected to specify False here. + non_local_exec=True, + # Whether the executor implies to not have a shared file system + implies_no_shared_fs=False, + # whether to deploy workflow sources to default storage provider before execution + job_deploy_sources=True, + # whether arguments for setting the storage provider shall be passed to jobs + pass_default_storage_provider_args=True, + # whether arguments for setting default resources shall be passed to jobs + pass_default_resources_args=True, + # whether environment variables shall be passed to jobs (if False, use + # self.envvars() to obtain a dict of environment variables and their values + # and pass them e.g. as secrets to the execution backend) + pass_envvar_declarations_to_cmd=True, + # whether the default storage provider shall be deployed before the job is run on + # the remote node. 
+    # the remote node.
+    # Usually set to True if the executor does not assume a shared fs
+    auto_deploy_default_storage_provider=False,
+    # specify initial amount of seconds to sleep before checking for job status
+    init_seconds_before_status_checks=0,
+)
-class REANAClusterExecutor(GenericClusterExecutor):
-    """REANA Cluster Snakemake executor implementation."""
-    def run(
-        self,
-        job: Job,
-        callback: Callable = None,
-        submit_callback: Callable = None,
-        error_callback: Callable = None,
-    ):
-        """Override GenericClusterExecutor run method."""
-        super()._run(job)
+# Required:
+# Implementation of your executor
+class Executor(RemoteExecutor):
+    """REANA Snakemake executor implementation."""
+
+    def __post_init__(self):
+        """Override generic executor __post_init__ method."""
+        # IMPORTANT: in your plugin, only access methods and properties of
+        # Snakemake objects (like Workflow, Persistence, etc.) that are
+        # defined in the interfaces found in the
+        # snakemake-interface-executor-plugins and the
+        # snakemake-interface-common package.
+        # Other parts of those objects are NOT guaranteed to remain
+        # stable across new releases.
+
+        # To ensure that the used interfaces are not changing, you should
+        # depend on these packages as >=a.b.c,<d with d=a+1 (i.e. pin them
+        # below the next major version of each interface package).
+
+    async def check_active_jobs(
+        self, active_jobs: List[SubmittedJobInfo]
+    ) -> Generator[SubmittedJobInfo, None, None]:
+        """Override generic executor check_active_jobs method."""
+        # Check the status of active jobs.
+
+        # You have to iterate over the given list active_jobs.
+        # If you provided it above, each will have its external_jobid set according
+        # to the information you provided at submission time.
+        # For jobs that have finished successfully, you have to call
+        # self.report_job_success(active_job).
+        # For jobs that have errored, you have to call
+        # self.report_job_error(active_job).
+        # This will also take care of providing a proper error message.
+        # Usually there is no need to perform additional logging here.
+        # Jobs that are still running have to be yielded.
+        #
+        # For queries to the remote middleware, please use
+        # self.status_rate_limiter like this:
+        #
+        # async with self.status_rate_limiter:
+        #     # query remote middleware here
+        #
+        # To modify the time until the next call of this method,
+        # you can set self.next_sleep_seconds here.
+
+        self.next_sleep_seconds = POLL_JOBS_STATUS_SLEEP_IN_SECONDS
+
+        log.debug(f"Checking status of {len(active_jobs)} jobs")
+
+        for active_job in active_jobs:
+            async with self.status_rate_limiter:
+                try:
+                    job_id = active_job.external_jobid
+
+                    status = self._get_job_status_from_controller(job_id)
+
+                    if status == JobStatus.finished.name or active_job.job.is_norun:
+                        self.report_job_success(active_job)
+                        self._handle_job_status(
+                            active_job.external_jobid,
+                            active_job.job.name,
+                            job_status=JobStatus.finished,
+                            workflow_status=RunStatus.running,
+                        )
+
+                    elif status in (
+                        JobStatus.failed.name,
+                        JobStatus.stopped.name,
+                    ):
+                        self.report_job_error(active_job)
+                        self._handle_job_status(
+                            active_job.external_jobid,
+                            active_job.job.name,
+                            job_status=JobStatus.failed,
+                            workflow_status=RunStatus.failed,
+                        )
+
+                    else:
+                        yield active_job
+
+                except WorkflowError as e:
+                    log.error(
+                        f"Something went wrong while checking the status of the active jobs.\nError message: {str(e)}"
+                    )
+                    self.report_job_error(active_job)
+
+    def cancel_jobs(self, active_jobs: List[SubmittedJobInfo]):
+        """Override generic executor cancel_jobs method."""
+        # Cancel all active jobs.
+        # This method is called when Snakemake is interrupted.
+ + for active_job in active_jobs: + job_id = active_job.external_jobid + + self.rjc_api_client.delete_job(job_id) + + workflow_uuid = os.getenv("workflow_uuid", "default") + self.publisher.publish_workflow_status( + workflow_uuid, + RunStatus.failed, + message="Snakemake is interrupted and all jobs are cancelled", + ) @staticmethod - def _get_container_image(job: Job) -> str: + def _get_container_image(job: JobExecutorInterface) -> str: if job.container_img_url: container_image = job.container_img_url.replace("docker://", "") log.info(f"Environment: {container_image}") @@ -126,11 +261,14 @@ def _get_container_image(job: Job) -> str: return container_image def _handle_job_status( - self, job: Job, job_status: JobStatus, workflow_status: RunStatus + self, + job_id: str, + job_name: str, + job_status: JobStatus, + workflow_status: RunStatus, ) -> None: workflow_uuid = os.getenv("workflow_uuid", "default") - job_id = job.reana_job_id - log.info(f"{job.name} job is {job_status.name}. job_id: {job_id}") + log.info(f"{job_name} job is {job_status.name}. job_id: {job_id}") message = None if job_id: message = { @@ -142,26 +280,6 @@ def _handle_job_status( workflow_uuid, workflow_status.value, message=message ) - def handle_job_success(self, job: Job) -> None: - """Override job success method to publish job status.""" - # override handle_touch = True, to enable `touch()` in Snakefiles - # `touch()` is responsible for checking output files existence - super(ClusterExecutor, self).handle_job_success( - job, upload_remote=False, handle_log=False, handle_touch=True - ) - - self._handle_job_status( - job, job_status=JobStatus.finished, workflow_status=RunStatus.running - ) - - def handle_job_error(self, job: Job) -> None: - """Override job error method to publish job status.""" - super().handle_job_error(job) - - self._handle_job_status( - job, job_status=JobStatus.failed, workflow_status=RunStatus.failed - ) - def _get_job_status_from_controller(self, job_id: str) -> str: """Get job status from controller. @@ -189,109 +307,15 @@ def _get_job_status_from_controller(self, job_id: str) -> str: ) return JobStatus.failed.name - async def _wait_for_jobs(self): - """Override _wait_for_jobs method to poll job-controller for job statuses. + def _submit_job(self, rjc_api_client, publisher, job_request_body): + """Submit job to REANA Job Controller.""" + response = rjc_api_client.submit(**job_request_body) + job_id = str(response["job_id"]) - Original GenericClusterExecutor._wait_for_jobs method checks success/failure via .jobfinished or .jobfailed files. - """ - while True: - async with async_lock(self.lock): - if not self.wait: - return - active_jobs = self.active_jobs - self.active_jobs = [] - still_running = [] - - for active_job in active_jobs: - job_id = active_job.job.reana_job_id - - status = self._get_job_status_from_controller(job_id) - - if status == JobStatus.finished.name or active_job.job.is_norun: - active_job.callback(active_job.job) - elif status in ( - JobStatus.failed.name, - JobStatus.stopped.name, - ): - active_job.error_callback(active_job.job) - else: - still_running.append(active_job) - - async with async_lock(self.lock): - # Even though we have set active_jobs to a new empty list at the - # beginning of _wait_for_jobs, here that list might not be empty anymore - # as more jobs might have been added while we were fetching the job - # statuses from r-j-controller. For this reason we have to extend the - # list, instead of simply setting active_jobs to still_running. 
- self.active_jobs.extend(still_running) - - await asyncio.sleep(POLL_JOBS_STATUS_SLEEP_IN_SECONDS) - - -def submit_job(rjc_api_client, publisher, job_request_body): - """Submit job to REANA Job Controller.""" - response = rjc_api_client.submit(**job_request_body) - job_id = str(response["job_id"]) - - log.info(f"submitted job: {job_id}") - publish_job_submission( - workflow_uuid=job_request_body["workflow_uuid"], - publisher=publisher, - reana_job_id=job_id, - ) - return job_id - - -def run_jobs( - rjc_api_client, - publisher, - workflow_workspace, - workflow_file, - workflow_parameters, - operational_options={}, -): - """Run Snakemake jobs using custom REANA executor.""" - workflow_file_path = os.path.join(workflow_workspace, workflow_file) - common_snakemake_args = dict( - snakefile=workflow_file_path, - config=workflow_parameters, - workdir=workflow_workspace, - keep_logger=True, - # Since Snakemake v7.3.0, the workflow logs include Snakemake-percieved native - # resource information on memory and storage (`mem_mb`, `disk_mb`) for each - # Snakemake rule run on cloud. However, REANA overrides these when running - # user jobs, so we should hide these in order not to present any misleading - # information to users. For this reason, the default resources are overridden - # here with the only the "bare" ones (`tmpdir`). - default_resources=DefaultResources(mode="bare"), - ) - - def _generate_report(): - """Generate HTML report.""" - success = snakemake( - **common_snakemake_args, - report=operational_options.get("report", DEFAULT_SNAKEMAKE_REPORT_FILENAME), + log.info(f"submitted job: {job_id}") + publish_job_submission( + workflow_uuid=job_request_body["workflow_uuid"], + publisher=publisher, + reana_job_id=job_id, ) - if not success: - log.error("Error generating workflow HTML report.") - - # Inject RJC API client and workflow status publisher in the REANA executor - REANAClusterExecutor.rjc_api_client = rjc_api_client - REANAClusterExecutor.publisher = publisher - # Monkeypatch GenericClusterExecutor class in `scheduler` module - scheduler.GenericClusterExecutor = REANAClusterExecutor - - success = snakemake( - **common_snakemake_args, - printshellcmds=True, - # FIXME: Can be anything as it's not directly used. It's supposed - # to be the shell command to submit to job e.g. `condor_q`, - # but we call RJC API client instead. - cluster="reana", - notemp=True, - nodes=SNAKEMAKE_MAX_PARALLEL_JOBS, # enables DAG parallelization - ) - # Once the workflow is finished, generate the report, - # taking into account the metadata generated. - _generate_report() - return success + return job_id diff --git a/reana_workflow_engine_snakemake/runner.py b/reana_workflow_engine_snakemake/runner.py new file mode 100644 index 0000000..fdebf2d --- /dev/null +++ b/reana_workflow_engine_snakemake/runner.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# +# This file is part of REANA. +# Copyright (C) 2021, 2022, 2023, 2024 CERN. +# +# REANA is free software; you can redistribute it and/or modify it +# under the terms of the MIT License; see LICENSE file for more details. 
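Read together, the code removed above and the new runner module that follows trace the Snakemake 7 → 8 migration. As an orientation aid, here is a rough mapping from the old `snakemake()` keyword arguments to their Snakemake 8 counterparts, inferred from the two versions in this diff (a sketch, not an exhaustive or authoritative list):

```python
# Snakemake 7 (removed above)               Snakemake 8 (runner.py below)
# ---------------------------------------   ---------------------------------------------------------
# snakemake(snakefile=workflow_file_path)   snakemake_api.workflow(snakefile=Path(workflow_file_path))
# config=workflow_parameters                ConfigSettings(config=workflow_parameters)
# workdir=workflow_workspace                workdir=Path(workflow_workspace)
# printshellcmds=True                       OutputSettings(printshellcmds=True)
# nodes=SNAKEMAKE_MAX_PARALLEL_JOBS         ResourceSettings(nodes=SNAKEMAKE_MAX_PARALLEL_JOBS)
# cluster="reana" plus monkeypatching       ExecutorPluginRegistry().register_plugin("reana", ...)
#   scheduler.GenericClusterExecutor          followed by dag_api.execute_workflow(executor="reana")
# report=... via a second snakemake() run   dag_api.create_report(reporter="html", report_settings=...)
```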
+ +"""REANA-Workflow-Engine-Snakemake runner.""" + +import os +import logging +from pathlib import Path + +from snakemake.api import SnakemakeApi +from snakemake.settings.types import ( + ConfigSettings, + DAGSettings, + DeploymentSettings, + OutputSettings, + ResourceSettings, + StorageSettings, + WorkflowSettings, +) + +from snakemake_interface_executor_plugins.registry import ExecutorPluginRegistry +from snakemake_interface_report_plugins.registry import ReportPluginRegistry +from snakemake_interface_report_plugins.settings import ( + ReportSettingsBase, +) +from snakemake_interface_common.exceptions import WorkflowError + +from reana_workflow_engine_snakemake.config import ( + LOGGING_MODULE, + SNAKEMAKE_MAX_PARALLEL_JOBS, + DEFAULT_SNAKEMAKE_REPORT_FILENAME, +) + +from reana_workflow_engine_snakemake import executor as reana_executor + +log = logging.getLogger(LOGGING_MODULE) + + +my_registry = ExecutorPluginRegistry() +my_registry.register_plugin("reana", reana_executor) + + +def _generate_report(dag_api, workflow_workspace, report_file_name): + """Generate HTML report.""" + from snakemake.report import html_reporter + + registry = ReportPluginRegistry() + registry.register_plugin("html", html_reporter) + report_plugin = registry.get_plugin("html") + + report_args = ReportSettingsBase() + report_args.report_html_path = os.path.join(workflow_workspace, report_file_name) + report_args.report_html_stylesheet_path = None + + report_settings = report_plugin.get_settings(args=report_args) + dag_api.create_report(reporter="html", report_settings=report_settings) + + +def run_jobs( + workflow_workspace, + workflow_file, + workflow_parameters, + operational_options={}, +): + """Run Snakemake jobs using custom REANA executor.""" + workflow_file_path = os.path.join(workflow_workspace, workflow_file) + with SnakemakeApi( + OutputSettings( + printshellcmds=True, + ) + ) as snakemake_api: + try: + workflow_api = snakemake_api.workflow( + resource_settings=ResourceSettings(nodes=SNAKEMAKE_MAX_PARALLEL_JOBS), + config_settings=ConfigSettings(config=workflow_parameters), + storage_settings=StorageSettings(), + storage_provider_settings=dict(), + workflow_settings=WorkflowSettings(), + deployment_settings=DeploymentSettings(), + snakefile=Path(workflow_file_path), + workdir=Path(workflow_workspace), + ) + dag_api = workflow_api.dag( + dag_settings=DAGSettings(), + ) + + dag_api.execute_workflow( + executor="reana", + ) + + report_file_name = operational_options.get( + "report", DEFAULT_SNAKEMAKE_REPORT_FILENAME + ) + _generate_report(dag_api, workflow_workspace, report_file_name) + return True + + except WorkflowError as e: + snakemake_api.print_exception(e) + return False diff --git a/requirements.txt b/requirements.txt index ac7681d..7d3cb8b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,10 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --annotation-style=line --extra=xrootd --output-file=requirements.txt setup.py +# pip-compile --annotation-style=line --output-file=requirements.txt setup.py # amqp==5.2.0 # via kombu appdirs==1.4.4 # via fs, snakemake +argparse-dataclass==2.0.0 # via snakemake-interface-common, snakemake-interface-executor-plugins arrow==1.3.0 # via isoduration attrs==23.2.0 # via jsonschema, referencing bracex==2.4 # via wcmatch @@ -15,18 +16,21 @@ certifi==2024.7.4 # via requests charset-normalizer==3.3.2 # via requests checksumdir==1.1.9 # via reana-commons click==8.1.7 # via reana-commons 
-configargparse==1.7 # via snakemake +conda-inject==1.3.2 # via snakemake +configargparse==1.7 # via snakemake, snakemake-interface-common connection-pool==0.0.3 # via snakemake datrie==0.8.2 # via snakemake docutils==0.21.2 # via snakemake -dpath==2.2.0 # via yte +dpath==2.2.0 # via snakemake, yte fastjsonschema==2.20.0 # via nbformat fqdn==1.5.1 # via jsonschema fs==2.4.16 # via reana-commons +gherkin-official==29.0.0 # via reana-commons gitdb==4.0.11 # via gitpython gitpython==3.1.43 # via snakemake humanfriendly==10.0 # via snakemake idna==3.7 # via jsonschema, requests +immutables==0.21 # via snakemake importlib-resources==6.4.0 # via swagger-spec-validator isoduration==20.11.0 # via jsonschema jinja2==3.1.4 # via snakemake @@ -43,18 +47,19 @@ msgpack==1.0.8 # via bravado-core msgpack-python==0.5.6 # via bravado nbformat==5.10.4 # via snakemake packaging==24.1 # via snakemake +parse==1.20.2 # via reana-commons plac==1.4.3 # via yte platformdirs==4.2.2 # via jupyter-core psutil==6.0.0 # via snakemake -pulp==2.7.0 # via reana-commons, snakemake -pygments==2.18.0 # via snakemake +pulp==2.7.0 # via snakemake +pygments==2.18.0 # via reana-workflow-engine-snakemake (setup.py) python-dateutil==2.9.0.post0 # via arrow, bravado, bravado-core pytz==2024.1 # via bravado-core -pyyaml==6.0.1 # via bravado, bravado-core, reana-commons, snakemake, swagger-spec-validator, yte -reana-commons[snakemake-reports]==0.95.0a3 # via reana-workflow-engine-snakemake (setup.py) +pyyaml==6.0.1 # via bravado, bravado-core, conda-inject, reana-commons, snakemake, swagger-spec-validator, yte +reana-commons[snakemake,snakemake-xrootd]==0.95.0a5 # via reana-workflow-engine-snakemake (setup.py) referencing==0.35.1 # via jsonschema, jsonschema-specifications requests==2.32.3 # via bravado, bravado-core, snakemake -reretry==0.11.8 # via snakemake +reretry==0.11.8 # via snakemake, snakemake-interface-storage-plugins rfc3339-validator==0.1.4 # via jsonschema rfc3987==1.3.8 # via jsonschema rpds-py==0.19.0 # via jsonschema, referencing @@ -62,12 +67,15 @@ simplejson==3.19.2 # via bravado, bravado-core six==1.16.0 # via bravado, bravado-core, fs, mock, python-dateutil, rfc3339-validator smart-open==7.0.4 # via snakemake smmap==5.0.1 # via gitdb -snakemake[reports]==7.32.4 # via reana-commons -stopit==1.1.2 # via snakemake +snakemake==8.24.1 # via reana-commons +snakemake-interface-common==1.17.4 # via reana-commons, snakemake, snakemake-interface-executor-plugins, snakemake-interface-report-plugins, snakemake-interface-storage-plugins, snakemake-storage-plugin-xrootd +snakemake-interface-executor-plugins==9.3.2 # via reana-commons, snakemake +snakemake-interface-report-plugins==1.1.0 # via reana-commons, snakemake +snakemake-interface-storage-plugins==3.3.0 # via reana-commons, snakemake, snakemake-storage-plugin-xrootd +snakemake-storage-plugin-xrootd==0.1.4 # via reana-commons swagger-spec-validator==3.0.4 # via bravado-core tabulate==0.9.0 # via snakemake -throttler==1.2.2 # via snakemake -toposort==1.10 # via snakemake +throttler==1.2.2 # via snakemake, snakemake-interface-executor-plugins, snakemake-interface-storage-plugins traitlets==5.14.3 # via jupyter-core, nbformat types-python-dateutil==2.9.0.20240316 # via arrow typing-extensions==4.12.2 # via bravado, swagger-spec-validator @@ -77,8 +85,8 @@ vine==5.1.0 # via amqp, kombu wcmatch==8.4.1 # via reana-commons webcolors==24.6.0 # via jsonschema werkzeug==3.0.3 # via reana-commons -wrapt==1.16.0 # via smart-open, snakemake -xrootd==5.6.0 # via 
reana-workflow-engine-snakemake (setup.py) +wrapt==1.16.0 # via smart-open, snakemake, snakemake-interface-storage-plugins +xrootd==5.7.1 # via snakemake-storage-plugin-xrootd yte==1.5.4 # via snakemake # The following packages are considered to be unsafe in a requirements file: diff --git a/setup.py b/setup.py index e6c9b39..0675c02 100644 --- a/setup.py +++ b/setup.py @@ -32,9 +32,6 @@ "tests": [ "pytest-reana>=0.95.0a2,<0.96.0", ], - "xrootd": [ - "xrootd==5.6.0", - ], } extras_require["all"] = [] @@ -44,7 +41,8 @@ extras_require["all"].extend(reqs) install_requires = [ - "reana-commons[snakemake_reports]>=0.95.0a2,<0.96.0", + "reana-commons[snakemake,snakemake-xrootd]>=0.95.0a5,<0.96.0", + "pygments>=2.18.0", # necessary for Snakemake reports ] packages = find_packages()
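Net effect of the setup.py and requirements.txt hunks: the engine stops pinning `xrootd` through its own extra, and XRootD support instead arrives transitively via the reana-commons extras. A sketch of the resulting dependency shape, with versions copied from the hunks above (the dependency chain is an inference from the `# via` annotations, not pip output):

```python
# setup.py after this change (excerpt):
install_requires = [
    "reana-commons[snakemake,snakemake-xrootd]>=0.95.0a5,<0.96.0",
    "pygments>=2.18.0",  # necessary for Snakemake reports
]

# How XRootD support is now resolved, per the "# via" annotations
# in requirements.txt:
#   reana-commons[snakemake-xrootd]
#     -> snakemake-storage-plugin-xrootd==0.1.4
#          -> xrootd==5.7.1
#
# This also explains the Dockerfile hunk at the top of the diff: the build
# temporarily installs cmake/g++/gcc/libssl-dev/uuid-dev/python3.12-dev,
# presumably to compile the xrootd Python bindings from source, and removes
# them again after installation to keep the image small.
```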