From d4c85db2d2e68ee484a4b0c1878af1812b7e9b34 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 7 Jul 2021 10:29:57 +0100 Subject: [PATCH 1/5] Fix 2 typos --- gxformat2/abstract.py | 4 ++-- gxformat2/cytoscape.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/gxformat2/abstract.py b/gxformat2/abstract.py index 7b99c0e..bd568fd 100644 --- a/gxformat2/abstract.py +++ b/gxformat2/abstract.py @@ -11,8 +11,8 @@ CWL_VERSION = "v1.2" SCRIPT_DESCRIPTION = """ -This script converts the an executable Galaxy workflow (in either format - -Format 2 or native .ga) into an abstract CWL representation. +This script converts an executable Galaxy workflow (in either format - Format 2 +or native .ga) into an abstract CWL representation. In order to represent Galaxy tool executions in the Common Workflow Language workflow language, they are serialized as v1.2+ abstract 'Operation' classes. diff --git a/gxformat2/cytoscape.py b/gxformat2/cytoscape.py index d2dc985..c5b0e68 100644 --- a/gxformat2/cytoscape.py +++ b/gxformat2/cytoscape.py @@ -13,8 +13,8 @@ CYTOSCAPE_JS_TEMPLATE = pkg_resources.resource_filename(__name__, 'cytoscape.html') MAIN_TS_PREFIX = "toolshed.g2.bx.psu.edu/repos/" SCRIPT_DESCRIPTION = """ -This script converts the an executable Galaxy workflow (in either format - -Format 2 or native .ga) into a format for visualization with Cytoscape +This script converts an executable Galaxy workflow (in either format - Format 2 +or native .ga) into a format for visualization with Cytoscape (https://cytoscape.org/). If the target output path ends with .html this script will output a HTML From 4e7874c1697b28252b7c2dc87e6ff3fee6026678 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 7 Jul 2021 12:31:47 +0100 Subject: [PATCH 2/5] Fixes for mypy Fix: ``` gxformat2/yaml.py:8: error: Library stubs not installed for "yaml" (or incompatible with Python 3.8) gxformat2/yaml.py:8: note: Hint: "python3 -m pip install types-PyYAML" gxformat2/interface.py:10: error: Library stubs not installed for "six" (or incompatible with Python 3.8) gxformat2/interface.py:10: note: Hint: "python3 -m pip install types-six" gxformat2/cytoscape.py:8: error: Library stubs not installed for "pkg_resources" (or incompatible with Python 3.8) gxformat2/cytoscape.py:8: note: Hint: "python3 -m pip install types-setuptools" gxformat2/schema/v19_09.py:76: error: Library stubs not installed for "requests" (or incompatible with Python 3.8) gxformat2/schema/v19_09.py:76: note: Hint: "python3 -m pip install types-requests" gxformat2/schema/v19_09.py:76: note: (or run "mypy --install-types" to install all missing stub packages) gxformat2/schema/v19_09.py:76: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports ``` --- gxformat2/cytoscape.py | 4 +--- gxformat2/interface.py | 4 +--- requirements.txt | 1 - tox.ini | 2 ++ 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/gxformat2/cytoscape.py b/gxformat2/cytoscape.py index c5b0e68..2fe71e4 100644 --- a/gxformat2/cytoscape.py +++ b/gxformat2/cytoscape.py @@ -5,12 +5,10 @@ import string import sys -import pkg_resources - from gxformat2.model import ensure_step_position from gxformat2.normalize import steps_normalized -CYTOSCAPE_JS_TEMPLATE = pkg_resources.resource_filename(__name__, 'cytoscape.html') +CYTOSCAPE_JS_TEMPLATE = os.path.join(os.path.dirname(__file__), 'cytoscape.html') MAIN_TS_PREFIX = "toolshed.g2.bx.psu.edu/repos/" SCRIPT_DESCRIPTION = """ This script converts an executable Galaxy workflow (in either format - 
Format 2 diff --git a/gxformat2/interface.py b/gxformat2/interface.py index f42202b..3a3dc99 100644 --- a/gxformat2/interface.py +++ b/gxformat2/interface.py @@ -7,11 +7,9 @@ import abc import bioblend # type: ignore -import six -@six.add_metaclass(abc.ABCMeta) -class ImporterGalaxyInterface(object): +class ImporterGalaxyInterface(metaclass=abc.ABCMeta): """An abstract interface describing Galaxy operations used by gxformat2. Specifically containing definitions of operations required to load diff --git a/requirements.txt b/requirements.txt index 0673807..85d0339 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ -six>=1.9.0 bioblend pyyaml diff --git a/tox.ini b/tox.ini index 45b6c1a..8fc4143 100644 --- a/tox.ini +++ b/tox.ini @@ -24,6 +24,8 @@ deps = lint,lintdocstrings: flake8 lintdocstrings: flake8_docstrings mypy: mypy + mypy: types-PyYAML + mypy: types-requests unit,lintdocs: -rrequirements.txt unit,lintdocs: -rdev-requirements.txt lintreadme: readme From 219123988ab6ab0fd916ac0fc07d19e4d230fd50 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 7 Jul 2021 13:06:54 +0100 Subject: [PATCH 3/5] pyupgrade to Python 3.6 --- docs/conf.py | 17 ++++++++--------- gxformat2/_labels.py | 4 ++-- gxformat2/abstract.py | 4 ++-- gxformat2/converter.py | 25 ++++++++++++------------- gxformat2/cytoscape.py | 8 ++++---- gxformat2/export.py | 4 ++-- gxformat2/interface.py | 2 +- gxformat2/lint.py | 8 ++++---- gxformat2/linting.py | 10 +++++----- gxformat2/main.py | 2 +- gxformat2/markdown_parse.py | 8 ++++---- gxformat2/model.py | 2 +- gxformat2/normalize.py | 12 ++++++------ gxformat2/yaml.py | 2 +- scripts/commit_version.py | 14 +++++++------- scripts/lint_sphinx_output.py | 2 -- scripts/new_version.py | 26 +++++++++++--------------- scripts/print_version_for_release.py | 3 +-- setup.py | 5 ++--- tests/_helpers.py | 3 +-- tests/test_basic.py | 2 +- tests/test_cytoscape.py | 6 +++--- tests/test_export_abstract.py | 6 +++--- tests/test_lint.py | 4 ++-- tests/test_markdown_validate.py | 2 +- tests/test_post_job_action_import.py | 4 ++-- tests/test_to_native.py | 4 ++-- 27 files changed, 89 insertions(+), 100 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ab524e4..87f92d9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # complexity documentation build configuration file, created by # sphinx-quickstart on Tue Jul 9 22:26:36 2013. @@ -62,8 +61,8 @@ master_doc = 'index' # General information about the project. -project = u'gxformat2' -copyright = u'2015' +project = 'gxformat2' +copyright = '2015' # The version info for the project you're documenting, acts as replacement # for |version| and |release|, also used in various other places throughout @@ -217,8 +216,8 @@ # [howto/manual]). latex_documents = [ ('index', 'gxformat2.tex', - u'gxformat2 Documentation', - u'Galaxy Project and Community', 'manual'), + 'gxformat2 Documentation', + 'Galaxy Project and Community', 'manual'), ] # The name of an image file (relative to this directory) to place at @@ -248,8 +247,8 @@ # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'galaxy-lib', - u'Galaxy-lib Documentation', - [u'Galaxy Project and Community'], 1) + 'Galaxy-lib Documentation', + ['Galaxy Project and Community'], 1) ] # If true, show URL addresses after external links. 
@@ -263,8 +262,8 @@ # dir menu entry, description, category) texinfo_documents = [ ('index', 'galaxy-lib', - u'Galaxy-Lib Documentation', - u'Galaxy Project and Community', + 'Galaxy-Lib Documentation', + 'Galaxy Project and Community', 'galaxy-lib', 'One line description of project.', 'Miscellaneous'), diff --git a/gxformat2/_labels.py b/gxformat2/_labels.py index 5bee51b..346f0da 100644 --- a/gxformat2/_labels.py +++ b/gxformat2/_labels.py @@ -1,7 +1,7 @@ """Utilities for handling unlabelled objects when translating workflow formats.""" -class Labels(object): +class Labels: """Track labels assigned and generate anonymous ones.""" def __init__(self): @@ -13,7 +13,7 @@ def ensure_new_output_label(self, label: str): """Ensure supplied label has value or generate an anonymous one.""" if label is None: self.anonymous_labels += 1 - label = "_anonymous_output_%d" % self.anonymous_labels + label = f"_anonymous_output_{self.anonymous_labels}" assert label not in self.seen_labels self.seen_labels.add(label) return label diff --git a/gxformat2/abstract.py b/gxformat2/abstract.py index bd568fd..4278154 100644 --- a/gxformat2/abstract.py +++ b/gxformat2/abstract.py @@ -73,7 +73,7 @@ def _format2_step_to_abstract(format2_step, requirements): step_run = from_dict(format2_run, subworkflow=True) abstract_step["run"] = step_run else: - raise NotImplementedError("Unknown runnabled type encountered [%s]" % format2_run_class) + raise NotImplementedError(f"Unknown runnabled type encountered [{format2_run_class}]") else: step_run = { "class": "Operation", @@ -166,7 +166,7 @@ def main(argv=None): if workflow_path == "-": workflow_dict = ordered_load(sys.stdin) else: - with open(workflow_path, "r") as f: + with open(workflow_path) as f: workflow_dict = ordered_load(f) abstract_dict = from_dict(workflow_dict) diff --git a/gxformat2/converter.py b/gxformat2/converter.py index 236bd36..1e0ae9d 100644 --- a/gxformat2/converter.py +++ b/gxformat2/converter.py @@ -1,5 +1,4 @@ """Functionality for converting a Format 2 workflow into a standard Galaxy workflow.""" -from __future__ import print_function import argparse import copy @@ -94,13 +93,13 @@ def rename_arg(argument): def clean_connection(value): if value and "#" in value and SUPPORT_LEGACY_CONNECTIONS: # Hope these are just used by Galaxy testing workflows and such, and not in production workflows. - log.warn("Legacy workflow syntax for connections [%s] will not be supported in the future" % value) + log.warn(f"Legacy workflow syntax for connections [{value}] will not be supported in the future") value = value.replace("#", "/", 1) else: return value -class ImportOptions(object): +class ImportOptions: def __init__(self): self.deduplicate_subworkflows = False @@ -151,7 +150,7 @@ def steps_as_list(format2_workflow: dict, add_ids: bool = False, inputs_offset: Add keys as labels instead of IDs. Why am I doing this? 
""" if "steps" not in format2_workflow: - raise Exception("No 'steps' key in dict, keys are %s" % format2_workflow.keys()) + raise Exception(f"No 'steps' key in dict, keys are {format2_workflow.keys()}") steps = format2_workflow["steps"] steps = convert_dict_to_id_list_if_needed(steps, add_label=True, mutate=mutate) if add_ids: @@ -229,9 +228,9 @@ def _python_to_workflow(as_python, conversion_context): step_type = step.get("type", "tool") step_type = STEP_TYPE_ALIASES.get(step_type, step_type) if step_type not in STEP_TYPES: - raise Exception("Unknown step type encountered %s" % step_type) + raise Exception(f"Unknown step type encountered {step_type}") step["type"] = step_type - eval("transform_%s" % step_type)(conversion_context, step) + eval(f"transform_{step_type}")(conversion_context, step) outputs = as_python.pop("outputs", []) outputs = convert_dict_to_id_list_if_needed(outputs) @@ -512,7 +511,7 @@ def run_tool_to_step(conversion_context, step, run_action): step["tool_uuid"] = tool_description.get("uuid") -class BaseConversionContext(object): +class BaseConversionContext: def __init__(self): self.labels = {} @@ -559,7 +558,7 @@ def get_runnable_description(self, run_action): run_action_path = run_action["@import"] runnable_path = os.path.join(self.workflow_directory, run_action_path) - with open(runnable_path, "r") as f: + with open(runnable_path) as f: runnable_description = ordered_load(f) run_action = runnable_description @@ -572,7 +571,7 @@ def get_runnable_description(self, run_action): class ConversionContext(BaseConversionContext): def __init__(self, galaxy_interface, workflow_directory, import_options: Optional[ImportOptions] = None): - super(ConversionContext, self).__init__() + super().__init__() self.import_options = import_options or ImportOptions() self.graph_ids = OrderedDict() # type: Dict self.graph_id_subworkflow_conversion_contexts = {} # type: Dict @@ -595,7 +594,7 @@ def get_subworkflow_conversion_context_graph(self, graph_id): class SubworkflowConversionContext(BaseConversionContext): def __init__(self, parent_context): - super(SubworkflowConversionContext, self).__init__() + super().__init__() self.parent_context = parent_context @property @@ -632,7 +631,7 @@ def _is_link(value): def _join_prefix(prefix, key): if prefix: - new_key = "%s|%s" % (prefix, key) + new_key = f"{prefix}|{key}" else: new_key = key return new_key @@ -657,7 +656,7 @@ def _init_connect_dict(step): elif isinstance(value, dict) and 'default' in value: continue elif isinstance(value, dict): - raise KeyError('step input must define either source or default %s' % value) + raise KeyError(f'step input must define either source or default {value}') connect[key] = [value] connection_keys.add(key) @@ -731,7 +730,7 @@ def main(argv=None): workflow_directory = os.path.abspath(format2_path) galaxy_interface = None - with open(format2_path, "r") as f: + with open(format2_path) as f: has_workflow = ordered_load(f) output = python_to_workflow(has_workflow, galaxy_interface=galaxy_interface, workflow_directory=workflow_directory) diff --git a/gxformat2/cytoscape.py b/gxformat2/cytoscape.py index 2fe71e4..1519323 100644 --- a/gxformat2/cytoscape.py +++ b/gxformat2/cytoscape.py @@ -33,7 +33,7 @@ def to_cytoscape(workflow_path: str, output_path=None): for i, step in enumerate(steps): step_id = step.get("id") or step.get("label") or str(i) step_type = step.get("type") or 'tool' - classes = ["type_%s" % step_type] + classes = [f"type_{step_type}"] if step_type in ['tool', 'subworkflow']: 
classes.append("runnable") else: @@ -42,7 +42,7 @@ def to_cytoscape(workflow_path: str, output_path=None): tool_id = step.get("tool_id") if tool_id and tool_id.startswith(MAIN_TS_PREFIX): tool_id = tool_id[len(MAIN_TS_PREFIX):] - label = step.get("id") or step.get("label") or ("tool:%s" % tool_id) or str(i) + label = step.get("id") or step.get("label") or (f"tool:{tool_id}") or str(i) ensure_step_position(step, i) node_position = dict(x=int(step["position"]["left"]), y=int(step["position"]["top"])) repo_link = None @@ -68,12 +68,12 @@ def to_cytoscape(workflow_path: str, output_path=None): from_step, output = value.split("/", 1) else: from_step, output = value, None - edge_id = "%s__to__%s" % (step_id, from_step) + edge_id = f"{step_id}__to__{from_step}" edge_data = {"id": edge_id, "source": from_step, "target": step_id, "input": key, "output": output} elements.append({"group": "edges", "data": edge_data}) if output_path.endswith(".html"): - with open(CYTOSCAPE_JS_TEMPLATE, "r") as f: + with open(CYTOSCAPE_JS_TEMPLATE) as f: template = f.read() viz = string.Template(template).safe_substitute(elements=json.dumps(elements)) with open(output_path, "w") as f: diff --git a/gxformat2/export.py b/gxformat2/export.py index c86e2bc..026e12b 100644 --- a/gxformat2/export.py +++ b/gxformat2/export.py @@ -239,7 +239,7 @@ def _to_source(has_output_name, label_map, output_id=None): if output_name == "output": source = output_label else: - source = "%s/%s" % (output_label, output_name) + source = f"{output_label}/{output_name}" return source @@ -252,7 +252,7 @@ def main(argv=None): format2_path = args.input_path output_path = args.output_path or (format2_path + ".gxwf.yml") - with open(format2_path, "r") as f: + with open(format2_path) as f: native_workflow_dict = json.load(f) as_dict = from_galaxy_native(native_workflow_dict) diff --git a/gxformat2/interface.py b/gxformat2/interface.py index 3a3dc99..6d20982 100644 --- a/gxformat2/interface.py +++ b/gxformat2/interface.py @@ -28,7 +28,7 @@ def import_tool(self, tool): raise NotImplementedError() -class BioBlendImporterGalaxyInterface(object): +class BioBlendImporterGalaxyInterface: """Implementation of :class:`ImporterGalaxyInterface` using bioblend.""" def __init__(self, **kwds): diff --git a/gxformat2/lint.py b/gxformat2/lint.py index de6475a..c897b0e 100644 --- a/gxformat2/lint.py +++ b/gxformat2/lint.py @@ -46,7 +46,7 @@ def ensure_key_has_value(lint_context, has_keys, key, value, has_class=None, has def _lint_step_errors(lint_context, step): step_errors = step.get("errors") if step_errors is not None: - lint_context.warn("tool step contains error indicated during Galaxy export - %s" % step_errors) + lint_context.warn(f"tool step contains error indicated during Galaxy export - {step_errors}") def lint_ga_path(lint_context, path): @@ -147,7 +147,7 @@ def _validate_report(lint_context, workflow_dict): try: validate_galaxy_markdown(markdown) except ValueError as e: - lint_context.error("Report markdown validation failed [%s]" % e) + lint_context.error(f"Report markdown validation failed [{e}]") def _lint_training(lint_context, workflow_dict): @@ -159,7 +159,7 @@ def _lint_training(lint_context, workflow_dict): else: tags = workflow_dict["tags"] if lint_context.training_topic not in tags: - lint_context.warn("Missing expected training topic (%s) as workflow tag." 
% lint_context.training_topic) + lint_context.warn(f"Missing expected training topic ({lint_context.training_topic}) as workflow tag.") # Move up into individual lints - all workflows should have docs. format2_dict = ensure_format2(workflow_dict) if "doc" not in format2_dict: @@ -174,7 +174,7 @@ def main(argv=None): argv = sys.argv args = _parser().parse_args(argv[1:]) path = args.path - with open(path, "r") as f: + with open(path) as f: try: workflow_dict = ordered_load(f) except Exception: diff --git a/gxformat2/linting.py b/gxformat2/linting.py index 18420b7..9c5a9aa 100644 --- a/gxformat2/linting.py +++ b/gxformat2/linting.py @@ -8,7 +8,7 @@ DEFAULT_TRAINING_LINT = None -class LintContext(object): +class LintContext: """Track running status (state) of linting.""" def __init__(self, level=LEVEL_WARN, training_topic=DEFAULT_TRAINING_LINT): @@ -46,15 +46,15 @@ def print_messages(self): """Print error messages and update state at the end of linting.""" for message in self.error_messages: self.found_errors = True - print(".. ERROR: %s" % message) + print(f".. ERROR: {message}") if self.level != LEVEL_ERROR: for message in self.warn_messages: self.found_warns = True - print(".. WARNING: %s" % message) + print(f".. WARNING: {message}") if self.level == LEVEL_ALL: for message in self.info_messages: - print(".. INFO: %s" % message) + print(f".. INFO: {message}") for message in self.valid_messages: - print(".. CHECK: %s" % message) + print(f".. CHECK: {message}") diff --git a/gxformat2/main.py b/gxformat2/main.py index 7fdb8a6..d4a27cf 100644 --- a/gxformat2/main.py +++ b/gxformat2/main.py @@ -18,7 +18,7 @@ def convert_and_import_workflow(has_workflow, **kwds): workflow_path = has_workflow if workflow_directory is None: workflow_directory = os.path.dirname(has_workflow) - with open(workflow_path, "r") as f: + with open(workflow_path) as f: has_workflow = ordered_load(f) if workflow_directory is not None: diff --git a/gxformat2/markdown_parse.py b/gxformat2/markdown_parse.py index 93a424c..434402e 100644 --- a/gxformat2/markdown_parse.py +++ b/gxformat2/markdown_parse.py @@ -40,12 +40,12 @@ GALAXY_FLAVORED_MARKDOWN_CONTAINER_REGEX = r'(?P%s)' % "|".join(GALAXY_FLAVORED_MARKDOWN_CONTAINERS) ARG_VAL_REGEX = r'''[\w_\-]+|\"[^\"]+\"|\'[^\']+\'''' -FUNCTION_ARG = r'\s*\w+\s*=\s*(?:%s)\s*' % ARG_VAL_REGEX +FUNCTION_ARG = fr'\s*\w+\s*=\s*(?:{ARG_VAL_REGEX})\s*' # embed commas between arguments -FUNCTION_MULTIPLE_ARGS = r'(?P%s)(?P(?:,%s)*)' % (FUNCTION_ARG, FUNCTION_ARG) +FUNCTION_MULTIPLE_ARGS = fr'(?P{FUNCTION_ARG})(?P(?:,{FUNCTION_ARG})*)' FUNCTION_MULTIPLE_ARGS_PATTERN = re.compile(FUNCTION_MULTIPLE_ARGS) FUNCTION_CALL_LINE_TEMPLATE = r'\s*%s\s*\((?:' + FUNCTION_MULTIPLE_ARGS + r')?\)\s*' -GALAXY_MARKDOWN_FUNCTION_CALL_LINE = re.compile(FUNCTION_CALL_LINE_TEMPLATE % GALAXY_FLAVORED_MARKDOWN_CONTAINER_REGEX) +GALAXY_MARKDOWN_FUNCTION_CALL_LINE = re.compile(FUNCTION_CALL_LINE_TEMPLATE % (GALAXY_FLAVORED_MARKDOWN_CONTAINER_REGEX, )) WHITE_SPACE_ONLY_PATTERN = re.compile(r"^[\s]+$") @@ -119,7 +119,7 @@ def _validate_arg(arg_str): if expecting_container_close_for: template = "Invalid line %d: %s" - msg = template % (last_line_no, "close of block for [{expected_for}] expected".format(expected_for=expecting_container_close_for)) + msg = template % (last_line_no, f"close of block for [{expecting_container_close_for}] expected") raise ValueError(msg) diff --git a/gxformat2/model.py b/gxformat2/model.py index eb18d1d..db6eaa3 100644 --- a/gxformat2/model.py +++ b/gxformat2/model.py @@ -162,7 +162,7 @@ def 
inputs_as_native_steps(workflow_dict: dict): native_type = format2_type input_def["parameter_type"] = native_type else: - raise Exception("Unknown input type [%s] encountered." % input_type) + raise Exception(f"Unknown input type [{input_type}] encountered.") step_def = input_def step_def.update({ diff --git a/gxformat2/normalize.py b/gxformat2/normalize.py index 8e6b76e..938d26d 100644 --- a/gxformat2/normalize.py +++ b/gxformat2/normalize.py @@ -15,7 +15,7 @@ NON_INPUT_TYPES = ["tool", "subworkflow", "pause"] -class Inputs(object): +class Inputs: """An abstraction around a Galaxy workflow's inputs.""" def __init__(self, workflow_dict): @@ -60,7 +60,7 @@ def count(self): return len(self._inputs) -class NormalizedWorkflow(object): +class NormalizedWorkflow: """Present a view of a Format2 workflow that has been normalized. In a normalized view: @@ -151,13 +151,13 @@ def _replace_anonymous_output_references(workflow_dict: dict): step, output_name = output_source.split("/", 1) if ":" in output_name: subworkflow_label, subworkflow_output = output_name.split(":", 1) - assert subworkflow_label in runs_by_label, "%s not in %s" % (subworkflow_label, runs_by_label.keys()) + assert subworkflow_label in runs_by_label, f"{subworkflow_label} not in {runs_by_label.keys()}" run = runs_by_label[subworkflow_label] subworkflow_outputs = run["outputs"] assert isinstance(subworkflow_outputs, dict) for subworkflow_output_name, output_def in subworkflow_outputs.items(): - if output_def["outputSource"] == "%s/%s" % (subworkflow_label, subworkflow_output): - output["outputSource"] = "%s/%s" % (step, subworkflow_output_name) + if output_def["outputSource"] == f"{subworkflow_label}/{subworkflow_output}": + output["outputSource"] = f"{step}/{subworkflow_output_name}" def _ensure_implicit_step_outs(workflow_dict: dict): @@ -207,7 +207,7 @@ def register_output_source(output_source): def _ensure_format2(workflow_dict=None, workflow_path=None): if workflow_path is not None: assert workflow_dict is None - with open(workflow_path, "r") as f: + with open(workflow_path) as f: workflow_dict = ordered_load(f) workflow_dict = ensure_format2(workflow_dict) diff --git a/gxformat2/yaml.py b/gxformat2/yaml.py index 7d3e3e5..8b55478 100644 --- a/gxformat2/yaml.py +++ b/gxformat2/yaml.py @@ -10,7 +10,7 @@ def ordered_load_path(path: str, **kwds): """Safe and ordered load of YAML from specified path.""" - with open(path, "r") as f: + with open(path) as f: return ordered_load(f, **kwds) diff --git a/scripts/commit_version.py b/scripts/commit_version.py index 7c5a558..ef24f07 100644 --- a/scripts/commit_version.py +++ b/scripts/commit_version.py @@ -17,19 +17,19 @@ def main(argv): mod_path = os.path.join(PROJECT_DIRECTORY, source_dir, "__init__.py") if not DEV_RELEASE: history_path = os.path.join(PROJECT_DIRECTORY, "HISTORY.rst") - history = open(history_path, "r").read() + history = open(history_path).read() today = datetime.datetime.today() today_str = today.strftime('%Y-%m-%d') - history = history.replace(".dev0", " (%s)" % today_str) + history = history.replace(".dev0", f" ({today_str})") open(history_path, "w").write(history) - mod = open(mod_path, "r").read() - mod = re.sub("__version__ = '[\d\.]*\.dev0'", - "__version__ = '%s'" % version, + mod = open(mod_path).read() + mod = re.sub(r"__version__ = '[\d\.]*\.dev0'", + f"__version__ = '{version}'", mod) mod = open(mod_path, "w").write(mod) - shell(["git", "commit", "-m", "Version %s" % version, - "HISTORY.rst", "%s/__init__.py" % source_dir]) + shell(["git", "commit", "-m", 
f"Version {version}", + "HISTORY.rst", f"{source_dir}/__init__.py"]) shell(["git", "tag", version]) diff --git a/scripts/lint_sphinx_output.py b/scripts/lint_sphinx_output.py index a836956..05f73ed 100644 --- a/scripts/lint_sphinx_output.py +++ b/scripts/lint_sphinx_output.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import re import sys diff --git a/scripts/new_version.py b/scripts/new_version.py index aaf8f81..b65f6bf 100644 --- a/scripts/new_version.py +++ b/scripts/new_version.py @@ -25,37 +25,33 @@ def main(argv): else: dev_version = re.compile(r'dev([\d]+)').search(version).group(1) new_dev_version = int(dev_version) + 1 - new_version = version.replace("dev%s" % dev_version, "dev%s" % new_dev_version) + new_version = version.replace(f"dev{dev_version}", f"dev{new_dev_version}") history_path = os.path.join(PROJECT_DIRECTORY, "HISTORY.rst") if not DEV_RELEASE: - history = open(history_path, "r").read() + history = open(history_path).read() def extend(from_str, line): from_str += "\n" - return history.replace(from_str, from_str + line + "\n" ) + return history.replace(from_str, from_str + line + "\n") - history = extend(".. to_doc", """ + history = extend(".. to_doc", f""" --------------------- -%s.dev0 +{new_version}.dev0 --------------------- - """ % new_version) + """) open(history_path, "w").write(history) mod_path = os.path.join(PROJECT_DIRECTORY, source_dir, "__init__.py") - mod = open(mod_path, "r").read() + mod = open(mod_path).read() if not DEV_RELEASE: - mod = re.sub("__version__ = '[\d\.]+'", - "__version__ = '%s.dev0'" % new_version, - mod, 1) + mod = re.sub(r"__version__ = '[\d\.]+'", f"__version__ = '{new_version}.dev0'", mod, 1) else: - mod = re.sub("dev%s" % dev_version, - "dev%s" % new_dev_version, - mod, 1) + mod = re.sub(f"dev{dev_version}", f"dev{new_dev_version}", mod, 1) mod = open(mod_path, "w").write(mod) - shell(["git", "commit", "-m", "Starting work on %s" % new_version, - "HISTORY.rst", "%s/__init__.py" % source_dir]) + shell(["git", "commit", "-m", f"Starting work on {new_version}", + "HISTORY.rst", f"{source_dir}/__init__.py"]) def shell(cmds, **kwds): diff --git a/scripts/print_version_for_release.py b/scripts/print_version_for_release.py index 60b2880..7b6470c 100644 --- a/scripts/print_version_for_release.py +++ b/scripts/print_version_for_release.py @@ -1,4 +1,3 @@ -from __future__ import print_function from distutils.version import LooseVersion import ast import os @@ -10,7 +9,7 @@ _version_re = re.compile(r'__version__\s+=\s+(.*)') -with open('%s/__init__.py' % source_dir, 'rb') as f: +with open(f'{source_dir}/__init__.py', 'rb') as f: version = str(ast.literal_eval(_version_re.search( f.read().decode('utf-8')).group(1))) diff --git a/setup.py b/setup.py index 8196216..75b1225 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- import ast import os @@ -14,11 +13,11 @@ _version_re = re.compile(r'__version__\s+=\s+(.*)') -with open('%s/__init__.py' % SOURCE_DIR, 'rb') as f: +with open(f'{SOURCE_DIR}/__init__.py', 'rb') as f: init_contents = f.read().decode('utf-8') def get_var(var_name): - pattern = re.compile(r'%s\s+=\s+(.*)' % var_name) + pattern = re.compile(fr'{var_name}\s+=\s+(.*)') match = pattern.search(init_contents).group(1) return str(ast.literal_eval(match)) diff --git a/tests/_helpers.py b/tests/_helpers.py index 3592c31..d3ee6fa 100644 --- a/tests/_helpers.py +++ b/tests/_helpers.py @@ -44,8 +44,7 @@ def copy_without_workflow_output_labels(native_as_dict): def 
native_workflow_outputs(native_as_dict): steps = native_as_dict.get("steps") for step in steps.values(): - for workflow_output in step.get("workflow_outputs", []): - yield workflow_output + yield from step.get("workflow_outputs", []) def round_trip(has_yaml): diff --git a/tests/test_basic.py b/tests/test_basic.py index cc17ce9..9495a45 100644 --- a/tests/test_basic.py +++ b/tests/test_basic.py @@ -261,7 +261,7 @@ def test_round_trip_whens(): def test_export_native_no_labels(): # Ensure outputs don't get mapped to 'null' key and ensure - native_unicycler = ordered_load(open(os.path.join(TEST_PATH, "unicycler.ga"), "r").read()) + native_unicycler = ordered_load(open(os.path.join(TEST_PATH, "unicycler.ga")).read()) before_output_count = 0 for workflow_output in native_workflow_outputs(native_unicycler): before_output_count += 1 diff --git a/tests/test_cytoscape.py b/tests/test_cytoscape.py index 005b565..be61b90 100644 --- a/tests/test_cytoscape.py +++ b/tests/test_cytoscape.py @@ -14,16 +14,16 @@ def test_main_output_json(): out_file = tempfile.NamedTemporaryFile(prefix="cytoscape_elements", suffix=".json") main([EXAMPLE_PATH, out_file.name]) - with open(out_file.name, "r") as f: + with open(out_file.name) as f: elements = json.load(f) assert isinstance(elements, list) - assert "" not in open(out_file.name, "r").read() + assert "" not in open(out_file.name).read() def test_main_output_html(): out_file = tempfile.NamedTemporaryFile(prefix="cytoscape_elements", suffix=".html") main([EXAMPLE_PATH, out_file.name]) - assert "" in open(out_file.name, "r").read() + assert "" in open(out_file.name).read() def test_interop_generation(): diff --git a/tests/test_export_abstract.py b/tests/test_export_abstract.py index 4f95feb..4c7ab28 100644 --- a/tests/test_export_abstract.py +++ b/tests/test_export_abstract.py @@ -56,8 +56,8 @@ def test_abstract_export(): for name, example in EXAMPLES.items(): format2, native = _both_formats(example) - _run_example(format2, _examples_path_for("%s_from_format2.cwl" % name)) - _run_example(native, _examples_path_for("%s_from_native.cwl" % name)) + _run_example(format2, _examples_path_for(f"{name}_from_format2.cwl")) + _run_example(native, _examples_path_for(f"{name}_from_native.cwl")) def test_basic_workflow(): @@ -111,7 +111,7 @@ def test_string_inputs(): def _run_example_path(path): out = _examples_path_for(path) - with open(path, "r") as f: + with open(path) as f: return _run_example(ordered_load(f), out) diff --git a/tests/test_lint.py b/tests/test_lint.py index c4028ab..8a1313f 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -245,7 +245,7 @@ def setup_module(module): # ensure that round tripping all green format2 workflows still lint green. for file_name in os.listdir(TEST_LINT_EXAMPLES): if file_name.startswith("0_format2") and "roundtrip" not in file_name: - roundtrip_contents = round_trip(open(os.path.join(TEST_LINT_EXAMPLES, file_name), "r").read()) + roundtrip_contents = round_trip(open(os.path.join(TEST_LINT_EXAMPLES, file_name)).read()) base = os.path.splitext(file_name)[0][len("0_"):] _dump_with_exit_code(roundtrip_contents, 0, base + "_roundtrip") @@ -287,7 +287,7 @@ def test_lint_examples(): expected_exit_code = int(file_name[0]) actual_exit_code = main(["lint", file_path]) if actual_exit_code != expected_exit_code: - contents = open(file_path, "r").read() + contents = open(file_path).read() template = "File [%s] didn't lint properly - expected exit code [%d], got [%d]. 
Contents:\n%s" raise AssertionError(template % (file_name, expected_exit_code, actual_exit_code, contents)) diff --git a/tests/test_markdown_validate.py b/tests/test_markdown_validate.py index 884ed3f..a61e2b1 100644 --- a/tests/test_markdown_validate.py +++ b/tests/test_markdown_validate.py @@ -13,7 +13,7 @@ def assert_markdown_invalid(markdown, at_line=None): failed = True if at_line is not None: assert "Invalid line %d" % (at_line + 1) in str(e) - assert failed, "Expected markdown [%s] to fail validation but it did not." % markdown + assert failed, f"Expected markdown [{markdown}] to fail validation but it did not." def test_markdown_validation(): diff --git a/tests/test_post_job_action_import.py b/tests/test_post_job_action_import.py index 93a8f77..4c90f8f 100644 --- a/tests/test_post_job_action_import.py +++ b/tests/test_post_job_action_import.py @@ -80,8 +80,8 @@ def test_post_job_action_to_native(wf_template): ])} expected_pja = json.dumps(expected_pja, sort_keys=True) converted_pjas = json.dumps(native['steps']['1']['post_job_actions'], sort_keys=True) - assert expected_pja == converted_pjas, "Expected:\n%s\nActual:\n%s'" % (expected_pja, converted_pjas) + assert expected_pja == converted_pjas, f"Expected:\n{expected_pja}\nActual:\n{converted_pjas}'" assert_valid_native(native) roundtrip_workflow = from_native(native) out_def = roundtrip_workflow['steps']['cat']['out']['out_file1'] - assert action_key in out_def, "%s not in %s" % (action_key, out_def) + assert action_key in out_def, f"{action_key} not in {out_def}" diff --git a/tests/test_to_native.py b/tests/test_to_native.py index 0aa6bb8..54be624 100644 --- a/tests/test_to_native.py +++ b/tests/test_to_native.py @@ -20,7 +20,7 @@ def test_basic_workflow(): with open(format2_path, "w") as f: f.write(BASIC_WORKFLOW) out = _run_example_path(format2_path) - with open(out, "r") as f: + with open(out) as f: as_native = json.load(f) assert as_native["name"] == "Simple workflow" assert as_native["annotation"] == "Simple workflow that no-op cats a file and then selects 10 random lines.\n" @@ -44,7 +44,7 @@ def test_int_input(): with open(format2_path, "w") as f: f.write(INT_INPUT) out = _run_example_path(format2_path) - with open(out, "r") as f: + with open(out) as f: as_native = json.load(f) int_step = as_native["steps"]["1"] From 3903a054926d5c577377d3ef0f257b19f4ecb974 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 7 Jul 2021 13:12:40 +0100 Subject: [PATCH 4/5] Cancel in progress jobs on git push --- .github/workflows/publishdocs.yml | 4 ++-- .github/workflows/tox.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publishdocs.yml b/.github/workflows/publishdocs.yml index 98de0af..23ec4c5 100644 --- a/.github/workflows/publishdocs.yml +++ b/.github/workflows/publishdocs.yml @@ -1,7 +1,7 @@ name: Publish gxformat2 Docs on: - push: - branches: + push: + branches: - master jobs: deploy: diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml index 0724707..82c566d 100644 --- a/.github/workflows/tox.yml +++ b/.github/workflows/tox.yml @@ -1,5 +1,8 @@ name: Python CI on: [push, pull_request] +concurrency: + group: tox-${{ github.ref }} + cancel-in-progress: true jobs: build: runs-on: ubuntu-latest From 25384082f87960fab9d6cc49a7f916e62c63ccf9 Mon Sep 17 00:00:00 2001 From: Nicola Soranzo Date: Wed, 7 Jul 2021 17:25:14 +0100 Subject: [PATCH 5/5] Apply suggestions from code review Co-authored-by: Marius van den Beek --- gxformat2/cytoscape.py | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/gxformat2/cytoscape.py b/gxformat2/cytoscape.py index 1519323..08b5123 100644 --- a/gxformat2/cytoscape.py +++ b/gxformat2/cytoscape.py @@ -42,7 +42,7 @@ def to_cytoscape(workflow_path: str, output_path=None): tool_id = step.get("tool_id") if tool_id and tool_id.startswith(MAIN_TS_PREFIX): tool_id = tool_id[len(MAIN_TS_PREFIX):] - label = step.get("id") or step.get("label") or (f"tool:{tool_id}") or str(i) + label = step.get("id") or step.get("label") or (f"tool:{tool_id}" if tool_id else str(i)) ensure_step_position(step, i) node_position = dict(x=int(step["position"]["left"]), y=int(step["position"]["top"])) repo_link = None
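
Reviewer aside (not part of the patches above): the one-line change in PATCH 5/5 fixes a subtle truthiness bug. An f-string such as f"tool:{tool_id}" always evaluates to a non-empty, truthy string (e.g. "tool:None" when tool_id is None), so the old "or str(i)" fallback was unreachable and steps without a tool id got labels like "tool:None". A minimal sketch of the behaviour before and after the suggested change; the concrete values of i and step below are illustrative only:

    # Illustrative only -- mirrors the label expression touched in PATCH 5/5.
    i = 0
    step = {}  # a step with no "id", "label" or "tool_id"
    tool_id = step.get("tool_id")  # -> None

    # Before: the f-string is always truthy, so "or str(i)" is dead code.
    label_before = step.get("id") or step.get("label") or (f"tool:{tool_id}") or str(i)
    print(label_before)  # prints: tool:None

    # After: fall back to the step index only when there is no tool id.
    label_after = step.get("id") or step.get("label") or (f"tool:{tool_id}" if tool_id else str(i))
    print(label_after)  # prints: 0

The conditional expression keeps the index fallback reachable while leaving behaviour unchanged for steps that do have a tool id.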