Merge pull request #286 from nf-core/dev
Dev -> Master for v1.12.0 release
drpatelh committed Feb 29, 2024
2 parents 04ee503 + 755ca35 commit 8ec2d93
Showing 174 changed files with 15,748 additions and 2,773 deletions.
15 changes: 14 additions & 1 deletion .editorconfig
@@ -18,7 +18,20 @@ end_of_line = unset
insert_final_newline = unset
trim_trailing_whitespace = unset
indent_style = unset
indent_size = unset
[/subworkflows/nf-core/**]
charset = unset
end_of_line = unset
insert_final_newline = unset
trim_trailing_whitespace = unset
indent_style = unset

[/assets/email*]
indent_size = unset

# ignore Readme
[README.md]
indent_style = unset

# ignore python
[*.{py,md}]
indent_style = unset
3 changes: 3 additions & 0 deletions .github/CONTRIBUTING.md
@@ -27,6 +27,9 @@ If you're not used to this workflow with git, you can start with some [docs from

## Tests

You can optionally test your changes by running the pipeline locally. It is recommended to use the `debug` profile to
receive warnings about process selectors and other debug information. Example: `nextflow run . -profile debug,test,docker --outdir <OUTDIR>`.

When you create a pull request with changes, [GitHub Actions](https://github.com/features/actions) will run automatic tests.
Typically, pull-requests are only fully reviewed when these tests are passing, though of course we can help out before then.

1 change: 1 addition & 0 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -19,6 +19,7 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/fetc
- [ ] If necessary, also make a PR on the nf-core/fetchngs _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
- [ ] Make sure your code lints (`nf-core lint`).
- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir <OUTDIR>`).
- [ ] Check for unexpected warnings in debug mode (`nextflow run . -profile debug,test,docker --outdir <OUTDIR>`).
- [ ] Usage Documentation in `docs/usage.md` is updated.
- [ ] Output Documentation in `docs/output.md` is updated.
- [ ] `CHANGELOG.md` is updated.
152 changes: 152 additions & 0 deletions .github/python/find_changed_files.py
@@ -0,0 +1,152 @@
#!/usr/bin/env python

## This script scans *.nf.test files for function/process/workflow names and returns them as a JSON list
# It is functionally similar to nf-test list but fills a gap until https://github.com/askimed/nf-test/issues/196 is added

import argparse
import json
import logging
import re

from itertools import chain
from pathlib import Path


def parse_args() -> argparse.Namespace:
"""
    Parse command line arguments and return a Namespace object.
    Returns:
        argparse.Namespace: The parsed command-line arguments.
"""
parser = argparse.ArgumentParser(
description="Scan *.nf.test files for function/process/workflow name and return as a JSON list"
)
parser.add_argument(
"-p",
"--paths",
nargs="+",
default=["."],
help="List of directories or files to scan",
)
parser.add_argument(
"-l",
"--log-level",
choices=["DEBUG", "INFO", "WARNING", "ERROR"],
default="INFO",
help="Logging level",
)
parser.add_argument(
"-t",
"--types",
nargs="+",
choices=["function", "process", "workflow", "pipeline"],
default=["function", "process", "workflow", "pipeline"],
help="Types of tests to include.",
)
return parser.parse_args()


def find_files(paths: list[str]) -> list[Path]:
"""
Find all files matching pattern *.nf.test recursively from a list of paths.
Args:
paths (list): List of directories or files to scan.
Returns:
list: List of files matching the pattern *.nf.test.
"""
# this is a bit clunky
result = []
for path in paths:
path_obj = Path(path)
# If Path is the exact nf-test file add to list:
if path_obj.match("*.nf.test"):
result.append(path_obj)
# Else recursively search for nf-test files:
else:
for file in path_obj.rglob("*.nf.test"):
result.append(file)
return result


def process_files(files: list[Path]) -> list[str]:
"""
Process the files and return lines that begin with 'workflow', 'process', or 'function' and have a single string afterwards.
Args:
files (list): List of files to process.
Returns:
list: List of lines that match the criteria.
"""
result = []
for file in files:
with open(file, "r") as f:
is_pipeline_test = True
lines = f.readlines()
for line in lines:
line = line.strip()
if line.startswith(("workflow", "process", "function")):
words = line.split()
if len(words) == 2 and re.match(r'^".*"$', words[1]):
result.append(line)
is_pipeline_test = False

# If no results included workflow, process or function
# Add a dummy result to fill the 'pipeline' category
if is_pipeline_test:
result.append("pipeline 'PIPELINE'")

return result


def generate(
lines: list[str], types: list[str] = ["function", "process", "workflow", "pipeline"]
) -> dict[str, list[str]]:
"""
Generate a dictionary of function, process and workflow lists from the lines.
Args:
lines (list): List of lines to process.
types (list): List of types to include.
Returns:
dict: Dictionary with function, process and workflow lists.
"""
result: dict[str, list[str]] = {
"function": [],
"process": [],
"workflow": [],
"pipeline": [],
}
for line in lines:
words = line.split()
if len(words) == 2:
keyword = words[0]
name = words[1].strip("'\"") # Strip both single and double quotes
if keyword in types:
result[keyword].append(name)
return result


if __name__ == "__main__":

# Utility stuff
args = parse_args()
logging.basicConfig(level=args.log_level)

# Parse nf-test files for targets of tests
files = find_files(args.paths)
lines = process_files(files)
result = generate(lines)

# Get only relevant results (specified by -t)
# Unique using a set
target_results = list(
{item for sublist in map(result.get, args.types) for item in sublist}
)

# Print to stdout
print(json.dumps(target_results))
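
To make the behaviour of this new helper concrete, here is a minimal sketch (not part of the commit) that loads the script above as a module and runs `find_files`, `process_files` and `generate` over a hypothetical `*.nf.test` file; the test-file content and the `SRA_IDS_TO_RUNINFO` name are illustrative only, and the script's `list[str]` annotations assume Python 3.9+ (the CI uses 3.11):

```python
# Illustrative sketch only: assumes it is run from the repository root.
import importlib.util
import json
import tempfile
from pathlib import Path

# Load .github/python/find_changed_files.py as a module so its functions can be called directly.
spec = importlib.util.spec_from_file_location(
    "find_changed_files", ".github/python/find_changed_files.py"
)
fcf = importlib.util.module_from_spec(spec)
spec.loader.exec_module(fcf)

# Hypothetical *.nf.test file declaring a single process test.
snippet = 'nextflow_process {\n    name "Test process"\n    process "SRA_IDS_TO_RUNINFO"\n}\n'

with tempfile.TemporaryDirectory() as tmp:
    (Path(tmp) / "main.nf.test").write_text(snippet)

    files = fcf.find_files([tmp])      # [PosixPath('.../main.nf.test')]
    lines = fcf.process_files(files)   # ['process "SRA_IDS_TO_RUNINFO"']
    print(json.dumps(fcf.generate(lines)))
    # {"function": [], "process": ["SRA_IDS_TO_RUNINFO"], "workflow": [], "pipeline": []}
```
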
2 changes: 1 addition & 1 deletion .github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
- name: Post PR comment
if: failure()
uses: mshick/add-pr-comment@v1
uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
with:
message: |
## This PR is against the `master` branch :x:
136 changes: 105 additions & 31 deletions .github/workflows/ci.yml
@@ -13,7 +13,10 @@ on:

env:
NXF_ANSI_LOG: false
NFTEST_VER: "0.8.1"
NFT_VER: "0.8.4"
NFT_WORKDIR: "~"
NFT_DIFF: "pdiff"
NFT_DIFF_ARGS: "--line-numbers --expand-tabs=2"

concurrency:
group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
@@ -24,64 +24,110 @@ jobs:
name: Check for changes
runs-on: ubuntu-latest
outputs:
# Expose matched filters as job 'tags' output variable
tags: ${{ steps.filter.outputs.changes }}
changes: ${{ steps.changed_files.outputs.any_modified }}
tags: ${{ steps.list.outputs.tags }}
steps:
- uses: actions/setup-python@v4
with:
python-version: "3.11"
architecture: "x64"

- uses: actions/checkout@v3
- name: Combine all tags.yml files
id: get_username
run: find . -name "tags.yml" -not -path "./.github/*" -exec cat {} + > .github/tags.yml
- name: debug
run: cat .github/tags.yml
- uses: dorny/paths-filter@v2
id: filter
with:
filters: ".github/tags.yml"
fetch-depth: 0

define_nxf_versions:
name: Choose nextflow versions to test against depending on target branch
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.nxf_versions.outputs.matrix }}
steps:
- id: nxf_versions
- uses: tj-actions/changed-files@v42
id: changed_files
with:
dir_names: "true"
output_renamed_files_as_deleted_and_added: "true"
# Define list of additional rules for testing paths
# Mostly, we define additional 'pipeline' or 'all' tests here
files_yaml: |
".":
- .github/workflows/**
- nf-test.config
- nextflow.config
tests:
- assets/*
- bin/*
- conf/*
- main.nf
- nextflow_schema.json
files_ignore: |
.git*
.gitpod.yml
.prettierignore
.prettierrc.yml
**.md
**.png
modules.json
pyproject.toml
tower.yml
- name: debug
run: |
if [[ "${{ github.event_name }}" == "pull_request" && "${{ github.base_ref }}" == "dev" && "${{ matrix.NXF_VER }}" != "latest-everything" ]]; then
echo matrix='["latest-everything"]' | tee -a $GITHUB_OUTPUT
else
echo matrix='["latest-everything", "23.04.0"]' | tee -a $GITHUB_OUTPUT
fi
echo ${{ steps.changed_files.outputs.any_modified }}
echo ${{ steps.changed_files.outputs.all_changed_files }}
echo ${{ steps.changed_files.outputs.changed_keys }}
- name: nf-test list tags
id: list
if: ${{ steps.changed_files.outputs.any_modified }}
run: |
echo tags=$(python \
.github/python/find_changed_files.py \
-t pipeline workflow process \
-p ${{ steps.changed_files.outputs.all_changed_files }} ${{ steps.changed_files.outputs.changed_keys }} \
) >> $GITHUB_OUTPUT
- name: debug2
run: |
echo ${{ steps.list.outputs.tags }}
test:
name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
needs: [changes, define_nxf_versions]
if: needs.changes.outputs.tags != '[]'
name: ${{ matrix.tags }} ${{ matrix.profile }} NF-${{ matrix.NXF_VER }}
needs: [changes]
if: needs.changes.outputs.changes
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
NXF_VER: ${{ fromJson(needs.define_nxf_versions.outputs.matrix) }}
NXF_VER:
- "latest-everything"
- "23.04"
tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
profile:
- "docker"

steps:
- name: Check out pipeline code
uses: actions/checkout@v3
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4

- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
uses: nf-core/setup-nextflow@b9f764e8ba5c76b712ace14ecbfcef0e40ae2dd8 # v1
with:
version: "${{ matrix.NXF_VER }}"

- uses: actions/setup-python@v4
with:
python-version: "3.11"
architecture: "x64"

- name: Install pdiff to see diff between nf-test snapshots
run: |
python -m pip install --upgrade pip
pip install pdiff
- name: Cache nf-test installation
id: cache-software
uses: actions/cache@v3
with:
path: |
/usr/local/bin/nf-test
/home/runner/.nf-test/nf-test.jar
key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest
key: ${{ runner.os }}-${{ env.NFT_VER }}-nftest

- name: Install nf-test
if: steps.cache-software.outputs.cache-hit != 'true'
Expand All @@ -91,7 +140,12 @@ jobs:
- name: Run nf-test
run: |
nf-test test --tag ${{ matrix.tags }} --profile "test,${{ matrix.profile }}" --junitxml=test.xml
nf-test test --verbose --tag ${{ matrix.tags }} --profile "+${{ matrix.profile }}" --junitxml=test.xml --tap=test.tap
- uses: pcolby/tap-summary@v1
with:
path: >-
test.tap
- name: Output log on failure
if: failure()
@@ -104,3 +158,23 @@ jobs:
if: always() # always run even if the previous step fails
with:
report_paths: test.xml

confirm-pass:
runs-on: ubuntu-latest
needs:
- changes
- test
if: always()
steps:
- name: All tests ok
if: ${{ !contains(needs.*.result, 'failure') }}
run: exit 0
- name: One or more tests failed
if: ${{ contains(needs.*.result, 'failure') }}
run: exit 1

- name: debug-print
if: always()
run: |
echo "toJSON(needs) = ${{ toJSON(needs) }}"
echo "toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}"