diff --git a/.editorconfig b/.editorconfig
index 9b990088..b6b31907 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -22,11 +22,3 @@ indent_size = unset
[/assets/email*]
indent_size = unset
-
-# ignore Readme
-[README.md]
-indent_style = unset
-
-# ignore python
-[*.{py}]
-indent_style = unset
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index 865a3db1..98cc2b85 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -19,7 +19,7 @@ jobs:
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
- name: Post PR comment
if: failure()
- uses: mshick/add-pr-comment@v2
+ uses: mshick/add-pr-comment@v1
with:
message: |
## This PR is against the `master` branch :x:
diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml
index e37cfda5..694e90ec 100644
--- a/.github/workflows/clean-up.yml
+++ b/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@v9
+ - uses: actions/stale@v7
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml
deleted file mode 100644
index 8611458a..00000000
--- a/.github/workflows/download_pipeline.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-name: Test successful pipeline download with 'nf-core download'
-
-# Run the workflow when:
-# - dispatched manually
-# - when a PR is opened or reopened to master branch
-# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev.
-on:
- workflow_dispatch:
- pull_request:
- types:
- - opened
- branches:
- - master
- pull_request_target:
- branches:
- - master
-
-env:
- NXF_ANSI_LOG: false
-
-jobs:
- download:
- runs-on: ubuntu-latest
- steps:
- - name: Install Nextflow
- uses: nf-core/setup-nextflow@v1
-
- - uses: actions/setup-python@v5
- with:
- python-version: "3.11"
- architecture: "x64"
- - uses: eWaterCycle/setup-singularity@v7
- with:
- singularity-version: 3.8.3
-
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- pip install git+https://github.com/nf-core/tools.git@dev
-
- - name: Get the repository name and current branch set as environment variable
- run: |
- echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
- echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
- echo "REPO_BRANCH=${GITHUB_REF#refs/heads/}" >> ${GITHUB_ENV}
-
- - name: Download the pipeline
- env:
- NXF_SINGULARITY_CACHEDIR: ./
- run: |
- nf-core download ${{ env.REPO_LOWERCASE }} \
- --revision ${{ env.REPO_BRANCH }} \
- --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
- --compress "none" \
- --container-system 'singularity' \
- --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
- --container-cache-utilisation 'amend' \
- --download-configuration
-
- - name: Inspect download
- run: tree ./${{ env.REPOTITLE_LOWERCASE }}
-
- - name: Run the downloaded pipeline
- env:
- NXF_SINGULARITY_CACHEDIR: ./
- NXF_SINGULARITY_HOME_MOUNT: true
- run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index e0c4d0ac..5ffb0085 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -4,7 +4,7 @@ on:
types: [created]
jobs:
- fix-linting:
+ deploy:
# Only run if comment is on a PR with the main repo, and if it contains the magic keywords
if: >
contains(github.event.comment.html_url, '/pull/') &&
@@ -13,17 +13,10 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
+ - uses: actions/checkout@v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}
- # indication that the linting is being fixed
- - name: React on comment
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
- with:
- comment-id: ${{ github.event.comment.id }}
- reactions: eyes
-
# Action runs on the issue comment, so we don't get the PR by default
# Use the gh cli to check out the PR
- name: Checkout Pull Request
@@ -31,59 +24,32 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
- # Install and run pre-commit
- - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- with:
- python-version: 3.11
+ - uses: actions/setup-node@v4
- - name: Install pre-commit
- run: pip install pre-commit
+ - name: Install Prettier
+ run: npm install -g prettier @prettier/plugin-php
- - name: Run pre-commit
- id: pre-commit
- run: pre-commit run --all-files
- continue-on-error: true
+ # Check that we actually need to fix something
+ - name: Run 'prettier --check'
+ id: prettier_status
+ run: |
+ if prettier --check ${GITHUB_WORKSPACE}; then
+ echo "result=pass" >> $GITHUB_OUTPUT
+ else
+ echo "result=fail" >> $GITHUB_OUTPUT
+ fi
- # indication that the linting has finished
- - name: react if linting finished succesfully
- if: steps.pre-commit.outcome == 'success'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
- with:
- comment-id: ${{ github.event.comment.id }}
- reactions: "+1"
+ - name: Run 'prettier --write'
+ if: steps.prettier_status.outputs.result == 'fail'
+ run: prettier --write ${GITHUB_WORKSPACE}
- name: Commit & push changes
- id: commit-and-push
- if: steps.pre-commit.outcome == 'failure'
+ if: steps.prettier_status.outputs.result == 'fail'
run: |
git config user.email "core@nf-co.re"
git config user.name "nf-core-bot"
git config push.default upstream
git add .
git status
- git commit -m "[automated] Fix code linting"
+ git commit -m "[automated] Fix linting with Prettier"
git push
-
- - name: react if linting errors were fixed
- id: react-if-fixed
- if: steps.commit-and-push.outcome == 'success'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
- with:
- comment-id: ${{ github.event.comment.id }}
- reactions: hooray
-
- - name: react if linting errors were not fixed
- if: steps.commit-and-push.outcome == 'failure'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
- with:
- comment-id: ${{ github.event.comment.id }}
- reactions: confused
-
- - name: react if linting errors were not fixed
- if: steps.commit-and-push.outcome == 'failure'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
- with:
- issue-number: ${{ github.event.issue.number }}
- body: |
- @${{ github.actor }} I tried to fix the linting errors, but it didn't work. Please fix them manually.
- See [CI log](https://github.com/genomic-medicine-sweden/tomte/actions/runs/${{ github.run_id }}) for more details.
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index 81cd098e..b2260d4c 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -11,22 +11,61 @@ on:
types: [published]
jobs:
- pre-commit:
+ EditorConfig:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
+ - uses: actions/setup-node@v4
+
+ - name: Install editorconfig-checker
+ run: npm install -g editorconfig-checker
+
+ - name: Run ECLint check
+ run: editorconfig-checker -exclude README.md $(find .* -type f | grep -v '.git\|.py\|.md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile\|.bed\|.tsv')
+
+ Prettier:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: actions/setup-node@v4
+
+ - name: Install Prettier
+ run: npm install -g prettier
+
+ - name: Run Prettier --check
+ run: prettier --check ${GITHUB_WORKSPACE}
+
+ PythonBlack:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Check code lints with Black
+ uses: psf/black@stable
+
+ # If the above check failed, post a comment on the PR explaining the failure
+ - name: Post PR comment
+ if: failure()
+ uses: mshick/add-pr-comment@v1
with:
- python-version: 3.11
- cache: "pip"
+ message: |
+ ## Python linting (`black`) is failing
+
+ To keep the code consistent with lots of contributors, we run automated code consistency checks.
+ To fix this CI test, please run:
+
+ * Install [`black`](https://black.readthedocs.io/en/stable/): `pip install black`
+ * Fix formatting errors in your pipeline: `black .`
+
+ Once you push these changes the test should pass, and you can hide this comment :+1:
- - name: Install pre-commit
- run: pip install pre-commit
+ We highly recommend setting up Black in your code editor so that this formatting is done automatically on save. Ask about it on Slack for help!
- - name: Run pre-commit
- run: pre-commit run --all-files
+ Thanks again for your contribution!
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ allow-repeats: false
nf-core:
runs-on: ubuntu-latest
@@ -37,7 +76,7 @@ jobs:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
- - uses: actions/setup-python@v5
+ - uses: actions/setup-python@v4
with:
python-version: "3.11"
architecture: "x64"
@@ -60,7 +99,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@v4
+ uses: actions/upload-artifact@v3
with:
name: linting-logs
path: |
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
index 147bcd10..0bbcd30f 100644
--- a/.github/workflows/linting_comment.yml
+++ b/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@v3
+ uses: dawidd6/action-download-artifact@v2
with:
workflow: linting.yml
workflow_conclusion: completed
diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml
index 21ac3f06..6ad33927 100644
--- a/.github/workflows/release-announcements.yml
+++ b/.github/workflows/release-announcements.yml
@@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-python@v5
+ - uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
@@ -56,7 +56,7 @@ jobs:
bsky-post:
runs-on: ubuntu-latest
steps:
- - uses: zentered/bluesky-post-action@v0.1.0
+ - uses: zentered/bluesky-post-action@v0.0.2
with:
post: |
Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
diff --git a/.gitpod.yml b/.gitpod.yml
index 363d5b1d..acf72695 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -7,7 +7,6 @@ tasks:
- name: unset JAVA_TOOL_OPTIONS
command: |
unset JAVA_TOOL_OPTIONS
-
vscode:
extensions: # based on nf-core.nf-core-extensionpack
- codezombiech.gitignore # Language support for .gitignore files
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index af57081f..0c31cdb9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,10 +1,5 @@
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
- rev: "v3.1.0"
+ rev: "v2.7.1"
hooks:
- id: prettier
- - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
- rev: "2.7.3"
- hooks:
- - id: editorconfig-checker
- alias: ec
diff --git a/assets/email_template.html b/assets/email_template.html
index f06f650b..716f3e31 100644
--- a/assets/email_template.html
+++ b/assets/email_template.html
@@ -12,7 +12,7 @@
-
genomic-medicine-sweden/tomte ${version}
+genomic-medicine-sweden/tomte v${version}
Run Name: $runName
<% if (!success){
diff --git a/lib/WorkflowMain.groovy b/lib/WorkflowMain.groovy
index c3d365c6..f437aa3c 100755
--- a/lib/WorkflowMain.groovy
+++ b/lib/WorkflowMain.groovy
@@ -24,7 +24,7 @@ class WorkflowMain {
//
// Validate parameters and print summary to screen
//
- public static void initialise(workflow, params, log, args) {
+ public static void initialise(workflow, params, log) {
// Print workflow version and exit on --version
if (params.version) {
@@ -35,8 +35,6 @@ class WorkflowMain {
// Check that a -profile or Nextflow config has been provided to run the pipeline
NfcoreTemplate.checkConfigProvided(workflow, log)
- // Check that the profile doesn't contain spaces and doesn't end with a trailing comma
- checkProfile(workflow.profile, args, log)
// Check that conda channels are set-up correctly
if (workflow.profile.tokenize(',').intersect(['conda', 'mamba']).size() >= 1) {
@@ -62,16 +60,4 @@ class WorkflowMain {
}
return null
}
-
- //
- // Exit pipeline if --profile contains spaces
- //
- private static void checkProfile(profile, args, log) {
- if (profile.endsWith(',')) {
- Nextflow.error "Profile cannot end with a trailing comma. Please remove the comma from the end of the profile string.\nHint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
- }
- if (args[0]) {
- log.warn "nf-core pipelines do not accept positional arguments. The positional argument `${args[0]}` has been detected.\n Hint: A common mistake is to provide multiple values to `-profile` separated by spaces. Please use commas to separate profiles instead,e.g., `-profile docker,test`."
- }
- }
}
diff --git a/lib/nfcore_external_java_deps.jar b/lib/nfcore_external_java_deps.jar
new file mode 100644
index 00000000..805c8bb5
Binary files /dev/null and b/lib/nfcore_external_java_deps.jar differ
diff --git a/main.nf b/main.nf
index 68142a92..756d37c7 100644
--- a/main.nf
+++ b/main.nf
@@ -61,7 +61,7 @@ if (params.validate_params) {
validateParameters()
}
-WorkflowMain.initialise(workflow, params, log, args)
+WorkflowMain.initialise(workflow, params, log)
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/modules.json b/modules.json
index eddbd2b6..e79fb80b 100644
--- a/modules.json
+++ b/modules.json
@@ -33,7 +33,7 @@
},
"custom/dumpsoftwareversions": {
"branch": "master",
- "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
+ "git_sha": "bba7e362e4afead70653f84d8700588ea28d0f9e",
"installed_by": ["modules"]
},
"ensemblvep/vep": {
@@ -48,7 +48,7 @@
},
"fastqc": {
"branch": "master",
- "git_sha": "c9488585ce7bd35ccd2a30faa2371454c8112fb9",
+ "git_sha": "65ad3e0b9a4099592e1102e92e10455dc661cf53",
"installed_by": ["modules"]
},
"gatk4/asereadcounter": {
@@ -93,7 +93,7 @@
},
"multiqc": {
"branch": "master",
- "git_sha": "8ec825f465b9c17f9d83000022995b4f7de6fe93",
+ "git_sha": "4ab13872435962dadc239979554d13709e20bf29",
"installed_by": ["modules"]
},
"picard/collectrnaseqmetrics": {
diff --git a/modules/nf-core/custom/dumpsoftwareversions/environment.yml b/modules/nf-core/custom/dumpsoftwareversions/environment.yml
index 9b3272bc..f0c63f69 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/environment.yml
+++ b/modules/nf-core/custom/dumpsoftwareversions/environment.yml
@@ -4,4 +4,4 @@ channels:
- bioconda
- defaults
dependencies:
- - bioconda::multiqc=1.19
+ - bioconda::multiqc=1.17
diff --git a/modules/nf-core/custom/dumpsoftwareversions/main.nf b/modules/nf-core/custom/dumpsoftwareversions/main.nf
index f2187611..7685b33c 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/main.nf
+++ b/modules/nf-core/custom/dumpsoftwareversions/main.nf
@@ -4,8 +4,8 @@ process CUSTOM_DUMPSOFTWAREVERSIONS {
// Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.17--pyhdfd78af_0' :
+ 'biocontainers/multiqc:1.17--pyhdfd78af_0' }"
input:
path versions
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
index b1e1630b..eec1db10 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
+++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test
@@ -31,12 +31,7 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert snapshot(
- process.out.versions,
- file(process.out.mqc_yml[0]).readLines()[0..10],
- file(process.out.yml[0]).readLines()[0..7]
- ).match()
- }
+ { assert snapshot(process.out).match() }
)
}
}
diff --git a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
index 5f59a936..4274ed57 100644
--- a/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
+++ b/modules/nf-core/custom/dumpsoftwareversions/tests/main.nf.test.snap
@@ -1,33 +1,27 @@
{
"Should run without failures": {
"content": [
- [
- "versions.yml:md5,76d454d92244589d32455833f7c1ba6d"
- ],
- [
- "data: \"\\n\\n \\n \\n Process Name | \\n \\",
- " \\ Software | \\n Version | \\n
\\n \\n\\",
- " \\n\\n\\n CUSTOM_DUMPSOFTWAREVERSIONS | \\n python | \\n\\",
- " \\ 3.11.7 | \\n
\\n\\n\\n | \\n \\",
- " \\ yaml | \\n 5.4.1 | \\n
\\n\\n\\n\\",
- " \\n\\n TOOL1 | \\n tool1 | \\n\\",
- " \\ 0.11.9 | \\n
\\n\\n\\n\\n\\n TOOL2 | \\n\\",
- " \\ tool2 | \\n 1.9 | \\n
\\n\\n\\n\\",
- " \\n\\n Workflow | \\n Nextflow | \\n\\"
- ],
- [
- "CUSTOM_DUMPSOFTWAREVERSIONS:",
- " python: 3.11.7",
- " yaml: 5.4.1",
- "TOOL1:",
- " tool1: 0.11.9",
- "TOOL2:",
- " tool2: '1.9'",
- "Workflow:"
- ]
+ {
+ "0": [
+ "software_versions.yml:md5,1c851188476409cda5752ce971b20b58"
+ ],
+ "1": [
+ "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d"
+ ],
+ "2": [
+ "versions.yml:md5,3843ac526e762117eedf8825b40683df"
+ ],
+ "mqc_yml": [
+ "software_versions_mqc.yml:md5,2570f4ba271ad08357b0d3d32a9cf84d"
+ ],
+ "versions": [
+ "versions.yml:md5,3843ac526e762117eedf8825b40683df"
+ ],
+ "yml": [
+ "software_versions.yml:md5,1c851188476409cda5752ce971b20b58"
+ ]
+ }
],
- "timestamp": "2024-01-09T23:01:18.710682"
+ "timestamp": "2023-11-03T14:43:22.157011"
}
-}
\ No newline at end of file
+}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test b/modules/nf-core/fastqc/tests/main.nf.test
index 1f21c664..b9e8f926 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test
+++ b/modules/nf-core/fastqc/tests/main.nf.test
@@ -3,20 +3,24 @@ nextflow_process {
name "Test Process FASTQC"
script "../main.nf"
process "FASTQC"
-
tag "modules"
tag "modules_nfcore"
tag "fastqc"
- test("sarscov2 single-end [fastq]") {
+ test("Single-Read") {
when {
+ params {
+ outdir = "$outputDir"
+ }
process {
"""
- input[0] = Channel.of([
+ input[0] = [
[ id: 'test', single_end:true ],
- [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ]
- ])
+ [
+ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
+ ]
+ ]
"""
}
}
@@ -24,189 +28,82 @@ nextflow_process {
then {
assertAll (
{ assert process.success },
-
// NOTE The report contains the date inside it, which means that the md5sum is stable per day, but not longer than that. So you can't md5sum it.
// looks like this:
// https://github.com/nf-core/modules/pull/3903#issuecomment-1743620039
-
- { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
- { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
- { assert path(process.out.html[0][1]).text.contains("
File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 paired-end [fastq]") {
-
- when {
- process {
- """
- input[0] = Channel.of([
- [id: 'test', single_end: false], // meta map
- [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true),
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true) ]
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
-
- { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" },
- { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" },
- { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" },
- { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" },
- { assert path(process.out.html[0][1][0]).text.contains("File type | Conventional base calls |
") },
- { assert path(process.out.html[0][1][1]).text.contains("File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 interleaved [fastq]") {
-
- when {
- process {
- """
- input[0] = Channel.of([
- [id: 'test', single_end: false], // meta map
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_interleaved.fastq.gz', checkIfExists: true)
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
-
- { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
- { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
- { assert path(process.out.html[0][1]).text.contains("File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
+ { assert process.out.html.get(0).get(1) ==~ ".*/test_fastqc.html" },
+ { assert path(process.out.html.get(0).get(1)).getText().contains("File type | Conventional base calls |
") },
+ { assert snapshot(process.out.versions).match("versions") },
+ { assert process.out.zip.get(0).get(1) ==~ ".*/test_fastqc.zip" }
)
}
}
-
- test("sarscov2 paired-end [bam]") {
-
- when {
- process {
- """
- input[0] = Channel.of([
- [id: 'test', single_end: false], // meta map
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true)
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
-
- { assert process.out.html[0][1] ==~ ".*/test_fastqc.html" },
- { assert process.out.zip[0][1] ==~ ".*/test_fastqc.zip" },
- { assert path(process.out.html[0][1]).text.contains("File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 multiple [fastq]") {
-
- when {
- process {
- """
- input[0] = Channel.of([
- [id: 'test', single_end: false], // meta map
- [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true),
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_2.fastq.gz', checkIfExists: true),
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_1.fastq.gz', checkIfExists: true),
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test2_2.fastq.gz', checkIfExists: true) ]
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
-
- { assert process.out.html[0][1][0] ==~ ".*/test_1_fastqc.html" },
- { assert process.out.html[0][1][1] ==~ ".*/test_2_fastqc.html" },
- { assert process.out.html[0][1][2] ==~ ".*/test_3_fastqc.html" },
- { assert process.out.html[0][1][3] ==~ ".*/test_4_fastqc.html" },
- { assert process.out.zip[0][1][0] ==~ ".*/test_1_fastqc.zip" },
- { assert process.out.zip[0][1][1] ==~ ".*/test_2_fastqc.zip" },
- { assert process.out.zip[0][1][2] ==~ ".*/test_3_fastqc.zip" },
- { assert process.out.zip[0][1][3] ==~ ".*/test_4_fastqc.zip" },
- { assert path(process.out.html[0][1][0]).text.contains("File type | Conventional base calls |
") },
- { assert path(process.out.html[0][1][1]).text.contains("File type | Conventional base calls |
") },
- { assert path(process.out.html[0][1][2]).text.contains("File type | Conventional base calls |
") },
- { assert path(process.out.html[0][1][3]).text.contains("File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 custom_prefix") {
-
- when {
- process {
- """
- input[0] = Channel.of([
- [ id:'mysample', single_end:true ], // meta map
- file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true)
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
-
- { assert process.out.html[0][1] ==~ ".*/mysample_fastqc.html" },
- { assert process.out.zip[0][1] ==~ ".*/mysample_fastqc.zip" },
- { assert path(process.out.html[0][1]).text.contains("File type | Conventional base calls |
") },
-
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 single-end [fastq] - stub") {
-
- options "-stub"
-
- when {
- process {
- """
- input[0] = Channel.of([
- [ id: 'test', single_end:true ],
- [ file(params.modules_testdata_base_path + 'genomics/sarscov2/illumina/fastq/test_1.fastq.gz', checkIfExists: true) ]
- ])
- """
- }
- }
-
- then {
- assertAll (
- { assert process.success },
- { assert snapshot(process.out.html.collect { file(it[1]).getName() } +
- process.out.zip.collect { file(it[1]).getName() } +
- process.out.versions ).match() }
- )
- }
- }
-
+// TODO
+// //
+// // Test with paired-end data
+// //
+// workflow test_fastqc_paired_end {
+// input = [
+// [id: 'test', single_end: false], // meta map
+// [
+// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
+// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)
+// ]
+// ]
+
+// FASTQC ( input )
+// }
+
+// //
+// // Test with interleaved data
+// //
+// workflow test_fastqc_interleaved {
+// input = [
+// [id: 'test', single_end: false], // meta map
+// file(params.test_data['sarscov2']['illumina']['test_interleaved_fastq_gz'], checkIfExists: true)
+// ]
+
+// FASTQC ( input )
+// }
+
+// //
+// // Test with bam data
+// //
+// workflow test_fastqc_bam {
+// input = [
+// [id: 'test', single_end: false], // meta map
+// file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)
+// ]
+
+// FASTQC ( input )
+// }
+
+// //
+// // Test with multiple samples
+// //
+// workflow test_fastqc_multiple {
+// input = [
+// [id: 'test', single_end: false], // meta map
+// [
+// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true),
+// file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true),
+// file(params.test_data['sarscov2']['illumina']['test2_1_fastq_gz'], checkIfExists: true),
+// file(params.test_data['sarscov2']['illumina']['test2_2_fastq_gz'], checkIfExists: true)
+// ]
+// ]
+
+// FASTQC ( input )
+// }
+
+// //
+// // Test with custom prefix
+// //
+// workflow test_fastqc_custom_prefix {
+// input = [
+// [ id:'mysample', single_end:true ], // meta map
+// file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)
+// ]
+
+// FASTQC ( input )
+// }
}
diff --git a/modules/nf-core/fastqc/tests/main.nf.test.snap b/modules/nf-core/fastqc/tests/main.nf.test.snap
index 5d624bb8..636a32ce 100644
--- a/modules/nf-core/fastqc/tests/main.nf.test.snap
+++ b/modules/nf-core/fastqc/tests/main.nf.test.snap
@@ -1,20 +1,10 @@
{
- "sarscov2 single-end [fastq] - stub": {
- "content": [
- [
- "test.html",
- "test.zip",
- "versions.yml:md5,e1cc25ca8af856014824abd842e93978"
- ]
- ],
- "timestamp": "2024-01-17T18:40:57.254299"
- },
"versions": {
"content": [
[
"versions.yml:md5,e1cc25ca8af856014824abd842e93978"
]
],
- "timestamp": "2024-01-17T18:36:50.033627"
+ "timestamp": "2023-10-09T23:40:54+0000"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/multiqc/environment.yml b/modules/nf-core/multiqc/environment.yml
index 7625b752..bc0bdb5b 100644
--- a/modules/nf-core/multiqc/environment.yml
+++ b/modules/nf-core/multiqc/environment.yml
@@ -4,4 +4,4 @@ channels:
- bioconda
- defaults
dependencies:
- - bioconda::multiqc=1.19
+ - bioconda::multiqc=1.18
diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf
index 1b9f7c43..00cc48d2 100644
--- a/modules/nf-core/multiqc/main.nf
+++ b/modules/nf-core/multiqc/main.nf
@@ -3,8 +3,8 @@ process MULTIQC {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.19--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.19--pyhdfd78af_0' }"
+ 'https://depot.galaxyproject.org/singularity/multiqc:1.18--pyhdfd78af_0' :
+ 'biocontainers/multiqc:1.18--pyhdfd78af_0' }"
input:
path multiqc_files, stageAs: "?/*"
@@ -43,7 +43,7 @@ process MULTIQC {
stub:
"""
- mkdir multiqc_data
+ touch multiqc_data
touch multiqc_plots
touch multiqc_report.html
diff --git a/modules/nf-core/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml
index 45a9bc35..f1aa660e 100644
--- a/modules/nf-core/multiqc/meta.yml
+++ b/modules/nf-core/multiqc/meta.yml
@@ -1,3 +1,4 @@
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/meta-schema.json
name: multiqc
description: Aggregate results from bioinformatics analyses across many samples into a single report
keywords:
diff --git a/modules/nf-core/multiqc/tests/main.nf.test b/modules/nf-core/multiqc/tests/main.nf.test
index d0438eda..c2dad217 100644
--- a/modules/nf-core/multiqc/tests/main.nf.test
+++ b/modules/nf-core/multiqc/tests/main.nf.test
@@ -7,9 +7,12 @@ nextflow_process {
tag "modules_nfcore"
tag "multiqc"
- test("sarscov2 single-end [fastqc]") {
+ test("MULTIQC: FASTQC") {
when {
+ params {
+ outdir = "$outputDir"
+ }
process {
"""
input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
@@ -23,46 +26,24 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert process.out.report[0] ==~ ".*/multiqc_report.html" },
- { assert process.out.data[0] ==~ ".*/multiqc_data" },
- { assert snapshot(process.out.versions).match("versions") }
+ { assert path(process.out.report.get(0)).exists() },
+ { assert path(process.out.data.get(0)).exists() },
+ { assert path(process.out.versions.get(0)).getText().contains("multiqc") }
)
}
}
- test("sarscov2 single-end [fastqc] [config]") {
+ test("MULTIQC: FASTQC and a config file") {
when {
- process {
- """
- input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
- input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true))
- input[2] = []
- input[3] = []
- """
+ params {
+ outdir = "$outputDir"
}
- }
-
- then {
- assertAll(
- { assert process.success },
- { assert process.out.report[0] ==~ ".*/multiqc_report.html" },
- { assert process.out.data[0] ==~ ".*/multiqc_data" },
- { assert snapshot(process.out.versions).match("versions") }
- )
- }
- }
-
- test("sarscov2 single-end [fastqc] - stub") {
-
- options "-stub"
-
- when {
process {
"""
input[0] = Channel.of([file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz_fastqc_zip'], checkIfExists: true)])
- input[1] = []
+ input[1] = Channel.of(file("https://github.com/nf-core/tools/raw/dev/nf_core/pipeline-template/assets/multiqc_config.yml", checkIfExists: true))
input[2] = []
input[3] = []
"""
@@ -72,10 +53,9 @@ nextflow_process {
then {
assertAll(
{ assert process.success },
- { assert snapshot(process.out.report.collect { file(it).getName() } +
- process.out.data.collect { file(it).getName() } +
- process.out.plots.collect { file(it).getName() } +
- process.out.versions ).match() }
+ { assert path(process.out.report.get(0)).exists() },
+ { assert path(process.out.data.get(0)).exists() },
+ { assert path(process.out.versions.get(0)).getText().contains("multiqc") }
)
}
diff --git a/modules/nf-core/multiqc/tests/main.nf.test.snap b/modules/nf-core/multiqc/tests/main.nf.test.snap
deleted file mode 100644
index d37e7304..00000000
--- a/modules/nf-core/multiqc/tests/main.nf.test.snap
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "versions": {
- "content": [
- [
- "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
- ]
- ],
- "timestamp": "2024-01-09T23:02:49.911994"
- },
- "sarscov2 single-end [fastqc] - stub": {
- "content": [
- [
- "multiqc_report.html",
- "multiqc_data",
- "multiqc_plots",
- "versions.yml:md5,14e9a2661241abd828f4f06a7b5c222d"
- ]
- ],
- "timestamp": "2024-01-09T23:03:14.524346"
- }
-}
\ No newline at end of file
diff --git a/nextflow.config b/nextflow.config
index 1b6bb0b9..707fc9f3 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -97,7 +97,7 @@ try {
}
// Load genomic-medicine-sweden/tomte custom profiles from different institutions.
-// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
+// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
// try {
// includeConfig "${params.custom_config_base}/pipeline/tomte.config"
// } catch (Exception e) {
@@ -117,7 +117,6 @@ profiles {
podman.enabled = false
shifter.enabled = false
charliecloud.enabled = false
- channels = ['conda-forge', 'bioconda', 'defaults']
apptainer.enabled = false
}
mamba {
diff --git a/pyproject.toml b/pyproject.toml
index 7d08e1c8..bc01239b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,13 +1,10 @@
-# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff.
+# Config file for Python. Mostly used to configure linting of bin/*.py with Black.
# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation.
-[tool.ruff]
+[tool.black]
line-length = 120
-target-version = "py38"
-select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
-cache-dir = "~/.cache/ruff"
+target_version = ["py37", "py38", "py39", "py310"]
-[tool.ruff.isort]
-known-first-party = ["nf_core"]
-
-[tool.ruff.per-file-ignores]
-"__init__.py" = ["E402", "F401"]
+[tool.isort]
+profile = "black"
+known_first_party = ["nf_core"]
+multi_line_output = 3
diff --git a/workflows/tomte.nf b/workflows/tomte.nf
index ee6e4819..a2562d4f 100644
--- a/workflows/tomte.nf
+++ b/workflows/tomte.nf
@@ -364,13 +364,6 @@ workflow.onComplete {
}
}
-workflow.onError {
- if (workflow.errorReport.contains("Process requirement exceeds available memory")) {
- println("🛑 Default resources exceed availability 🛑 ")
- println("💡 See here on how to configure pipeline: https://nf-co.re/docs/usage/configuration#tuning-workflow-resources 💡")
- }
-}
-
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
THE END