diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 6cf8ffd921..1bd2e27fb0 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -38,4 +38,4 @@ body: * Executor _(eg. slurm, local, awsbatch)_ * OS _(eg. CentOS Linux, macOS, Linux Mint)_ * Version of nf-core/tools _(eg. 1.1, 1.5, 1.8.2)_ - * Python version _(eg. 3.7, 3.8)_ + * Python version _(eg. 3.10, 3.11)_ diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index c5b3464b83..f7cab98c55 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -3,13 +3,13 @@ 1. Check issue milestones to see outstanding issues to resolve if possible or transfer to the milestones for the next release e.g. [`v1.9`](https://github.com/nf-core/tools/issues?q=is%3Aopen+is%3Aissue+milestone%3A1.9) 2. Most importantly, pick an undeniably outstanding [name](http://www.codenamegenerator.com/) for the release where _Prefix_ = _Metal_ and _Dictionary_ = _Animal_. 3. Check the [pipeline health page](https://nf-co.re/pipeline_health) to make sure that all repos look sane (missing `TEMPLATE` branches etc) -4. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py`. +4. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:latest`. 5. Make sure all CI tests are passing! 6. Create a PR from `dev` to `master` 7. Make sure all CI tests are passing again (additional tests are run on PRs to `master`) 8. Request review (2 approvals required) -9. Merge the PR into `master` -10. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) +9. Run `rich-codex` to regenerate docs screengrabs (actions `workflow_dispatch` button) +10. Merge the PR into `master` 11. Wait for CI tests on the commit to passed 12. 
(Optional but a good idea) Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR. 13. Create a new release copying the `CHANGELOG` for that release into the description section. @@ -19,4 +19,4 @@ 1. Check the automated template synchronisation has been triggered properly. This should automatically open PRs directly to individual pipeline repos with the appropriate changes to update the pipeline template. 2. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 3. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) -4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` +4. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the gitpod container to `nfcore/gitpod:dev`. 
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml index 1f3d241d5f..dd64ffa3e5 100644 --- a/.github/workflows/branch.yml +++ b/.github/workflows/branch.yml @@ -13,7 +13,7 @@ jobs: - name: Check PRs if: github.repository == 'nf-core/tools' run: | - { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/tools ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] + { [[ ${{github.event.pull_request.head.repo.full_name}} == nf-core/tools ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] # If the above check failed, post a comment on the PR explaining the failure - name: Post PR comment diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml new file mode 100644 index 0000000000..4b55c5e4aa --- /dev/null +++ b/.github/workflows/clean-up.yml @@ -0,0 +1,24 @@ +name: "Close user-tagged issues and PRs" +on: + schedule: + - cron: "0 0 * * 0" # Once a week + +jobs: + clean-up: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v7 + with: + stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." + stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." + close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." 
+ days-before-stale: 30 + days-before-close: 20 + days-before-pr-close: -1 + any-of-labels: "awaiting-changes,awaiting-feedback" + exempt-issue-labels: "WIP" + exempt-pr-labels: "WIP" + repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml index d09bdfd822..46152d92a7 100644 --- a/.github/workflows/create-lint-wf.yml +++ b/.github/workflows/create-lint-wf.yml @@ -84,6 +84,10 @@ jobs: - name: remove TODO run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \; + # Replace zenodo.XXXXXX to pass readme linting + - name: replace zenodo.XXXXXX + run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \; + # Run nf-core linting - name: nf-core lint run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned @@ -108,7 +112,7 @@ jobs: run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force - name: nf-core modules install gitlab - run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch main install fastqc --force --dir nf-core-testpipeline/ + run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch branch-tester install fastp --dir nf-core-testpipeline/ - name: nf-core modules list local run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/ diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 2da7901d16..8f3c5fdb47 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -29,10 +29,10 @@ jobs: - uses: actions/checkout@v3 name: Check out source-code repository - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Install python dependencies run: | diff --git 
a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml index 8657283417..d762154f7e 100644 --- a/.github/workflows/deploy-pypi.yml +++ b/.github/workflows/deploy-pypi.yml @@ -16,10 +16,10 @@ jobs: - uses: actions/checkout@v3 name: Check out source-code repository - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Install python dependencies run: | diff --git a/.github/workflows/pytest-frozen-ubuntu-20.04.yml b/.github/workflows/pytest-frozen-ubuntu-20.04.yml index 6d49145ed7..b015376633 100644 --- a/.github/workflows/pytest-frozen-ubuntu-20.04.yml +++ b/.github/workflows/pytest-frozen-ubuntu-20.04.yml @@ -33,6 +33,7 @@ jobs: - name: Downgrade git to the Ubuntu official repository's version run: | + sudo apt update sudo apt remove git git-man sudo add-apt-repository --remove ppa:git-core/ppa sudo apt install git diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 00cd3c813e..d03b1c33de 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml index 65bc1dd9cc..54aaf240df 100644 --- a/.github/workflows/rich-codex.yml +++ b/.github/workflows/rich-codex.yml @@ -18,7 +18,7 @@ jobs: uses: nf-core/setup-nextflow@v1 - name: Install nf-core/tools - run: pip install . 
+ run: pip install git+https://github.com/nf-core/tools.git@dev - name: Generate terminal images with rich-codex uses: ewels/rich-codex@v1 diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml index 0c84d38aea..3ec4dc2e10 100644 --- a/.github/workflows/tools-api-docs-dev.yml +++ b/.github/workflows/tools-api-docs-dev.yml @@ -22,10 +22,10 @@ jobs: - name: Check out source-code repository uses: actions/checkout@v3 - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Install python dependencies run: | diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml index 98c4f997fb..71acf6baa1 100644 --- a/.github/workflows/tools-api-docs-release.yml +++ b/.github/workflows/tools-api-docs-release.yml @@ -21,10 +21,10 @@ jobs: - name: Check out source-code repository uses: actions/checkout@v3 - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v4 with: - python-version: 3.7 + python-version: 3.8 - name: Install python dependencies run: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eddb0f1048..b7aeeb5bc9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,14 +1,13 @@ repos: - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.1.0 hooks: - id: black - language_version: python3.9 - repo: https://github.com/pycqa/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.6.2" + rev: "v2.7.1" hooks: - id: prettier diff --git a/.prettierignore b/.prettierignore index bd1a8bee9c..4cd77bb4ed 100644 --- a/.prettierignore +++ b/.prettierignore @@ -3,4 +3,5 @@ adaptivecard.json slackreport.json docs/api/_build testing - +nf_core/module-template/modules/meta.yml +nf_core/module-template/tests/test.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 
923a6c1ca4..8921d75fea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,60 @@ # nf-core/tools: Changelog +# [v2.8 - Ruthenium Monkey](https://github.com/nf-core/tools/releases/tag/2.8) - [2023-04-27] + +### Template + +- Explicitly disable `conda` when a container profile ([#2140](https://github.com/nf-core/tools/pull/2140)) +- Turn on automatic clean up of intermediate files in `work/` on successful pipeline completion in full-test config ([#2163](https://github.com/nf-core/tools/pull/2163)) [Contributed by @jfy133] +- Add documentation to `usage.md` on how to use `params.yml` files, based on nf-core/ampliseq text ([#2173](https://github.com/nf-core/tools/pull/2173/)) [Contributed by @jfy133, @d4straub] +- Make jobs automatically resubmit for a much wider range of exit codes (now `104` and `130..145`) ([#2170](https://github.com/nf-core/tools/pull/2170)) +- Add a clean-up GHA which closes issues and PRs with specific labels ([#2183](https://github.com/nf-core/tools/pull/2183)) +- Remove problematic sniffer code in samplesheet_check.py that could give false positive 'missing header' errors ([https://github.com/nf-core/tools/pull/2194]) [Contributed by @Midnighter, @jfy133] +- Consistent syntax for branch checks in PRs ([#2202](https://github.com/nf-core/tools/issues/2202)) +- Fixed minor Jinja2 templating bug that caused the PR template to miss a newline +- Updated AWS tests to use newly moved `seqeralabs/action-tower-launch` instead of `nf-core/tower-action` +- Remove `.cff` files from `.editorconfig` [(#2145)[https://github.com/nf-core/tools/pull/2145]] +- Simplify pipeline README ([#2186](https://github.com/nf-core/tools/issues/2186)) +- Added support for the apptainer container engine via `-profile apptainer`. ([#2244](https://github.com/nf-core/tools/issues/2244)) [Contributed by @jfy133] +- Added config `docker.registry` to pipeline template for a configurable default container registry when using Docker containers. 
Defaults to `quay.io` ([#2133](https://github.com/nf-core/tools/pull/2133)) +- Add tower.yml file to the pipeline template ([#2251](https://github.com/nf-core/tools/pull/2251)) +- Add mastodon badge to README ([#2253](https://github.com/nf-core/tools/pull/2253)) +- Removed `quay.io` from all module Docker container references as this is now supplied at pipeline level. ([#2249](https://github.com/nf-core/tools/pull/2249)) +- Remove `CITATION.cff` file from pipeline template, to avoid that pipeline Zenodo entries reference the nf-core publication instead of the pipeline ([#2059](https://github.com/nf-core/tools/pull/2059)). + +### Linting + +- Update modules lint test to fail if enable_conda is found ([#2213](https://github.com/nf-core/tools/pull/2213)) +- Read module lint configuration from `.nf-core.yml`, not `.nf-core-lint.yml` ([#2221](https://github.com/nf-core/tools/pull/2221)) +- `nf-core schema lint` now defaults to linting `nextflow_schema.json` if no filename is provided ([#2225](https://github.com/nf-core/tools/pull/2225)) +- Warn if `/zenodo.XXXXXX` is present in the Readme ([#2254](https://github.com/nf-core/tools/pull/2254)) +- Lint all labels in a module ([#2227](https://github.com/nf-core/tools/pull/2227)) + +### Modules + +- Add an `--empty-template` option to create a module without TODO statements or examples ([#2175](https://github.com/nf-core/tools/pull/2175) & [#2177](https://github.com/nf-core/tools/pull/2177)) +- Removed the `nf-core modules mulled` command and all its code dependencies ([2199](https://github.com/nf-core/tools/pull/2199)). +- Take into accout the provided `--git_remote` URL when linting all modules ([2243](https://github.com/nf-core/tools/pull/2243)). 
+ +### Subworkflows + +- Fixing problem when a module included in a subworkflow had a name change from TOOL to TOOL/SUBTOOL ([#2177](https://github.com/nf-core/tools/pull/2177)) +- Fix `nf-core subworkflows test` not running subworkflow tests ([#2181](https://github.com/nf-core/tools/pull/2181)) +- Add tests for `nf-core subworkflows create-test-yml` ([#2219](https://github.com/nf-core/tools/pull/2219)) + +### General + +- Deprecate Python 3.7 support because it reaches EOL ([#2210](https://github.com/nf-core/tools/pull/2210)) +- `nf-core modules/subworkflows info` now prints the include statement for the module/subworkflow ([#2182](https://github.com/nf-core/tools/pull/2182)). +- Add a clean-up GHA which closes issues and PRs with specific labels ([#2183](https://github.com/nf-core/tools/pull/2183)) +- update minimum version of rich to 13.3.1 ([#2185](https://github.com/nf-core/tools/pull/2185)) +- Add the Nextflow version to Gitpod container matching the minimal Nextflow version for nf-core (according to `nextflow.config`) ([#2196](https://github.com/nf-core/tools/pull/2196)) +- Use `nfcore/gitpod:dev` container in the dev branch ([#2196](https://github.com/nf-core/tools/pull/2196)) +- Replace requests_mock with responses in test mocks ([#2165](https://github.com/nf-core/tools/pull/2165)). +- Add warning when installing a module from an `org_path` that exists in multiple remotes in `modules.json` ([#2228](https://github.com/nf-core/tools/pull/2228) [#2239](https://github.com/nf-core/tools/pull/2239)). +- Add the possibility to translate refgenie asset aliases to the ones used in a pipeline with an alias_translations.yaml file ([#2242](https://github.com/nf-core/tools/pull/2242)). +- Add initial CHM13 support ([1988](https://github.com/nf-core/tools/issues/1988)) + ## [v2.7.2 - Mercury Eagle Patch](https://github.com/nf-core/tools/releases/tag/2.7.2) - [2022-12-19] ### Template @@ -49,7 +104,6 @@ Another big release with lots of new features and bug fixes. 
Thanks to all contr - Fix lint warnings for `samplesheet_check.nf` module ([#1875](https://github.com/nf-core/tools/pull/1875)). - Check that the workflow name provided with a template doesn't contain dashes ([#1822](https://github.com/nf-core/tools/pull/1822)) -- Remove `CITATION.cff` file from pipeline template, to avoid that pipeline Zenodo entries reference the nf-core publication instead of the pipeline ([#2059](https://github.com/nf-core/tools/pull/2059)). ### Linting diff --git a/README.md b/README.md index 73d868bb32..0de42e86e8 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,6 @@ A python package with helper tools for the nf-core community. - [`modules lint` - Check a module against nf-core guidelines](#check-a-module-against-nf-core-guidelines) - [`modules test` - Run the tests for a module](#run-the-tests-for-a-module-using-pytest) - [`modules bump-versions` - Bump software versions of modules](#bump-bioconda-and-container-versions-of-modules-in) - - [`modules mulled` - Generate the name for a multi-tool container image](#generate-the-name-for-a-multi-tool-container-image) - [`nf-core subworkflows` - commands for dealing with subworkflows](#subworkflows) - [`subworkflows list` - List available subworkflows](#list-subworkflows) @@ -344,6 +343,8 @@ You can run the pipeline by simply providing the directory path for the `workflo nextflow run /path/to/download/nf-core-rnaseq-dev/workflow/ --input mydata.csv --outdir results # usual parameters here ``` +> Note that if you downloaded singularity images, you will need to use `-profile singularity` or have it enabled in your config file. + ### Downloaded nf-core configs The pipeline files are automatically updated (`params.custom_config_base` is set to `../configs`), so that the local copy of institutional configs are available when running the pipeline. @@ -385,7 +386,7 @@ The Singularity image download finds containers using two methods: 2. 
It scrapes any files it finds with a `.nf` file extension in the workflow `modules` directory for lines that look like `container = "xxx"`. This is the typical method for DSL2 pipelines, which have one container per process. -Some DSL2 modules have container addresses for docker (eg. `quay.io/biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity continaer (eg. `https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`). +Some DSL2 modules have container addresses for docker (eg. `biocontainers/fastqc:0.11.9--0`) and also URLs for direct downloads of a Singularity continaer (eg. `https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0`). Where both are found, the download URL is preferred. Once a full list of containers is found, they are processed in the following order: @@ -608,26 +609,37 @@ If you want to add a parameter to the schema, you first have to add the paramete The graphical interface is oganzised in groups and within the groups the single parameters are stored. For a better overview you can collapse all groups with the `Collapse groups` button, then your new parameters will be the only remaining one at the bottom of the page. Now you can either create a new group with the `Add group` button or drag and drop the paramters in an existing group. Therefor the group has to be expanded. The group title will be displayed, if you run your pipeline with the `--help` flag and its description apears on the parameter page of your pipeline. -Now you can start to change the parameter itself. The description is a short explanation about the parameter, that apears if you run your pipeline with the `--help` flag. By clicking on the dictionary icon you can add a longer explanation for the parameter page of your pipeline. If you want to specify some conditions for your parameter, like the file extension, you can use the nut icon to open the settings. This menu depends on the `type` you assigned to your parameter. 
For intergers you can define a min and max value, and for strings the file extension can be specified. +Now you can start to change the parameter itself. The `ID` of a new parameter should be defined in small letters without whitespaces. The description is a short free text explanation about the parameter, that appears if you run your pipeline with the `--help` flag. By clicking on the dictionary icon you can add a longer explanation for the parameter page of your pipeline. Usually, they contain a small paragraph about the parameter settings or a used datasource, like databases or references. If you want to specify some conditions for your parameter, like the file extension, you can use the nut icon to open the settings. This menu depends on the `type` you assigned to your parameter. For integers you can define a min and max value, and for strings the file extension can be specified. + +The `type` field is one of the most important points in your pipeline schema, since it defines the datatype of your input and how it will be interpreted. This allows extensive testing prior to starting the pipeline. + +The basic datatypes for a pipeline schema are: + +- `string` +- `number` +- `integer` +- `boolean` + +For the `string` type you have three different options in the settings (nut icon): `enumerated values`, `pattern` and `format`. The first option, `enumerated values`, allows you to specify a list of specific input values. The list has to be separated with a pipe. The `pattern` and `format` settings can depend on each other. The `format` has to be either a directory or a file path. Depending on the `format` setting selected, specifying the `pattern` setting can be the most efficient and time saving option, especially for `file paths`. The `number` and `integer` types share the same settings. Similarly to `string`, there is an `enumerated values` option with the possibility of specifying a `min` and `max` value. 
For the `boolean` there is no further settings and the default value is usually `false`. The `boolean` value can be switched to `true` by adding the flag to the command. This parameter type is often used to skip specific sections of a pipeline. -After you filled your schema, click on the `Finished` button in the top rigth corner, this will automatically update your `nextflow_schema.json`. If this is not working you can copy the schema from the graphical interface and paste it in your `nextflow_schema.json` file. +After filling the schema, click on the `Finished` button in the top right corner, this will automatically update your `nextflow_schema.json`. If this is not working, the schema can be copied from the graphical interface and pasted in your `nextflow_schema.json` file. ### Update existing pipeline schema -Important for the update of a pipeline schema is, that if you want to change the default value of a parameter, you should change it in the `nextflow.config` file, since the value in the config file overwrites the value in the pipeline schema. To change any other parameter use `nf-core schema build --web-only` to open the graphical interface without rebuilding the pipeline schema. Now, you can change your parameters as mentioned above but keep in mind that changing the parameter datatype is depending on the default value you specified in the `nextflow.config` file. +It's important to change the default value of a parameter in the `nextflow.config` file first and then in the pipeline schema, because the value in the config file overwrites the value in the pipeline schema. To change any other parameter use `nf-core schema build --web-only` to open the graphical interface without rebuilding the pipeline schema. Now, the parameters can be changed as mentioned above but keep in mind that changing the parameter datatype depends on the default value specified in the `nextflow.config` file. 
### Linting a pipeline schema The pipeline schema is linted as part of the main pipeline `nf-core lint` command, however sometimes it can be useful to quickly check the syntax of the JSONSchema without running a full lint run. -Usage is `nf-core schema lint `, eg: +Usage is `nf-core schema lint ` (defaulting to `nextflow_schema.json`), eg: -![`nf-core schema lint nextflow_schema.json`](docs/images/nf-core-schema-lint.svg) +![`nf-core schema lint`](docs/images/nf-core-schema-lint.svg) ## Bumping a pipeline version number @@ -959,17 +971,6 @@ bump-versions: star/align: "2.6.1d" ``` -### Generate the name for a multi-tool container image - -When you want to use an image of a multi-tool container and you know the specific dependencies and their versions of that container, for example, by looking them up in the [BioContainers hash.tsv](https://github.com/BioContainers/multi-package-containers/blob/master/combinations/hash.tsv), you can use the `nf-core modules mulled` helper tool. This tool generates the name of a BioContainers mulled image. - - - -![`nf-core modules mulled pysam==0.16.0.1 biopython==1.78`](docs/images/nf-core-modules-mulled.svg) - ## Subworkflows After the launch of nf-core modules, we can provide now also nf-core subworkflows to fully utilize the power of DSL2 modularization. 
@@ -1030,6 +1031,8 @@ To list subworkflows installed in a local pipeline directory you can use `nf-cor @@ -1043,6 +1046,8 @@ This shows documentation about the subworkflow on the command line, similar to w ![`nf-core subworkflows info bam_rseqc`](docs/images/nf-core-subworkflows-info.svg) @@ -1054,6 +1059,8 @@ A subworkflow installed this way will be installed to the `./subworkflows/nf-cor ![`nf-core subworkflows install bam_rseqc`](docs/images/nf-core-subworkflows-install.svg) @@ -1073,6 +1080,8 @@ You can update subworkflows installed from a remote repository in your pipeline ![`nf-core subworkflows update --all --no-preview`](docs/images/nf-core-subworkflows-update.svg) @@ -1132,6 +1141,8 @@ To delete a subworkflow from your pipeline, run `nf-core subworkflows remove`. ![`nf-core subworkflows remove bam_rseqc`](docs/images/nf-core-subworkflows-remove.svg) @@ -1160,10 +1171,10 @@ The `nf-core subworkflows create` command will prompt you with the relevant ques -![`cd modules && nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --label process_low --meta --force`](docs/images/nf-core-subworkflows-create.svg) +![`cd modules && nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force`](docs/images/nf-core-subworkflows-create.svg) ### Create a subworkflow test config file @@ -1172,7 +1183,7 @@ To help developers build new subworkflows, the `nf-core subworkflows create-test After you have written a minimal Nextflow script to test your subworkflow in `/tests/subworkflow//main.nf`, this command will run the tests for you and create the `/tests/subworkflow///test.yml` file. @@ -1186,7 +1197,7 @@ To run unit tests of a subworkflow that you have installed or the test created b You can specify the subworkflow name in the form TOOL/SUBTOOL in command line or provide it later by prompts. 
- - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - + - - - - $ nf-core lint - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Testing pipeline: . - - -╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ - -pipeline_todos: pipeline_todos - -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - -╭─[!] 2 Module Test Warnings─────────────────────────────────────────────────────────────────────╮ -                                           ╷                          ╷                            -Module name                              File path               Test message              -╶──────────────────────────────────────────┼──────────────────────────┼──────────────────────────╴ -custom/dumpsoftwareversionsmodules/nf-core/custom/…New version available -multiqcmodules/nf-core/multiqc New version available -                                           ╵                          ╵                            -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -╭───────────────────────╮ -LINT RESULTS SUMMARY  -├───────────────────────┤ -[✔] 179 Tests Passed -[?]   1 Test Ignored -[!]   2 Test Warnings -[✗]   0 Tests Failed -╰───────────────────────╯ + + + + $ nf-core lint + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Testing pipeline: . + + +╭─[?] 1 Pipeline Test Ignored────────────────────────────────────────────────────────────────────╮ + +pipeline_todos: pipeline_todos + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +╭─[!] 1 Pipeline Test Warning────────────────────────────────────────────────────────────────────╮ + +readme: README contains the placeholder zenodo.XXXXXXX. This should be replaced with the zenodo  +doi (after the first release). + +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ + +╭──────────────────────╮ +LINT RESULTS SUMMARY +├──────────────────────┤ +[✔] 183 Tests Passed +[?]   1 Test Ignored +[!]   1 Test Warning +[✗]   0 Tests Failed +╰──────────────────────╯ diff --git a/docs/images/nf-core-list-rna.svg b/docs/images/nf-core-list-rna.svg index 9af2b2b45b..0aa93b0d91 100644 --- a/docs/images/nf-core-list-rna.svg +++ b/docs/images/nf-core-list-rna.svg @@ -19,96 +19,96 @@ font-weight: 700; } - .terminal-2712691909-matrix { + .terminal-38953927-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2712691909-title { + .terminal-38953927-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2712691909-r1 { fill: #c5c8c6 } -.terminal-2712691909-r2 { fill: #98a84b } -.terminal-2712691909-r3 { fill: #9a9b99 } -.terminal-2712691909-r4 { fill: #608ab1 } -.terminal-2712691909-r5 { fill: #d0b344 } -.terminal-2712691909-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-2712691909-r7 { fill: #868887 } + .terminal-38953927-r1 { fill: #c5c8c6 } +.terminal-38953927-r2 { fill: #98a84b } 
+.terminal-38953927-r3 { fill: #9a9b99 } +.terminal-38953927-r4 { fill: #608ab1 } +.terminal-38953927-r5 { fill: #d0b344 } +.terminal-38953927-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-38953927-r7 { fill: #868887 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -120,32 +120,32 @@ - + - - $ nf-core list rna rna-seq - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ -Pipeline Name       StarsLatest Release   ReleasedLast PulledHave latest release? -┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ -│ smrnaseq             │    49 │          2.2.0 │   yesterday │           - │ -                    │ -│ rnafusion            │    95 │          2.3.4 │   yesterday │           - │ -                    │ -│ rnaseq               │   604 │         3.11.2 │  2 days ago │           - │ -                    │ -│ dualrnaseq           │    12 │          1.0.0 │ 2 years ago │           - │ -                    │ -│ circrna              │    27 │            dev │           - │           - │ -                    │ -│ lncpipe              │    25 │            dev │           - │           - │ -                    │ -│ scflow               │    19 │            dev │           - │           - │ -                    │ -│ spatialtranscriptom… │    19 │            dev │           - │           - │ -                    │ -└──────────────────────┴───────┴────────────────┴─────────────┴─────────────┴──────────────────────┘ + + $ nf-core list rna rna-seq + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━┓ +Pipeline Name       StarsLatest Release   ReleasedLast PulledHave latest release? +┡━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━┩ +│ smrnaseq             │    49 │          2.2.0 │   yesterday │           - │ -                    │ +│ rnafusion            │    95 │          2.3.4 │  2 days ago │           - │ -                    │ +│ rnaseq               │   604 │         3.11.2 │  3 days ago │           - │ -                    │ +│ dualrnaseq           │    12 │          1.0.0 │ 2 years ago │           - │ -                    │ +│ circrna              │    27 │            dev │           - │           - │ -                    │ +│ lncpipe              │    25 │            dev │           - │           - │ -                    │ +│ scflow               │    19 │            dev │           - │           - │ -                    │ +│ spatialtranscriptom… │    19 │            dev │           - │           - │ -                    │ +└──────────────────────┴───────┴────────────────┴─────────────┴─────────────┴──────────────────────┘ diff --git a/docs/images/nf-core-list-stars.svg b/docs/images/nf-core-list-stars.svg index 89a0cbaace..bd24375ed5 100644 --- a/docs/images/nf-core-list-stars.svg +++ b/docs/images/nf-core-list-stars.svg @@ -19,88 +19,88 @@ font-weight: 700; } - .terminal-1179384851-matrix { + .terminal-3741580213-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1179384851-title { + .terminal-3741580213-title { font-size: 18px; font-weight: bold; font-family: 
arial; } - .terminal-1179384851-r1 { fill: #c5c8c6 } -.terminal-1179384851-r2 { fill: #98a84b } -.terminal-1179384851-r3 { fill: #9a9b99 } -.terminal-1179384851-r4 { fill: #608ab1 } -.terminal-1179384851-r5 { fill: #d0b344 } -.terminal-1179384851-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-1179384851-r7 { fill: #868887 } -.terminal-1179384851-r8 { fill: #868887;font-style: italic; } + .terminal-3741580213-r1 { fill: #c5c8c6 } +.terminal-3741580213-r2 { fill: #98a84b } +.terminal-3741580213-r3 { fill: #9a9b99 } +.terminal-3741580213-r4 { fill: #608ab1 } +.terminal-3741580213-r5 { fill: #d0b344 } +.terminal-3741580213-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3741580213-r7 { fill: #868887 } +.terminal-3741580213-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -112,29 +112,29 @@ - + - - $ nf-core list -s stars - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ rnaseq              │   604 │         3.11.2 │    2 days ago │           - │ -                   │ -│ sarek               │   235 │          3.1.2 │  4 months ago │           - │ -                   │ -│ chipseq             │   144 │          2.0.0 │  7 months ago │           - │ -                   │ -│ atacseq             │   134 │            2.0 │  5 months ago │           - │ -                   │ -[..truncated..] 
+ + $ nf-core list -s stars + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ rnaseq              │   604 │         3.11.2 │    3 days ago │           - │ -                   │ +│ sarek               │   235 │          3.1.2 │  4 months ago │           - │ -                   │ +│ chipseq             │   144 │          2.0.0 │  7 months ago │           - │ -                   │ +│ atacseq             │   134 │            2.0 │  5 months ago │           - │ -                   │ +[..truncated..] 
diff --git a/docs/images/nf-core-list.svg b/docs/images/nf-core-list.svg index 221ab2ada0..3c4a4cd4df 100644 --- a/docs/images/nf-core-list.svg +++ b/docs/images/nf-core-list.svg @@ -19,91 +19,91 @@ font-weight: 700; } - .terminal-938707336-matrix { + .terminal-3979640600-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-938707336-title { + .terminal-3979640600-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-938707336-r1 { fill: #c5c8c6 } -.terminal-938707336-r2 { fill: #98a84b } -.terminal-938707336-r3 { fill: #9a9b99 } -.terminal-938707336-r4 { fill: #608ab1 } -.terminal-938707336-r5 { fill: #d0b344 } -.terminal-938707336-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-938707336-r7 { fill: #868887 } -.terminal-938707336-r8 { fill: #868887;font-style: italic; } + .terminal-3979640600-r1 { fill: #c5c8c6 } +.terminal-3979640600-r2 { fill: #98a84b } +.terminal-3979640600-r3 { fill: #9a9b99 } +.terminal-3979640600-r4 { fill: #608ab1 } +.terminal-3979640600-r5 { fill: #d0b344 } +.terminal-3979640600-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3979640600-r7 { fill: #868887 } +.terminal-3979640600-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -115,30 +115,30 @@ - + - - $ nf-core list - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ -Have latest         -Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            
-┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ -│ smrnaseq            │    49 │          2.2.0 │     yesterday │           - │ -                   │ -│ rnafusion           │    95 │          2.3.4 │     yesterday │           - │ -                   │ -│ rnaseq              │   604 │         3.11.2 │    2 days ago │           - │ -                   │ -│ demultiplex         │    25 │          1.2.0 │    2 days ago │           - │ -                   │ -│ differentialabunda… │    19 │          1.2.0 │    1 week ago │           - │ -                   │ -[..truncated..] + + $ nf-core list + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━┳━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┓ +Have latest         +Pipeline Name      StarsLatest Release     ReleasedLast Pulledrelease?            +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━╇━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━┩ +│ funcscan            │    29 │          1.1.0 │  14 hours ago │           - │ -                   │ +│ smrnaseq            │    49 │          2.2.0 │     yesterday │           - │ -                   │ +│ rnafusion           │    95 │          2.3.4 │    2 days ago │           - │ -                   │ +│ rnaseq              │   604 │         3.11.2 │    3 days ago │           - │ -                   │ +│ demultiplex         │    25 │          1.2.0 │    3 days ago │           - │ -                   │ +[..truncated..] 
diff --git a/docs/images/nf-core-modules-bump-version.svg b/docs/images/nf-core-modules-bump-version.svg index feb044916e..0a9094ad26 100644 --- a/docs/images/nf-core-modules-bump-version.svg +++ b/docs/images/nf-core-modules-bump-version.svg @@ -19,61 +19,61 @@ font-weight: 700; } - .terminal-191281607-matrix { + .terminal-3754932584-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-191281607-title { + .terminal-3754932584-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-191281607-r1 { fill: #c5c8c6 } -.terminal-191281607-r2 { fill: #98a84b } -.terminal-191281607-r3 { fill: #9a9b99 } -.terminal-191281607-r4 { fill: #608ab1 } -.terminal-191281607-r5 { fill: #d0b344 } + .terminal-3754932584-r1 { fill: #c5c8c6 } +.terminal-3754932584-r2 { fill: #98a84b } +.terminal-3754932584-r3 { fill: #9a9b99 } +.terminal-3754932584-r4 { fill: #608ab1 } +.terminal-3754932584-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + @@ -85,21 +85,21 @@ - + - - $ nf-core modules bump-versions fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - + + $ nf-core modules bump-versions fastqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + diff --git a/docs/images/nf-core-modules-create-test.svg b/docs/images/nf-core-modules-create-test.svg index 7df385f093..e49f6dcffa 100644 --- a/docs/images/nf-core-modules-create-test.svg +++ b/docs/images/nf-core-modules-create-test.svg @@ -19,84 +19,84 @@ font-weight: 700; } - .terminal-1818600303-matrix { + .terminal-1372103280-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1818600303-title { + .terminal-1372103280-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1818600303-r1 { fill: #c5c8c6 } -.terminal-1818600303-r2 { fill: #98a84b } -.terminal-1818600303-r3 { fill: #9a9b99 } -.terminal-1818600303-r4 { fill: #608ab1 } -.terminal-1818600303-r5 { fill: #d0b344 } -.terminal-1818600303-r6 { fill: #ff2c7a } -.terminal-1818600303-r7 { fill: #98729f } + .terminal-1372103280-r1 { fill: #c5c8c6 } +.terminal-1372103280-r2 { fill: #98a84b } +.terminal-1372103280-r3 { fill: #9a9b99 } +.terminal-1372103280-r4 { fill: #608ab1 } +.terminal-1372103280-r5 { fill: #d0b344 } +.terminal-1372103280-r6 { fill: #ff2c7a } +.terminal-1372103280-r7 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -108,28 +108,28 @@ - + - - $ nf-core modules create-test-yml fastqc --no-prompts --force - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Looking for test workflow entry points: 'tests/modules/nf-core/fastqc/main.nf' -──────────────────────────────────────────────────────────────────────────────────────────────────── -INFO     Building test meta for entry point 'test_fastqc_paired_end' -INFO     Running 'fastqc' test with command:                                                         -nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc_paired_end -c  -./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config --outdir  -/tmp/tmpfvwrxyfa -work-dir /tmp/tmph09ncxux + + $ nf-core modules create-test-yml fastqc --no-prompts --force + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Looking for test workflow entry points: 'tests/modules/nf-core/fastqc/main.nf' +──────────────────────────────────────────────────────────────────────────────────────────────────── +INFO     Building test meta for entry point 'test_fastqc_paired_end' +INFO     Running 'fastqc' test with command:                                                         +nextflow run ./tests/modules/nf-core/fastqc -entry test_fastqc_paired_end -c  +./tests/config/nextflow.config -c ./tests/modules/nf-core/fastqc/nextflow.config --outdir  +/tmp/tmpnqfm1ogi -work-dir /tmp/tmpcv36s2sh diff --git a/docs/images/nf-core-modules-create.svg b/docs/images/nf-core-modules-create.svg index 4a17a62f41..3a97b353d1 100644 --- a/docs/images/nf-core-modules-create.svg +++ 
b/docs/images/nf-core-modules-create.svg @@ -19,74 +19,74 @@ font-weight: 700; } - .terminal-3009293582-matrix { + .terminal-2173119663-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3009293582-title { + .terminal-2173119663-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3009293582-r1 { fill: #c5c8c6 } -.terminal-3009293582-r2 { fill: #98a84b } -.terminal-3009293582-r3 { fill: #9a9b99 } -.terminal-3009293582-r4 { fill: #608ab1 } -.terminal-3009293582-r5 { fill: #d0b344 } -.terminal-3009293582-r6 { fill: #68a0b3;font-weight: bold } + .terminal-2173119663-r1 { fill: #c5c8c6 } +.terminal-2173119663-r2 { fill: #98a84b } +.terminal-2173119663-r3 { fill: #9a9b99 } +.terminal-2173119663-r4 { fill: #608ab1 } +.terminal-2173119663-r5 { fill: #d0b344 } +.terminal-2173119663-r6 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -98,25 +98,25 @@ - + - - $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Repository type: modules -INFO    Press enter to use default values (shown in brackets)or type your own responses.  -ctrl+click underlined text to open links. -INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' + + $ nf-core modules create fastqc --author @nf-core-bot  --label process_low --meta --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. +INFO     Using Bioconda package: 'bioconda::fastqc=0.12.1' diff --git a/docs/images/nf-core-modules-info.svg b/docs/images/nf-core-modules-info.svg index 320672f91a..5bd142d9aa 100644 --- a/docs/images/nf-core-modules-info.svg +++ b/docs/images/nf-core-modules-info.svg @@ -19,163 +19,163 @@ font-weight: 700; } - .terminal-3462197848-matrix { + .terminal-957411833-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3462197848-title { + .terminal-957411833-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3462197848-r1 { fill: #c5c8c6 } -.terminal-3462197848-r2 { fill: #98a84b } -.terminal-3462197848-r3 { fill: #9a9b99 } -.terminal-3462197848-r4 { fill: #608ab1 } -.terminal-3462197848-r5 { fill: #d0b344 } -.terminal-3462197848-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-3462197848-r7 { fill: #98a84b;font-weight: bold } -.terminal-3462197848-r8 { fill: #868887 } -.terminal-3462197848-r9 { fill: #d08442 } -.terminal-3462197848-r10 { fill: #868887;font-style: italic; } -.terminal-3462197848-r11 { fill: #98729f } + .terminal-957411833-r1 { fill: #c5c8c6 } +.terminal-957411833-r2 { fill: #98a84b } +.terminal-957411833-r3 { fill: #9a9b99 } +.terminal-957411833-r4 { fill: #608ab1 } +.terminal-957411833-r5 { fill: #d0b344 } +.terminal-957411833-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-957411833-r7 { fill: #98a84b;font-weight: bold } +.terminal-957411833-r8 { fill: #868887 } +.terminal-957411833-r9 { fill: #d08442 } 
+.terminal-957411833-r10 { fill: #868887;font-style: italic; } +.terminal-957411833-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -187,53 +187,53 @@ - + - - $ nf-core modules info abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 🔧 Tools: abacas                                                                                 │ -│ 📖 Description: contiguate draft genome assembly                                                 │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                  ╷                                                                   ╷              -📥 Inputs        Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} -                  ╵                                                                   ╵              -                  ╷                                                                   ╷              -📤 Outputs       Description                                                             Pattern -╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ - meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ -                  │single_end:false ]                                                 │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* -                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ -                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ -                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ -                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ -╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ - versions  (file)│File containing software versions                                  │versions.yml -                  ╵                                                                   ╵              - - 💻  Installation command: nf-core 
modules install abacas - + + $ nf-core modules info abacas + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +╭─ Module: abacas  ────────────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 🔧 Tools: abacas                                                                                 │ +│ 📖 Description: contiguate draft genome assembly                                                 │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                  ╷                                                                   ╷              +📥 Inputs        Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. 
[ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + scaffold  (file)│Fasta file containing scaffold                                     │*.{fasta,fa} +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + fasta  (file)   │FASTA reference file                                               │*.{fasta,fa} +                  ╵                                                                   ╵              +                  ╷                                                                   ╷              +📤 Outputs       Description                                                             Pattern +╺━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━╸ + meta  (map)     │Groovy Map containing sample information e.g. [ id:'test',         │ +                  │single_end:false ]                                                 │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + results  (files)│List containing abacas output files [ 'test.abacas.bin',           │ *.{abacas}* +                  │'test.abacas.fasta', 'test.abacas.gaps', 'test.abacas.gaps.tab',   │ +                  │'test.abacas.nucmer.delta', 'test.abacas.nucmer.filtered.delta',   │ +                  │'test.abacas.nucmer.tiling', 'test.abacas.tab',                    │ +                  │'test.abacas.unused.contigs.out', 'test.abacas.MULTIFASTA.fa' ]    │ +╶─────────────────┼───────────────────────────────────────────────────────────────────┼────────────╴ + versions  (file)│File containing software versions                                  │versions.yml +                  ╵                                                                   ╵              + + 💻  Installation command: nf-core 
modules install abacas + diff --git a/docs/images/nf-core-modules-install.svg b/docs/images/nf-core-modules-install.svg index 6fb029df0a..f2869ed343 100644 --- a/docs/images/nf-core-modules-install.svg +++ b/docs/images/nf-core-modules-install.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-3957689443-matrix { + .terminal-2146405380-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3957689443-title { + .terminal-2146405380-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3957689443-r1 { fill: #c5c8c6 } -.terminal-3957689443-r2 { fill: #98a84b } -.terminal-3957689443-r3 { fill: #9a9b99 } -.terminal-3957689443-r4 { fill: #608ab1 } -.terminal-3957689443-r5 { fill: #d0b344 } + .terminal-2146405380-r1 { fill: #c5c8c6 } +.terminal-2146405380-r2 { fill: #98a84b } +.terminal-2146405380-r3 { fill: #9a9b99 } +.terminal-2146405380-r4 { fill: #608ab1 } +.terminal-2146405380-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -100,26 +100,26 @@ - + - - $ nf-core modules install abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Installing 'abacas' -INFO     Use the following statement to include this module:                                         - - include { ABACAS } from '../modules/nf-core/abacas/main'                                            - + + $ nf-core modules install abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Installing 'abacas' +INFO     Use the following statement to include this module:                                         + + include { ABACAS } from '../modules/nf-core/abacas/main'                                            + diff --git a/docs/images/nf-core-modules-lint.svg b/docs/images/nf-core-modules-lint.svg index b39f7aa4fd..8f3b419489 100644 --- a/docs/images/nf-core-modules-lint.svg +++ b/docs/images/nf-core-modules-lint.svg @@ -19,94 +19,94 @@ font-weight: 700; } - .terminal-3722872946-matrix { + .terminal-3094906900-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3722872946-title { + .terminal-3094906900-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3722872946-r1 { fill: #c5c8c6 } -.terminal-3722872946-r2 { fill: #98a84b } -.terminal-3722872946-r3 { fill: #9a9b99 } -.terminal-3722872946-r4 { fill: #608ab1 } -.terminal-3722872946-r5 { fill: #d0b344 } -.terminal-3722872946-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-3722872946-r7 { fill: #98a84b;font-weight: bold } -.terminal-3722872946-r8 { fill: #cc555a } + .terminal-3094906900-r1 { fill: #c5c8c6 } +.terminal-3094906900-r2 { fill: #98a84b } +.terminal-3094906900-r3 { fill: #9a9b99 } +.terminal-3094906900-r4 { fill: #608ab1 } +.terminal-3094906900-r5 { fill: #d0b344 } +.terminal-3094906900-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-3094906900-r7 { fill: #98a84b;font-weight: bold } +.terminal-3094906900-r8 { fill: #cc555a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -118,31 +118,31 @@ - + - - $ nf-core modules lint multiqc - -                                   
       ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Linting modules repo: '.' -INFO     Linting module: 'multiqc' - -╭───────────────────────╮ -LINT RESULTS SUMMARY -├───────────────────────┤ -[✔]  23 Tests Passed  -[!]   0 Test Warnings -[✗]   0 Tests Failed  -╰───────────────────────╯ + + $ nf-core modules lint multiqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Linting modules repo: '.' +INFO     Linting module: 'multiqc' + +╭───────────────────────╮ +LINT RESULTS SUMMARY +├───────────────────────┤ +[✔]  24 Tests Passed  +[!]   
0 Test Warnings +[✗]   0 Tests Failed  +╰───────────────────────╯ diff --git a/docs/images/nf-core-modules-list-local.svg b/docs/images/nf-core-modules-list-local.svg index fab2cecf7e..3128f1caad 100644 --- a/docs/images/nf-core-modules-list-local.svg +++ b/docs/images/nf-core-modules-list-local.svg @@ -19,108 +19,108 @@ font-weight: 700; } - .terminal-2617511112-matrix { + .terminal-4212330781-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2617511112-title { + .terminal-4212330781-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2617511112-r1 { fill: #c5c8c6 } -.terminal-2617511112-r2 { fill: #98a84b } -.terminal-2617511112-r3 { fill: #9a9b99 } -.terminal-2617511112-r4 { fill: #608ab1 } -.terminal-2617511112-r5 { fill: #d0b344 } -.terminal-2617511112-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-2617511112-r7 { fill: #868887;font-style: italic; } + .terminal-4212330781-r1 { fill: #c5c8c6 } +.terminal-4212330781-r2 { fill: #98a84b } +.terminal-4212330781-r3 { fill: #9a9b99 } +.terminal-4212330781-r4 { fill: #608ab1 } +.terminal-4212330781-r5 { fill: #d0b344 } +.terminal-4212330781-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-4212330781-r7 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -132,36 +132,36 @@ - + - - $ nf-core modules list local - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Modules installed in '.':                                                                   - -┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ -Module Name        Repository        Version SHA        Message           Date       -┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ -│ custom/dumpsoftwar… │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ -│                     │                    │                     │ syntax for all     │            │ -│                     │                    │                     │ modules (#2654)    │            │ -│ fastqc              │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ -│                     │                    │                     │ syntax for all     │            │ -│                     │                    │                     │ modules (#2654)    │            │ -│ multiqc             │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ -│                     │                    │                     │ syntax for all     │            │ -│                     │                    │                     │ modules (#2654)    │            │ -[..truncated..] + + $ nf-core modules list local + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Modules installed in '.':                                                                   + +┏━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓ +Module Name        Repository        Version SHA        Message           Date       +┡━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩ +│ custom/dumpsoftwar… │ https://github.co… │ 76cc4938c1f6ea5c7d… │ give execution     │ 2023-04-28 │ +│                     │                    │                     │ permissions to     │            │ +│                     │                    │                     │ dumpsoftwareversi… │            │ +│                     │                    │                     │ (#3347)            │            │ +│ fastqc              │ https://github.co… │ c8e35eb2055c099720… │ Bulk change conda  │ 2022-12-13 │ +│                     │                    │                     │ syntax for all     │            │ +│                     │                    │                     │ modules (#2654)    │            │ +│ multiqc             │ https://github.co… │ f2d63bd5b68925f98f… │ fix meta.ymls for  │ 2023-04-28 │ +│                     │                    │                     │ dumpsoftware and   │            │ +[..truncated..] 
diff --git a/docs/images/nf-core-modules-list-remote.svg b/docs/images/nf-core-modules-list-remote.svg index 9373440820..4faf10450f 100644 --- a/docs/images/nf-core-modules-list-remote.svg +++ b/docs/images/nf-core-modules-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-3693960194-matrix { + .terminal-3737934755-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-3693960194-title { + .terminal-3737934755-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-3693960194-r1 { fill: #c5c8c6 } -.terminal-3693960194-r2 { fill: #98a84b } -.terminal-3693960194-r3 { fill: #9a9b99 } -.terminal-3693960194-r4 { fill: #608ab1 } -.terminal-3693960194-r5 { fill: #d0b344 } -.terminal-3693960194-r6 { fill: #1984e9;text-decoration: underline; } -.terminal-3693960194-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-3693960194-r8 { fill: #868887;font-style: italic; } + .terminal-3737934755-r1 { fill: #c5c8c6 } +.terminal-3737934755-r2 { fill: #98a84b } +.terminal-3737934755-r3 { fill: #9a9b99 } +.terminal-3737934755-r4 { fill: #608ab1 } +.terminal-3737934755-r5 { fill: #d0b344 } +.terminal-3737934755-r6 { fill: #1984e9;text-decoration: underline; } +.terminal-3737934755-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-3737934755-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core modules list remote - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Modules available from https://github.com/nf-core/modules.git(master):                     - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Module Name                                           -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ abacas                                                │ -│ abricate/run                                          │ -│ abricate/summary                                      │ -│ adapterremoval                                        │ -│ adapterremovalfixprefix                               │ -│ admixture                                             │ -│ affy/justrma                                          │ -│ agat/convertspgff2gtf                                 │ -│ agat/convertspgxf2gxf                                 │ -[..truncated..] + + $ nf-core modules list remote + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Modules available from https://github.com/nf-core/modules.git(master):                     + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +Module Name                                           +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ abacas                                                │ +│ abricate/run                                          │ +│ abricate/summary                                      │ +│ adapterremoval                                        │ +│ adapterremovalfixprefix                               │ +│ admixture                                             │ +│ affy/justrma                                          │ +│ agat/convertspgff2gtf                                 │ +│ agat/convertspgxf2gxf                                 │ +[..truncated..] diff --git a/docs/images/nf-core-modules-mulled.svg b/docs/images/nf-core-modules-mulled.svg deleted file mode 100644 index 2ac4b28f23..0000000000 --- a/docs/images/nf-core-modules-mulled.svg +++ /dev/null @@ -1,116 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - $ nf-core modules mulled pysam==0.16.0.1 biopython==1.78 - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -INFO     Found docker image on quay.io! 
✨                                                           -INFO     Mulled container hash:                                                                      -mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0 - - - - diff --git a/docs/images/nf-core-modules-patch.svg b/docs/images/nf-core-modules-patch.svg index 729bb1d519..b0d8a66a95 100644 --- a/docs/images/nf-core-modules-patch.svg +++ b/docs/images/nf-core-modules-patch.svg @@ -19,65 +19,65 @@ font-weight: 700; } - .terminal-2827097699-matrix { + .terminal-277091844-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2827097699-title { + .terminal-277091844-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2827097699-r1 { fill: #c5c8c6 } -.terminal-2827097699-r2 { fill: #98a84b } -.terminal-2827097699-r3 { fill: #9a9b99 } -.terminal-2827097699-r4 { fill: #608ab1 } -.terminal-2827097699-r5 { fill: #d0b344 } -.terminal-2827097699-r6 { fill: #cc555a;font-weight: bold } + .terminal-277091844-r1 { fill: #c5c8c6 } +.terminal-277091844-r2 { fill: #98a84b } +.terminal-277091844-r3 { fill: #9a9b99 } +.terminal-277091844-r4 { fill: #608ab1 } +.terminal-277091844-r5 { fill: #d0b344 } +.terminal-277091844-r6 { fill: #cc555a;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -89,22 +89,22 @@ - + - - $ nf-core modules patch fastqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -ERROR    Module 'modules/nf-core/fastqc' is unchanged. No patch to compute                           + + $ nf-core modules patch fastqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +ERROR    Module 'modules/nf-core/fastqc' is unchanged. No patch to compute                           diff --git a/docs/images/nf-core-modules-remove.svg b/docs/images/nf-core-modules-remove.svg index ce72c00e13..fb1d3591ff 100644 --- a/docs/images/nf-core-modules-remove.svg +++ b/docs/images/nf-core-modules-remove.svg @@ -19,64 +19,64 @@ font-weight: 700; } - .terminal-2450992562-matrix { + .terminal-3673042259-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2450992562-title { + .terminal-3673042259-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2450992562-r1 { fill: #c5c8c6 } -.terminal-2450992562-r2 { fill: #98a84b } -.terminal-2450992562-r3 { fill: #9a9b99 } -.terminal-2450992562-r4 { fill: #608ab1 } -.terminal-2450992562-r5 { fill: #d0b344 } + .terminal-3673042259-r1 { fill: #c5c8c6 } +.terminal-3673042259-r2 { fill: #98a84b } +.terminal-3673042259-r3 { fill: #9a9b99 } +.terminal-3673042259-r4 { fill: #608ab1 } +.terminal-3673042259-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -88,22 +88,22 @@ - + - - $ nf-core modules remove abacas - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Removed files for 'abacas' and it's dependencies 'abacas'.                                  + + $ nf-core modules remove abacas + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Removed files for 'abacas' and it's dependencies 'abacas'.                                  diff --git a/docs/images/nf-core-modules-test.svg b/docs/images/nf-core-modules-test.svg index c11314a7d1..83f6535612 100644 --- a/docs/images/nf-core-modules-test.svg +++ b/docs/images/nf-core-modules-test.svg @@ -19,67 +19,67 @@ font-weight: 700; } - .terminal-424201008-matrix { + .terminal-110742225-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-424201008-title { + .terminal-110742225-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-424201008-r1 { fill: #c5c8c6 } -.terminal-424201008-r2 { fill: #98a84b } -.terminal-424201008-r3 { fill: #9a9b99 } -.terminal-424201008-r4 { fill: #608ab1 } -.terminal-424201008-r5 { fill: #d0b344 } + .terminal-110742225-r1 { fill: #c5c8c6 } +.terminal-110742225-r2 { fill: #98a84b } +.terminal-110742225-r3 { fill: #9a9b99 } +.terminal-110742225-r4 { fill: #608ab1 } +.terminal-110742225-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -91,23 +91,23 @@ - + - - $ nf-core modules test samtools/view --no-prompts - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -────────────────────────────────────────── samtools/view ─────────────────────────────────────────── -INFO     Running pytest for module 'samtools/view' + + $ nf-core modules test samtools/view --no-prompts + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +────────────────────────────────────────── samtools/view ─────────────────────────────────────────── +INFO     Running pytest for module 'samtools/view' diff --git a/docs/images/nf-core-modules-update.svg b/docs/images/nf-core-modules-update.svg index 8e6f3baf35..a41cbbeec4 100644 --- a/docs/images/nf-core-modules-update.svg +++ b/docs/images/nf-core-modules-update.svg @@ -19,76 +19,76 @@ font-weight: 700; } - .terminal-4072425894-matrix { + .terminal-3947254567-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-4072425894-title { + .terminal-3947254567-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-4072425894-r1 { fill: #c5c8c6 } -.terminal-4072425894-r2 { fill: #98a84b } -.terminal-4072425894-r3 { fill: #9a9b99 } -.terminal-4072425894-r4 { fill: #608ab1 } -.terminal-4072425894-r5 { fill: #d0b344 } + .terminal-3947254567-r1 { fill: #c5c8c6 } +.terminal-3947254567-r2 { fill: #98a84b } +.terminal-3947254567-r3 { fill: #9a9b99 } +.terminal-3947254567-r4 { fill: #608ab1 } +.terminal-3947254567-r5 { fill: #d0b344 } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - 
+ - + @@ -100,26 +100,26 @@ - + - - $ nf-core modules update --all --no-preview - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO    'modules/nf-core/abacas' is already up to date                                              -INFO     Updating 'nf-core/custom/dumpsoftwareversions' -INFO    'modules/nf-core/fastqc' is already up to date                                              -INFO     Updating 'nf-core/multiqc' -INFO     Updates complete ✨                                                                         + + $ nf-core modules update --all --no-preview + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO    'modules/nf-core/abacas' is already up to date                                              +INFO    'modules/nf-core/custom/dumpsoftwareversions' is already up to date                         +INFO    'modules/nf-core/fastqc' is already up to date                                              +INFO    'modules/nf-core/multiqc' is already up to date                                             +INFO     Updates complete ✨                                                                         diff --git a/docs/images/nf-core-schema-build.svg b/docs/images/nf-core-schema-build.svg index ac95ebba84..5fa9746d5a 100644 --- a/docs/images/nf-core-schema-build.svg +++ b/docs/images/nf-core-schema-build.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-1833749233-matrix { + .terminal-3395078802-matrix { font-family: 
Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1833749233-title { + .terminal-3395078802-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1833749233-r1 { fill: #c5c8c6 } -.terminal-1833749233-r2 { fill: #98a84b } -.terminal-1833749233-r3 { fill: #9a9b99 } -.terminal-1833749233-r4 { fill: #608ab1 } -.terminal-1833749233-r5 { fill: #d0b344 } -.terminal-1833749233-r6 { fill: #98a84b;font-weight: bold } -.terminal-1833749233-r7 { fill: #868887;font-weight: bold } -.terminal-1833749233-r8 { fill: #868887 } -.terminal-1833749233-r9 { fill: #4e707b;font-weight: bold } -.terminal-1833749233-r10 { fill: #68a0b3;font-weight: bold } + .terminal-3395078802-r1 { fill: #c5c8c6 } +.terminal-3395078802-r2 { fill: #98a84b } +.terminal-3395078802-r3 { fill: #9a9b99 } +.terminal-3395078802-r4 { fill: #608ab1 } +.terminal-3395078802-r5 { fill: #d0b344 } +.terminal-3395078802-r6 { fill: #98a84b;font-weight: bold } +.terminal-3395078802-r7 { fill: #868887;font-weight: bold } +.terminal-3395078802-r8 { fill: #868887 } +.terminal-3395078802-r9 { fill: #4e707b;font-weight: bold } +.terminal-3395078802-r10 { fill: #68a0b3;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -96,23 +96,23 @@ - + - - $ nf-core schema build --no-prompts - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 30 params) -INFO     Writing schema with 31 params: './nextflow_schema.json' + + $ nf-core schema build --no-prompts + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 30 params) +INFO     Writing schema with 31 params: './nextflow_schema.json' diff --git a/docs/images/nf-core-schema-lint.svg b/docs/images/nf-core-schema-lint.svg index 79113e70ec..a5b8a94fd3 100644 --- a/docs/images/nf-core-schema-lint.svg +++ b/docs/images/nf-core-schema-lint.svg @@ -19,68 +19,68 @@ font-weight: 700; } - .terminal-1041388458-matrix { + .terminal-1690716924-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-1041388458-title { + .terminal-1690716924-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-1041388458-r1 { fill: #c5c8c6 } -.terminal-1041388458-r2 { fill: #98a84b } -.terminal-1041388458-r3 { fill: #9a9b99 } -.terminal-1041388458-r4 { fill: #608ab1 } -.terminal-1041388458-r5 { fill: #d0b344 } -.terminal-1041388458-r6 { fill: #98a84b;font-weight: bold } -.terminal-1041388458-r7 { fill: #868887;font-weight: bold } -.terminal-1041388458-r8 { fill: #868887 } -.terminal-1041388458-r9 { fill: #4e707b;font-weight: bold } + .terminal-1690716924-r1 { fill: #c5c8c6 } +.terminal-1690716924-r2 { fill: #98a84b } +.terminal-1690716924-r3 { fill: #9a9b99 } +.terminal-1690716924-r4 { fill: #608ab1 } +.terminal-1690716924-r5 { fill: #d0b344 } +.terminal-1690716924-r6 { fill: #98a84b;font-weight: bold } +.terminal-1690716924-r7 { fill: #868887;font-weight: bold } +.terminal-1690716924-r8 { fill: #868887 } +.terminal-1690716924-r9 { fill: #4e707b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -92,22 +92,22 @@ - + - - $ nf-core schema lint nextflow_schema.json - -     
                                     ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 31 params) + + $ nf-core schema lint + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 31 params) diff --git a/docs/images/nf-core-schema-validate.svg b/docs/images/nf-core-schema-validate.svg index 3133ec66b8..0ec7e246aa 100644 --- a/docs/images/nf-core-schema-validate.svg +++ b/docs/images/nf-core-schema-validate.svg @@ -19,71 +19,71 @@ font-weight: 700; } - .terminal-2998337404-matrix { + .terminal-3313499933-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2998337404-title { + .terminal-3313499933-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2998337404-r1 { fill: #c5c8c6 } -.terminal-2998337404-r2 { fill: #98a84b } -.terminal-2998337404-r3 { fill: #9a9b99 } -.terminal-2998337404-r4 { fill: #608ab1 } -.terminal-2998337404-r5 { fill: #d0b344 } -.terminal-2998337404-r6 { fill: #98a84b;font-weight: bold } -.terminal-2998337404-r7 { fill: #868887;font-weight: bold } -.terminal-2998337404-r8 { fill: #868887 } -.terminal-2998337404-r9 { fill: #4e707b;font-weight: bold } + .terminal-3313499933-r1 { fill: #c5c8c6 } +.terminal-3313499933-r2 { fill: #98a84b } +.terminal-3313499933-r3 { 
fill: #9a9b99 } +.terminal-3313499933-r4 { fill: #608ab1 } +.terminal-3313499933-r5 { fill: #d0b344 } +.terminal-3313499933-r6 { fill: #98a84b;font-weight: bold } +.terminal-3313499933-r7 { fill: #868887;font-weight: bold } +.terminal-3313499933-r8 { fill: #868887 } +.terminal-3313499933-r9 { fill: #4e707b;font-weight: bold } - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -95,23 +95,23 @@ - + - - $ nf-core schema validate nf-core-rnaseq/workflow nf-params.json - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -INFO    [] Default parameters match schema validation -INFO    [] Pipeline schema looks valid(found 93 params) -INFO    [] Input parameters look valid + + $ nf-core schema validate nf-core-rnaseq/workflow nf-params.json + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +INFO    [] Default parameters match schema validation +INFO    [] Pipeline schema looks valid(found 93 params) +INFO    [] Input parameters look valid diff --git a/docs/images/nf-core-subworkflows-create-test.svg b/docs/images/nf-core-subworkflows-create-test.svg index 5e8841a50e..074b32dd15 100644 --- a/docs/images/nf-core-subworkflows-create-test.svg +++ b/docs/images/nf-core-subworkflows-create-test.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -Aborted. + + $ nf-core subworkflows create-test-yml bam_stats_samtools --no-prompts --force + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Looking for test workflow entry points:                                                     +'tests/subworkflows/nf-core/bam_stats_samtools/main.nf' +──────────────────────────────────────────────────────────────────────────────────────────────────── +INFO     Building test meta for entry point 'test_bam_stats_samtools' +INFO     Running 'bam_stats_samtools' test with command:                                             +nextflow run ./tests/subworkflows/nf-core/bam_stats_samtools -entry test_bam_stats_samtools +-c ./tests/config/nextflow.config --outdir /tmp/tmp4iszml15 -work-dir /tmp/tmp1r4spwkd diff --git a/docs/images/nf-core-subworkflows-create.svg b/docs/images/nf-core-subworkflows-create.svg index 763a5a5d94..215fce11c0 100644 --- a/docs/images/nf-core-subworkflows-create.svg +++ b/docs/images/nf-core-subworkflows-create.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + + + + + + + - + - + - - $ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot  --label process_low --meta  ---force - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -Usage: nf-core subworkflows create [OPTIONS] subworkflow name                                       - -Try 'nf-core subworkflows create -h' for help. -╭─ Error ──────────────────────────────────────────────────────────────────────────────────────────╮ - No such option: --label Did you mean --help?                      
                                -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ - + + $ nf-core subworkflows create bam_stats_samtools --author @nf-core-bot --force + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Repository type: modules +INFO    Press enter to use default values (shown in brackets)or type your own responses.  +ctrl+click underlined text to open links. +INFO     Created / edited following files:                                                           +           ./subworkflows/nf-core/bam_stats_samtools/main.nf +           ./subworkflows/nf-core/bam_stats_samtools/meta.yml +           ./tests/subworkflows/nf-core/bam_stats_samtools/main.nf +           ./tests/subworkflows/nf-core/bam_stats_samtools/test.yml +           ./tests/subworkflows/nf-core/bam_stats_samtools/nextflow.config +           ./tests/config/pytest_modules.yml diff --git a/docs/images/nf-core-subworkflows-info.svg b/docs/images/nf-core-subworkflows-info.svg index dd5a6e51f7..bace90622f 100644 --- a/docs/images/nf-core-subworkflows-info.svg +++ b/docs/images/nf-core-subworkflows-info.svg @@ -19,304 +19,304 @@ font-weight: 700; } - .terminal-2200225568-matrix { + .terminal-4158441153-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-2200225568-title { + .terminal-4158441153-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-2200225568-r1 { fill: #c5c8c6 } -.terminal-2200225568-r2 { fill: #98a84b } -.terminal-2200225568-r3 { fill: #9a9b99 } -.terminal-2200225568-r4 { fill: #608ab1 } -.terminal-2200225568-r5 { fill: #d0b344 } 
-.terminal-2200225568-r6 { fill: #c5c8c6;font-weight: bold } -.terminal-2200225568-r7 { fill: #98a84b;font-weight: bold } -.terminal-2200225568-r8 { fill: #868887 } -.terminal-2200225568-r9 { fill: #d08442 } -.terminal-2200225568-r10 { fill: #868887;font-style: italic; } -.terminal-2200225568-r11 { fill: #98729f } + .terminal-4158441153-r1 { fill: #c5c8c6 } +.terminal-4158441153-r2 { fill: #98a84b } +.terminal-4158441153-r3 { fill: #9a9b99 } +.terminal-4158441153-r4 { fill: #608ab1 } +.terminal-4158441153-r5 { fill: #d0b344 } +.terminal-4158441153-r6 { fill: #c5c8c6;font-weight: bold } +.terminal-4158441153-r7 { fill: #98a84b;font-weight: bold } +.terminal-4158441153-r8 { fill: #868887 } +.terminal-4158441153-r9 { fill: #d08442 } +.terminal-4158441153-r10 { fill: #868887;font-style: italic; } +.terminal-4158441153-r11 { fill: #98729f } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -328,100 +328,100 @@ - + - - $ nf-core subworkflows info bam_rseqc - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -╭─ Subworkflow: bam_rseqc  ────────────────────────────────────────────────────────────────────────╮ -│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ -│ 📖 Description: Subworkflow to run multiple commands in the RSeqC package                        │ -╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ -                       ╷                                                                   ╷         -📥 Inputs             Description                                                        Pattern -╺━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━╸ - meta  (map)          │Groovy Map containing sample information e.g. [ id:'test',         │ -                       │single_end:false ]                                                 │ -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bam  (file)          │BAM file to calculate statistics                                   │*.{bam} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bai  (file)          │Index for input BAM file                                           │*.{bai} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - bed  (file)          │BED file for the reference gene model                              │*.{bed} -╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ - rseqc_modules  (list)│List of rseqc modules to run e.g. 
[ 'bam_stat', 'infer_experiment' │ -                       │]                                                                  │ -                       ╵                                                                   ╵         -                                     ╷                                   ╷                           -📤 Outputs                          Description                                          Pattern -╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━╸ - bamstat_txt  (file)                │bam statistics report              │           *.bam_stat.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_distance  (file)     │the inner distances                │     *.inner_distance.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_freq  (file)         │frequencies of different insert    │*.inner_distance_freq.txt -                                     │sizes                              │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_mean  (file)         │mean/median values of inner        │*.inner_distance_mean.txt -                                     │distances                          │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_pdf  (file)          │distribution plot of inner         │*.inner_distance_plot.pdf -                                     │distances                          │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - innerdistance_rscript  (file)      │script to reproduce the plot       │  *.inner_distance_plot.R -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - inferexperiment_txt  
(file)        │infer_experiment results report    │   *.infer_experiment.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_bed  (file)     │bed file of annotated junctions    │           *.junction.bed -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_interact_bed   │Interact bed file                  │           *.Interact.bed -(file)                              │                                   │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_xls  (file)     │xls file with junction information │                    *.xls -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_pdf  (file)     │junction plot                      │           *.junction.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_events_pdf     │events plot                        │             *.events.pdf -(file)                              │                                   │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_rscript  (file) │Rscript to reproduce the plots     │                      *.r -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionannotation_log  (file)     │Log file generated by tool         │                    *.log -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionsaturation_pdf  (file)     │Junction saturation report         │                    *.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - junctionsaturation_rscript  (file) │Junction 
saturation R-script       │                      *.r -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readdistribution_txt  (file)       │the read distribution report       │  *.read_distribution.txt -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_seq_xls  (file)    │Read duplication rate determined   │         *seq.DupRate.xls -                                     │from mapping position of read      │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_pos_xls  (file)    │Read duplication rate determined   │         *pos.DupRate.xls -                                     │from sequence of read              │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_pdf  (file)        │plot of duplication rate           │                    *.pdf -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - readduplication_rscript  (file)    │script to reproduce the plot       │                      *.R -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - tin_txt  (file)                    │TXT file containing tin.py results │                    *.txt -                                     │summary                            │ -╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ - versions  (file)                   │File containing software versions  │             versions.yml -                                     ╵                                   ╵                           - - 💻  Installation command: nf-core subworkflows install bam_rseqc - + + $ nf-core subworkflows info bam_rseqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +╭─ Subworkflow: bam_rseqc  ────────────────────────────────────────────────────────────────────────╮ +│ 🌐 Repository: https://github.com/nf-core/modules.git                                            │ +│ 📖 Description: Subworkflow to run multiple commands in the RSeqC package                        │ +╰──────────────────────────────────────────────────────────────────────────────────────────────────╯ +                       ╷                                                                   ╷         +📥 Inputs             Description                                                        Pattern +╺━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━╸ + meta  (map)          │Groovy Map containing sample information e.g. [ id:'test',         │ +                       │single_end:false ]                                                 │ +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bam  (file)          │BAM file to calculate statistics                                   │*.{bam} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bai  (file)          │Index for input BAM file                                           │*.{bai} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + bed  (file)          │BED file for the reference gene model                              │*.{bed} +╶──────────────────────┼───────────────────────────────────────────────────────────────────┼───────╴ + rseqc_modules  (list)│List of rseqc modules to run e.g. 
[ 'bam_stat', 'infer_experiment' │ +                       │]                                                                  │ +                       ╵                                                                   ╵         +                                     ╷                                   ╷                           +📤 Outputs                          Description                                          Pattern +╺━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━╸ + bamstat_txt  (file)                │bam statistics report              │           *.bam_stat.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_distance  (file)     │the inner distances                │     *.inner_distance.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_freq  (file)         │frequencies of different insert    │*.inner_distance_freq.txt +                                     │sizes                              │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_mean  (file)         │mean/median values of inner        │*.inner_distance_mean.txt +                                     │distances                          │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_pdf  (file)          │distribution plot of inner         │*.inner_distance_plot.pdf +                                     │distances                          │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + innerdistance_rscript  (file)      │script to reproduce the plot       │  *.inner_distance_plot.R +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + inferexperiment_txt  
(file)        │infer_experiment results report    │   *.infer_experiment.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_bed  (file)     │bed file of annotated junctions    │           *.junction.bed +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_interact_bed   │Interact bed file                  │           *.Interact.bed +(file)                              │                                   │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_xls  (file)     │xls file with junction information │                    *.xls +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_pdf  (file)     │junction plot                      │           *.junction.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_events_pdf     │events plot                        │             *.events.pdf +(file)                              │                                   │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_rscript  (file) │Rscript to reproduce the plots     │                      *.r +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionannotation_log  (file)     │Log file generated by tool         │                    *.log +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionsaturation_pdf  (file)     │Junction saturation report         │                    *.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + junctionsaturation_rscript  (file) │Junction 
saturation R-script       │                      *.r +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readdistribution_txt  (file)       │the read distribution report       │  *.read_distribution.txt +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_seq_xls  (file)    │Read duplication rate determined   │         *seq.DupRate.xls +                                     │from mapping position of read      │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_pos_xls  (file)    │Read duplication rate determined   │         *pos.DupRate.xls +                                     │from sequence of read              │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_pdf  (file)        │plot of duplication rate           │                    *.pdf +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + readduplication_rscript  (file)    │script to reproduce the plot       │                      *.R +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + tin_txt  (file)                    │TXT file containing tin.py results │                    *.txt +                                     │summary                            │ +╶────────────────────────────────────┼───────────────────────────────────┼─────────────────────────╴ + versions  (file)                   │File containing software versions  │             versions.yml +                                     ╵                                   ╵                           + + 💻  Installation command: nf-core subworkflows install bam_rseqc + diff --git a/docs/images/nf-core-subworkflows-install.svg b/docs/images/nf-core-subworkflows-install.svg index 
a440ce839e..7363919195 100644 --- a/docs/images/nf-core-subworkflows-install.svg +++ b/docs/images/nf-core-subworkflows-install.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows install bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -Aborted. + + $ nf-core subworkflows install bam_rseqc + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Installing 'bam_rseqc' diff --git a/docs/images/nf-core-subworkflows-list-local.svg b/docs/images/nf-core-subworkflows-list-local.svg index 208e28e40e..dc2836367d 100644 --- a/docs/images/nf-core-subworkflows-list-local.svg +++ b/docs/images/nf-core-subworkflows-list-local.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows list local - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -[..truncated..] + + $ nf-core subworkflows list local + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     No nf-core subworkflows found in '.' + diff --git a/docs/images/nf-core-subworkflows-list-remote.svg b/docs/images/nf-core-subworkflows-list-remote.svg index 63b14b923d..025ee22a18 100644 --- a/docs/images/nf-core-subworkflows-list-remote.svg +++ b/docs/images/nf-core-subworkflows-list-remote.svg @@ -19,109 +19,109 @@ font-weight: 700; } - .terminal-513821131-matrix { + .terminal-1279347052-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-513821131-title { + .terminal-1279347052-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-513821131-r1 { fill: #c5c8c6 } -.terminal-513821131-r2 { fill: #98a84b } -.terminal-513821131-r3 { fill: #9a9b99 } -.terminal-513821131-r4 { fill: #608ab1 } -.terminal-513821131-r5 { fill: #d0b344 } -.terminal-513821131-r6 { fill: #1984e9;text-decoration: underline; } -.terminal-513821131-r7 { fill: #c5c8c6;font-weight: bold } -.terminal-513821131-r8 { fill: #868887;font-style: italic; } + .terminal-1279347052-r1 { fill: #c5c8c6 } +.terminal-1279347052-r2 { fill: #98a84b } +.terminal-1279347052-r3 { fill: #9a9b99 } +.terminal-1279347052-r4 { fill: #608ab1 } +.terminal-1279347052-r5 { fill: #d0b344 } +.terminal-1279347052-r6 { fill: #1984e9;text-decoration: underline; } +.terminal-1279347052-r7 { fill: #c5c8c6;font-weight: bold } +.terminal-1279347052-r8 { fill: #868887;font-style: italic; } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -133,36 +133,36 @@ - + - - $ nf-core subworkflows list remote - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -INFO     Subworkflows available from https://github.com/nf-core/modules.git(master):                - -┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ -Subworkflow Name                              -┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ -│ bam_create_som_pon_gatk                       │ -│ bam_dedup_stats_samtools_umitools             │ -│ bam_docounts_contamination_angsd              │ -│ bam_markduplicates_picard                     │ -│ bam_ngscheckmate                              │ -│ bam_qc_picard                                 │ -│ bam_rseqc                                     │ -│ bam_sort_stats_samtools                       │ -│ bam_split_by_region                           │ -[..truncated..] + + $ nf-core subworkflows list remote + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Subworkflows available from https://github.com/nf-core/modules.git(master):                + +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ +Subworkflow Name                              +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ +│ bam_create_som_pon_gatk                       │ +│ bam_dedup_stats_samtools_umitools             │ +│ bam_docounts_contamination_angsd              │ +│ bam_markduplicates_picard                     │ +│ bam_ngscheckmate                              │ +│ bam_qc_picard                                 │ +│ bam_rseqc                                     │ +│ bam_sort_stats_samtools                       │ +│ bam_split_by_region                           │ +[..truncated..] diff --git a/docs/images/nf-core-subworkflows-remove.svg b/docs/images/nf-core-subworkflows-remove.svg index 33b93f5c19..637ad9101c 100644 --- a/docs/images/nf-core-subworkflows-remove.svg +++ b/docs/images/nf-core-subworkflows-remove.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows remove bam_rseqc - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? 
(Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -Aborted. + + $ nf-core subworkflows remove bam_rseqc + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +INFO     Removed files for 'rseqc/bamstat' and it's dependencies 'rseqc/bamstat'.                    +INFO     Removed files for 'rseqc/inferexperiment' and it's dependencies 'rseqc/inferexperiment'.    +INFO     Removed files for 'rseqc/innerdistance' and it's dependencies 'rseqc/innerdistance'.        +INFO     Removed files for 'bam_rseqc' and it's dependencies 'bam_rseqc, rseqc_bamstat,  +rseqc_inferexperiment, rseqc_innerdistance'.                                                diff --git a/docs/images/nf-core-subworkflows-test.svg b/docs/images/nf-core-subworkflows-test.svg index 44d0a5fc48..f9ba18cc79 100644 --- a/docs/images/nf-core-subworkflows-test.svg +++ b/docs/images/nf-core-subworkflows-test.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows test bam_rseqc --no-prompts - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -Aborted. + + $ nf-core subworkflows test bam_rseqc --no-prompts + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +──────────────────────────────────────────── bam_rseqc ───────────────────────────────────────────── +INFO     Running pytest for subworkflow 'bam_rseqc' diff --git a/docs/images/nf-core-subworkflows-update.svg b/docs/images/nf-core-subworkflows-update.svg index 4769128e73..a9844d80a9 100644 --- a/docs/images/nf-core-subworkflows-update.svg +++ b/docs/images/nf-core-subworkflows-update.svg @@ -1,4 +1,4 @@ - + - - + + - + - + - + - + - + - + - + - + - + - + - + - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - + - - $ nf-core subworkflows update --all --no-preview - -                                          ,--./,-. 
-          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - - -WARNING 'repository_type' not defined in .nf-core.yml                                               -Warning: Input is not a terminal (fd=0). -? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - » Pipeline -   nf-core/modules - - - -        ? Is this repository an nf-core pipeline or a fork of nf-core/modules? (Use arr -                                                                               o -w keys) - -Aborted. + + $ nf-core subworkflows update --all --no-preview + +                                          ,--./,-. +          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + + +ERROR   'rseqc/junctionannotation' diff --git a/docs/images/nf-core-sync.svg b/docs/images/nf-core-sync.svg index 99c164c146..7305dda430 100644 --- a/docs/images/nf-core-sync.svg +++ b/docs/images/nf-core-sync.svg @@ -19,72 +19,72 @@ font-weight: 700; } - .terminal-832309805-matrix { + .terminal-1818757582-matrix { font-family: Fira Code, monospace; font-size: 20px; line-height: 24.4px; font-variant-east-asian: full-width; } - .terminal-832309805-title { + .terminal-1818757582-title { font-size: 18px; font-weight: bold; font-family: arial; } - .terminal-832309805-r1 { fill: #c5c8c6 } -.terminal-832309805-r2 { fill: #98a84b } -.terminal-832309805-r3 { fill: #9a9b99 } -.terminal-832309805-r4 { fill: #608ab1 } -.terminal-832309805-r5 { fill: #d0b344 } -.terminal-832309805-r6 { fill: #98729f } 
-.terminal-832309805-r7 { fill: #ff2c7a } + .terminal-1818757582-r1 { fill: #c5c8c6 } +.terminal-1818757582-r2 { fill: #98a84b } +.terminal-1818757582-r3 { fill: #9a9b99 } +.terminal-1818757582-r4 { fill: #608ab1 } +.terminal-1818757582-r5 { fill: #d0b344 } +.terminal-1818757582-r6 { fill: #98729f } +.terminal-1818757582-r7 { fill: #ff2c7a } - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + @@ -96,24 +96,24 @@ - + - - $ nf-core sync - -                                          ,--./,-. -          ___     __   __   __   ___     /,-._.--~\ -    |\ | |__  __ /  ` /  \ |__) |__         }  { -    | \| |       \__, \__/ |  \ |___     \`-._,-`-, -                                          `._,._,' - -    nf-core/tools version 2.7.2 - https://nf-co.re - - -INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing -INFO     Original pipeline repository branch is 'master' -INFO     Deleting all files in 'TEMPLATE' branch                                                     -INFO     Making a new template pipeline using pipeline variables                                     + + $ nf-core sync + +                                          ,--./,-. 
+          ___     __   __   __   ___     /,-._.--~\ +    |\ | |__  __ /  ` /  \ |__) |__         }  { +    | \| |       \__, \__/ |  \ |___     \`-._,-`-, +                                          `._,._,' + +    nf-core/tools version 2.8 - https://nf-co.re + + +INFO     Pipeline directory: /home/runner/work/tools/tools/tmp/nf-core-nextbigthing +INFO     Original pipeline repository branch is 'master' +INFO     Deleting all files in 'TEMPLATE' branch                                                     +INFO     Making a new template pipeline using pipeline variables                                     diff --git a/nf_core/.pre-commit-prettier-config.yaml b/nf_core/.pre-commit-prettier-config.yaml index 80f75fb22c..0c31cdb99f 100644 --- a/nf_core/.pre-commit-prettier-config.yaml +++ b/nf_core/.pre-commit-prettier-config.yaml @@ -1,5 +1,5 @@ repos: - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v2.6.2" + rev: "v2.7.1" hooks: - id: prettier diff --git a/nf_core/__main__.py b/nf_core/__main__.py index aaf0ea9868..e03fcbc67c 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -52,7 +52,7 @@ }, { "name": "Developing new modules", - "commands": ["create", "create-test-yml", "lint", "bump-versions", "mulled", "test"], + "commands": ["create", "create-test-yml", "lint", "bump-versions", "test"], }, ], "nf-core subworkflows": [ @@ -341,7 +341,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma meets the nf-core guidelines. Documentation of all lint tests can be found on the nf-core website: [link=https://nf-co.re/tools-docs/]https://nf-co.re/tools-docs/[/] - You can ignore tests using a file called [blue].nf-core-lint.yaml[/] [i](if you have a good reason!)[/]. + You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/]. See the documentation for details. 
""" @@ -618,7 +618,8 @@ def update(ctx, tool, dir, force, prompt, sha, all, preview, save_diff, update_d default=".", help=r"Pipeline directory. [dim]\[default: current working directory][/]", ) -def patch(ctx, tool, dir): +@click.option("-r", "--remove", is_flag=True, default=False) +def patch(ctx, tool, dir, remove): """ Create a patch file for minor changes in a module @@ -632,7 +633,10 @@ def patch(ctx, tool, dir): ctx.obj["modules_repo_branch"], ctx.obj["modules_repo_no_pull"], ) - module_patch.patch(tool) + if remove: + module_patch.remove(tool) + else: + module_patch.patch(tool) except (UserWarning, LookupError) as e: log.error(e) sys.exit(1) @@ -678,7 +682,16 @@ def remove(ctx, dir, tool): @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist") @click.option("-c", "--conda-name", type=str, default=None, help="Name of the conda package to use") @click.option("-p", "--conda-package-version", type=str, default=None, help="Version of conda package to use") -def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version): +@click.option( + "-i", + "--empty-template", + is_flag=True, + default=False, + help="Create a module from the template without TODOs or examples", +) +def create_module( + ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version, empty_template +): """ Create a new DSL2 module from the nf-core template. 
@@ -700,7 +713,7 @@ def create_module(ctx, tool, dir, author, label, meta, no_meta, force, conda_nam # Run function try: module_create = nf_core.modules.ModuleCreate( - dir, tool, author, label, has_meta, force, conda_name, conda_package_version + dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template ) module_create.create() except UserWarning as e: @@ -866,49 +879,6 @@ def bump_versions(ctx, tool, dir, all, show_all): sys.exit(1) -# nf-core modules mulled -@modules.command() -@click.argument("specifications", required=True, nargs=-1, metavar=" <...>") -@click.option( - "--build-number", - type=int, - default=0, - show_default=True, - metavar="", - help="The build number for this image. This is an incremental value that starts at zero.", -) -def mulled(specifications, build_number): - """ - Generate the name of a BioContainers mulled image version 2. - - When you know the specific dependencies and their versions of a multi-tool container image and you need the name of - that image, this command can generate it for you. 
- - """ - from nf_core.modules.mulled import MulledImageNameGenerator - - try: - image_name = MulledImageNameGenerator.generate_image_name( - MulledImageNameGenerator.parse_targets(specifications), build_number=build_number - ) - except ValueError as e: - log.error(e) - sys.exit(1) - if not MulledImageNameGenerator.image_exists(image_name): - log.error("The generated multi-tool container image name does not seem to exist yet.") - log.info( - "Please double check that your provided combination of tools and versions exists in the file: " - "[link=https://github.com/BioContainers/multi-package-containers/blob/master/combinations/hash.tsv]BioContainers/multi-package-containers 'combinations/hash.tsv'[/link]" - ) - log.info( - "If it does not, please add your desired combination as detailed at: " - "https://github.com/BioContainers/multi-package-containers" - ) - sys.exit(1) - log.info("Mulled container hash:") - stdout.print(image_name) - - # nf-core modules test @modules.command("test") @click.pass_context @@ -1389,7 +1359,9 @@ def build(dir, no_prompts, web_only, url): # nf-core schema lint @schema.command() -@click.argument("schema_path", type=click.Path(exists=True), required=True, metavar="") +@click.argument( + "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="" +) def lint(schema_path): """ Check that a given pipeline schema is valid. @@ -1399,6 +1371,8 @@ def lint(schema_path): This function runs as part of the nf-core lint command, this is a convenience command that does just the schema linting nice and quickly. + + If no schema path is provided, "nextflow_schema.json" will be used (if it exists). 
""" schema_obj = nf_core.schema.PipelineSchema() try: @@ -1446,8 +1420,7 @@ def docs(schema_path, output, format, force, columns): # Assume we're in a pipeline dir root if schema path not set schema_obj.get_schema_path(schema_path) schema_obj.load_schema() - if not output: - stdout.print(schema_obj.print_documentation(output, format, force, columns.split(","))) + schema_obj.print_documentation(output, format, force, columns.split(",")) # nf-core bump-version diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py index 1f38775bb6..129016fa38 100644 --- a/nf_core/bump_version.py +++ b/nf_core/bump_version.py @@ -133,12 +133,10 @@ def update_file_version(filename, pipeline_obj, patterns): replacements = [] for pattern in patterns: - found_match = False newcontent = [] for line in content.splitlines(): - # Match the pattern matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) if matches_pattern: diff --git a/nf_core/components/components_command.py b/nf_core/components/components_command.py index 31ab1a71fb..775b205cf5 100644 --- a/nf_core/components/components_command.py +++ b/nf_core/components/components_command.py @@ -6,6 +6,7 @@ import yaml +import nf_core.utils from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import ModulesRepo @@ -162,24 +163,13 @@ def install_component_files(self, component_name, component_version, modules_rep def load_lint_config(self): """Parse a pipeline lint config file. - Look for a file called either `.nf-core-lint.yml` or - `.nf-core-lint.yaml` in the pipeline root directory and parse it. - (`.yml` takes precedence). + Load the '.nf-core.yml' config file and extract + the lint config from it Add parsed config to the `self.lint_config` class attribute. 
""" - config_fn = os.path.join(self.dir, ".nf-core-lint.yml") - - # Pick up the file if it's .yaml instead of .yml - if not os.path.isfile(config_fn): - config_fn = os.path.join(self.dir, ".nf-core-lint.yaml") - - # Load the YAML - try: - with open(config_fn, "r") as fh: - self.lint_config = yaml.safe_load(fh) - except FileNotFoundError: - log.debug(f"No lint config file found: {config_fn}") + _, tools_config = nf_core.utils.load_tools_config(self.dir) + self.lint_config = tools_config.get("lint", {}) def check_modules_structure(self): """ diff --git a/nf_core/components/components_create.py b/nf_core/components/components_create.py deleted file mode 100644 index 86c42c0373..0000000000 --- a/nf_core/components/components_create.py +++ /dev/null @@ -1,177 +0,0 @@ -import glob -import json -import logging -import os -import re -import subprocess - -import jinja2 -import rich - -import nf_core.utils - -log = logging.getLogger(__name__) - - -def render_template(component_type, object_attrs, file_paths): - """ - Create new module/subworkflow files with Jinja2. 
- """ - # Run jinja2 for each file in the template folder - env = jinja2.Environment( - loader=jinja2.PackageLoader("nf_core", f"{component_type[:-1]}-template"), keep_trailing_newline=True - ) - for template_fn, dest_fn in file_paths.items(): - log.debug(f"Rendering template file: '{template_fn}'") - j_template = env.get_template(template_fn) - object_attrs["nf_core_version"] = nf_core.__version__ - rendered_output = j_template.render(object_attrs) - - # Write output to the target file - os.makedirs(os.path.dirname(dest_fn), exist_ok=True) - with open(dest_fn, "w") as fh: - log.debug(f"Writing output to: '{dest_fn}'") - fh.write(rendered_output) - - # Mirror file permissions - template_stat = os.stat( - os.path.join(os.path.dirname(nf_core.__file__), f"{component_type[:-1]}-template", template_fn) - ) - os.chmod(dest_fn, template_stat.st_mode) - - -def collect_name_prompt(name, component_type): - """ - Collect module/subworkflow info via prompt if empty or invalid - """ - # Collect module info via prompt if empty or invalid - subname = None - if component_type == "modules": - pattern = r"[^a-z\d/]" - elif component_type == "subworkflows": - pattern = r"[^a-z\d_/]" - if name is None: - name = "" - while name == "" or re.search(pattern, name) or name.count("/") > 0: - # Check + auto-fix for invalid chacters - if re.search(pattern, name): - if component_type == "modules": - log.warning("Tool/subtool name must be lower-case letters only, with no punctuation") - elif component_type == "subworkflows": - log.warning("Subworkflow name must be lower-case letters only, with no punctuation") - name_clean = re.sub(r"[^a-z\d/]", "", name.lower()) - if rich.prompt.Confirm.ask(f"[violet]Change '{name}' to '{name_clean}'?"): - name = name_clean - else: - name = "" - - if component_type == "modules": - # Split into tool and subtool - if name.count("/") > 1: - log.warning("Tool/subtool can have maximum one '/' character") - name = "" - elif name.count("/") == 1: - name, subname = 
name.split("/") - else: - subname = None # Reset edge case: entered '/subtool' as name and gone round loop again - - # Prompt for new entry if we reset - if name == "": - if component_type == "modules": - name = rich.prompt.Prompt.ask("[violet]Name of tool/subtool").strip() - elif component_type == "subworkflows": - name = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() - - if component_type == "modules": - return name, subname - elif component_type == "subworkflows": - return name - - -def get_component_dirs(component_type, repo_type, directory, org, name, supername, subname, new_dir, force_overwrite): - """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist - - Returns dict: keys are relative paths to template files, vals are target paths. - """ - file_paths = {} - if repo_type == "pipeline": - local_component_dir = os.path.join(directory, component_type, "local") - # Check whether component file already exists - component_file = os.path.join(local_component_dir, f"{name}.nf") - if os.path.exists(component_file) and not force_overwrite: - raise UserWarning( - f"{component_type[:-1].title()} file exists already: '{component_file}'. 
Use '--force' to overwrite" - ) - - if component_type == "modules": - # If a subtool, check if there is a module called the base tool name already - if subname and os.path.exists(os.path.join(local_component_dir, f"{supername}.nf")): - raise UserWarning(f"Module '{supername}' exists already, cannot make subtool '{name}'") - - # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob(f"{local_component_dir}/{supername}_*.nf") - if not subname and tool_glob: - raise UserWarning(f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{name}'") - - # Set file paths - file_paths[os.path.join(component_type, "main.nf")] = component_file - - if repo_type == "modules": - software_dir = os.path.join(directory, component_type, org, new_dir) - test_dir = os.path.join(directory, "tests", component_type, org, new_dir) - - # Check if module/subworkflow directories exist already - if os.path.exists(software_dir) and not force_overwrite: - raise UserWarning(f"{component_type[:-1]} directory exists: '{software_dir}'. Use '--force' to overwrite") - if os.path.exists(test_dir) and not force_overwrite: - raise UserWarning(f"{component_type[:-1]} test directory exists: '{test_dir}'. 
Use '--force' to overwrite") - - if component_type == "modules": - # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = os.path.join(directory, component_type, org, supername, "main.nf") - parent_tool_test_nf = os.path.join(directory, component_type, org, supername, "main.nf") - if subname and os.path.exists(parent_tool_main_nf): - raise UserWarning(f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{name}'") - if subname and os.path.exists(parent_tool_test_nf): - raise UserWarning(f"Module '{parent_tool_test_nf}' exists already, cannot make subtool '{name}'") - - # If no subtool, check that there isn't already a tool/subtool - tool_glob = glob.glob(f"{os.path.join(directory, component_type, org, supername)}/*/main.nf") - if not subname and tool_glob: - raise UserWarning(f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{name}'") - - # Set file paths - # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure - file_paths[os.path.join(component_type, "main.nf")] = os.path.join(software_dir, "main.nf") - file_paths[os.path.join(component_type, "meta.yml")] = os.path.join(software_dir, "meta.yml") - file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf") - file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml") - file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config") - - return file_paths - - -def get_username(author): - """ - Prompt for GitHub username - """ - # Try to guess the current user if `gh` is installed - author_default = None - try: - with open(os.devnull, "w") as devnull: - gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull)) - author_default = f"@{gh_auth_user['login']}" - except Exception as e: - log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") - - # Regex to valid GitHub 
username: https://github.com/shinnn/github-username-regex - github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$") - while author is None or not github_username_regex.match(author): - if author is not None and not github_username_regex.match(author): - log.warning("Does not look like a valid GitHub username (must start with an '@')!") - author = rich.prompt.Prompt.ask( - f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", - default=author_default, - ) - - return author diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py index eadb5ba0cb..c2b9abf569 100644 --- a/nf_core/components/components_test.py +++ b/nf_core/components/components_test.py @@ -180,7 +180,10 @@ def _run_pytests(self): console.rule(self.component_name, style="black") # Set pytest arguments - command_args = ["--tag", f"{self.component_name}", "--symlink", "--keep-workflow-wd", "--git-aware"] + tag = self.component_name + if self.component_type == "subworkflows": + tag = "subworkflows/" + tag + command_args = ["--tag", f"{tag}", "--symlink", "--keep-workflow-wd", "--git-aware"] command_args += self.pytest_args # Run pytest diff --git a/nf_core/components/create.py b/nf_core/components/create.py new file mode 100644 index 0000000000..e626de4aaa --- /dev/null +++ b/nf_core/components/create.py @@ -0,0 +1,435 @@ +""" +The ComponentCreate class handles generating of module and subworkflow templates +""" + +from __future__ import print_function + +import glob +import json +import logging +import os +import re +import subprocess + +import jinja2 +import questionary +import rich +import yaml +from packaging.version import parse as parse_version + +import nf_core +import nf_core.utils +from nf_core.components.components_command import ComponentCommand + +log = logging.getLogger(__name__) + + +class ComponentCreate(ComponentCommand): + def __init__( + self, + component_type, + directory=".", + component="", + 
author=None, + process_label=None, + has_meta=None, + force=False, + conda_name=None, + conda_version=None, + empty_template=False, + ): + super().__init__(component_type, directory) + self.directory = directory + self.component = component + self.author = author + self.process_label = process_label + self.has_meta = has_meta + self.force_overwrite = force + self.subtool = None + self.tool_conda_name = conda_name + self.tool_conda_version = conda_version + self.tool_licence = None + self.tool_licence = "" + self.tool_description = "" + self.tool_doc_url = "" + self.tool_dev_url = "" + self.bioconda = None + self.singularity_container = None + self.docker_container = None + self.file_paths = {} + self.not_empty_template = not empty_template + + def create(self): + """ + Create a new DSL2 module or subworkflow from the nf-core template. + + A module should be named just or + e.g fastqc or samtools/sort, respectively. + + The subworkflow should be named as the main file type it operates on and a short description of the task performed + e.g bam_sort or bam_sort_samtools, respectively. + + If is a pipeline, this function creates a file called: + '/modules/local/tool.nf' + OR + '/modules/local/tool_subtool.nf' + OR for subworkflows + '/subworkflows/local/subworkflow_name.nf' + + If is a clone of nf-core/modules, it creates or modifies the following files: + + For modules: + + modules/modules/nf-core/tool/subtool/ + * main.nf + * meta.yml + modules/tests/modules/nf-core/tool/subtool/ + * main.nf + * test.yml + * nextflow.config + tests/config/pytest_modules.yml + + The function will attempt to automatically find a Bioconda package called + and matching Docker / Singularity images from BioContainers. 
+ + For subworkflows: + subworkflows/nf-core/subworkflow_name/ + * main.nf + * meta.yml + tests/subworkflows/nf-core/subworkflow_name/ + * main.nf + * test.yml + * nextflow.config + tests/config/pytest_modules.yml + + """ + + if self.component_type == "modules": + # Check modules directory structure + self.check_modules_structure() + + # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules + log.info(f"Repository type: [blue]{self.repo_type}") + if self.directory != ".": + log.info(f"Base directory: '{self.directory}'") + + log.info( + "[yellow]Press enter to use default values [cyan bold](shown in brackets)[/] [yellow]or type your own responses. " + "ctrl+click [link=https://youtu.be/dQw4w9WgXcQ]underlined text[/link] to open links." + ) + + # Collect component info via prompt if empty or invalid + self._collect_name_prompt() + + # Determine the component name + self.component_name = self.component + self.component_dir = self.component + + if self.subtool: + self.component_name = f"{self.component}/{self.subtool}" + self.component_dir = os.path.join(self.component, self.subtool) + + self.component_name_underscore = self.component_name.replace("/", "_") + + # Check existence of directories early for fast-fail + self.file_paths = self._get_component_dirs() + + if self.component_type == "modules": + # Try to find a bioconda package for 'component' + self._get_bioconda_tool() + + # Prompt for GitHub username + self._get_username() + + if self.component_type == "modules": + self._get_module_structure_components() + + # Create component template with jinja2 + self._render_template() + + if self.repo_type == "modules": + # Add entry to pytest_modules.yml + try: + with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh: + pytest_modules_yml = yaml.safe_load(fh) + if self.subtool: + pytest_modules_yml[self.component_name] = [ + f"modules/{self.org}/{self.component}/{self.subtool}/**", + 
f"tests/modules/{self.org}/{self.component}/{self.subtool}/**", + ] + else: + pytest_modules_yml[ + ("" if self.component_type == "modules" else self.component_type + "/") + self.component_name + ] = [ + f"{self.component_type}/{self.org}/{self.component}/**", + f"tests/{self.component_type}/{self.org}/{self.component}/**", + ] + pytest_modules_yml = dict(sorted(pytest_modules_yml.items())) + with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: + yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) + except FileNotFoundError: + raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") + + new_files = list(self.file_paths.values()) + if self.repo_type == "modules": + new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml")) + log.info("Created / edited following files:\n " + "\n ".join(new_files)) + + def _get_bioconda_tool(self): + """ + Try to find a bioconda package for 'tool' + """ + while True: + try: + if self.tool_conda_name: + anaconda_response = nf_core.utils.anaconda_package(self.tool_conda_name, ["bioconda"]) + else: + anaconda_response = nf_core.utils.anaconda_package(self.component, ["bioconda"]) + + if not self.tool_conda_version: + version = anaconda_response.get("latest_version") + if not version: + version = str(max([parse_version(v) for v in anaconda_response["versions"]])) + else: + version = self.tool_conda_version + + self.tool_licence = nf_core.utils.parse_anaconda_licence(anaconda_response, version) + self.tool_description = anaconda_response.get("summary", "") + self.tool_doc_url = anaconda_response.get("doc_url", "") + self.tool_dev_url = anaconda_response.get("dev_url", "") + if self.tool_conda_name: + self.bioconda = "bioconda::" + self.tool_conda_name + "=" + version + else: + self.bioconda = "bioconda::" + self.component + "=" + version + log.info(f"Using Bioconda package: '{self.bioconda}'") + break + except 
(ValueError, LookupError) as e: + log.warning( + f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.component}'" + ) + if rich.prompt.Confirm.ask("[violet]Do you want to enter a different Bioconda package name?"): + self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() + continue + else: + log.warning( + f"{e}\nBuilding module without tool software and meta, you will need to enter this information manually." + ) + break + + # Try to get the container tag (only if bioconda package was found) + if self.bioconda: + try: + if self.tool_conda_name: + self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( + self.tool_conda_name, version + ) + else: + self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( + self.component, version + ) + log.info(f"Using Docker container: '{self.docker_container}'") + log.info(f"Using Singularity container: '{self.singularity_container}'") + except (ValueError, LookupError) as e: + log.info(f"Could not find a Docker/Singularity container ({e})") + + def _get_module_structure_components(self): + process_label_defaults = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + if self.process_label is None: + log.info( + "Provide an appropriate resource label for the process, taken from the " + "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" + "For example: {}".format(", ".join(process_label_defaults)) + ) + while self.process_label is None: + self.process_label = questionary.autocomplete( + "Process resource label:", + choices=process_label_defaults, + style=nf_core.utils.nfcore_question_style, + default="process_single", + ).unsafe_ask() + + if self.has_meta is None: + log.info( + "Where applicable all sample-specific information e.g. 
'id', 'single_end', 'read_group' " +                "MUST be provided as an input via a Groovy Map called 'meta'. " +                "This information may [italic]not[/] be required in some instances, for example " +                "[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." +            ) +            while self.has_meta is None: +                self.has_meta = rich.prompt.Confirm.ask( +                    "[violet]Will the module require a meta map of sample information?", default=True +                ) + +    def _render_template(self): +        """ +        Create new module/subworkflow files with Jinja2. +        """ +        object_attrs = vars(self) +        # Run jinja2 for each file in the template folder +        env = jinja2.Environment( +            loader=jinja2.PackageLoader("nf_core", f"{self.component_type[:-1]}-template"), keep_trailing_newline=True +        ) +        for template_fn, dest_fn in self.file_paths.items(): +            log.debug(f"Rendering template file: '{template_fn}'") +            j_template = env.get_template(template_fn) +            object_attrs["nf_core_version"] = nf_core.__version__ +            rendered_output = j_template.render(object_attrs) + +            # Write output to the target file +            os.makedirs(os.path.dirname(dest_fn), exist_ok=True) +            with open(dest_fn, "w") as fh: +                log.debug(f"Writing output to: '{dest_fn}'") +                fh.write(rendered_output) + +            # Mirror file permissions +            template_stat = os.stat( +                os.path.join(os.path.dirname(nf_core.__file__), f"{self.component_type[:-1]}-template", template_fn) +            ) +            os.chmod(dest_fn, template_stat.st_mode) + +    def _collect_name_prompt(self): +        """ +        Collect module/subworkflow info via prompt if empty or invalid +        """ +        # Collect module info via prompt if empty or invalid +        self.subtool = None +        if self.component_type == "modules": +            pattern = r"[^a-z\d/]" +        elif self.component_type == "subworkflows": +            pattern = r"[^a-z\d_/]" +        if self.component is None: +            self.component = "" +        while self.component == "" or re.search(pattern, self.component) or self.component.count("/") > 0: +            # Check + auto-fix for invalid characters +            if 
re.search(pattern, self.component): + if self.component_type == "modules": + log.warning("Tool/subtool name must be lower-case letters only, with no punctuation") + elif self.component_type == "subworkflows": + log.warning("Subworkflow name must be lower-case letters only, with no punctuation") + name_clean = re.sub(r"[^a-z\d/]", "", self.component.lower()) + if rich.prompt.Confirm.ask(f"[violet]Change '{self.component}' to '{name_clean}'?"): + self.component = name_clean + else: + self.component = "" + + if self.component_type == "modules": + # Split into tool and subtool + if self.component.count("/") > 1: + log.warning("Tool/subtool can have maximum one '/' character") + self.component = "" + elif self.component.count("/") == 1: + self.component, self.subtool = self.component.split("/") + else: + self.subtool = None # Reset edge case: entered '/subtool' as name and gone round loop again + + # Prompt for new entry if we reset + if self.component == "": + if self.component_type == "modules": + self.component = rich.prompt.Prompt.ask("[violet]Name of tool/subtool").strip() + elif self.component_type == "subworkflows": + self.component = rich.prompt.Prompt.ask("[violet]Name of subworkflow").strip() + + def _get_component_dirs(self): + """Given a directory and a tool/subtool or subworkflow, set the file paths and check if they already exist + + Returns dict: keys are relative paths to template files, vals are target paths. + """ + file_paths = {} + if self.repo_type == "pipeline": + local_component_dir = os.path.join(self.directory, self.component_type, "local") + # Check whether component file already exists + component_file = os.path.join(local_component_dir, f"{self.component_name}.nf") + if os.path.exists(component_file) and not self.force_overwrite: + raise UserWarning( + f"{self.component_type[:-1].title()} file exists already: '{component_file}'. 
Use '--force' to overwrite" + ) + + if self.component_type == "modules": + # If a subtool, check if there is a module called the base tool name already + if self.subtool and os.path.exists(os.path.join(local_component_dir, f"{self.component}.nf")): + raise UserWarning( + f"Module '{self.component}' exists already, cannot make subtool '{self.component_name}'" + ) + + # If no subtool, check that there isn't already a tool/subtool + tool_glob = glob.glob(f"{local_component_dir}/{self.component}_*.nf") + if not self.subtool and tool_glob: + raise UserWarning( + f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" + ) + + # Set file paths + file_paths[os.path.join(self.component_type, "main.nf")] = component_file + + if self.repo_type == "modules": + software_dir = os.path.join(self.directory, self.component_type, self.org, self.component_dir) + test_dir = os.path.join(self.directory, "tests", self.component_type, self.org, self.component_dir) + + # Check if module/subworkflow directories exist already + if os.path.exists(software_dir) and not self.force_overwrite: + raise UserWarning( + f"{self.component_type[:-1]} directory exists: '{software_dir}'. Use '--force' to overwrite" + ) + if os.path.exists(test_dir) and not self.force_overwrite: + raise UserWarning( + f"{self.component_type[:-1]} test directory exists: '{test_dir}'. 
Use '--force' to overwrite" + ) + + if self.component_type == "modules": + # If a subtool, check if there is a module called the base tool name already + parent_tool_main_nf = os.path.join( + self.directory, self.component_type, self.org, self.component, "main.nf" + ) + parent_tool_test_nf = os.path.join( + self.directory, self.component_type, self.org, self.component, "main.nf" + ) + if self.subtool and os.path.exists(parent_tool_main_nf): + raise UserWarning( + f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'" + ) + if self.subtool and os.path.exists(parent_tool_test_nf): + raise UserWarning( + f"Module '{parent_tool_test_nf}' exists already, cannot make subtool '{self.component_name}'" + ) + + # If no subtool, check that there isn't already a tool/subtool + tool_glob = glob.glob( + f"{os.path.join(self.directory, self.component_type, self.org, self.component)}/*/main.nf" + ) + if not self.subtool and tool_glob: + raise UserWarning( + f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" + ) + + # Set file paths + # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure + file_paths[os.path.join(self.component_type, "main.nf")] = os.path.join(software_dir, "main.nf") + file_paths[os.path.join(self.component_type, "meta.yml")] = os.path.join(software_dir, "meta.yml") + file_paths[os.path.join("tests", "main.nf")] = os.path.join(test_dir, "main.nf") + file_paths[os.path.join("tests", "test.yml")] = os.path.join(test_dir, "test.yml") + file_paths[os.path.join("tests", "nextflow.config")] = os.path.join(test_dir, "nextflow.config") + + return file_paths + + def _get_username(self): + """ + Prompt for GitHub username + """ + # Try to guess the current user if `gh` is installed + author_default = None + try: + with open(os.devnull, "w") as devnull: + gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull)) + author_default 
= f"@{gh_auth_user['login']}" +        except Exception as e: +            log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") + +        # Regex to validate GitHub username: https://github.com/shinnn/github-username-regex +        github_username_regex = re.compile(r"^@[a-zA-Z\d](?:[a-zA-Z\d]|-(?=[a-zA-Z\d])){0,38}$") +        while self.author is None or not github_username_regex.match(self.author): +            if self.author is not None and not github_username_regex.match(self.author): +                log.warning("Does not look like a valid GitHub username (must start with an '@')!") +            self.author = rich.prompt.Prompt.ask( +                f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", +                default=author_default, +            ) diff --git a/nf_core/components/info.py b/nf_core/components/info.py index b89baaf99e..e4d8038b87 100644 --- a/nf_core/components/info.py +++ b/nf_core/components/info.py @@ -8,6 +8,7 @@ from rich.console import Group from rich.markdown import Markdown from rich.panel import Panel +from rich.syntax import Syntax from rich.table import Table from rich.text import Text @@ -128,6 +129,19 @@ def init_mod_name(self, component): choices=components, style=nf_core.utils.nfcore_question_style, ).unsafe_ask() +        else: +            if self.repo_type == "pipeline": +                # check if the module is locally installed +                local_paths = self.modules_json.get_all_components(self.component_type).get( +                    self.modules_repo.remote_url, {} +                ) +                for directory, comp in local_paths: +                    if comp == component: +                        component_base_path = Path(self.dir, self.component_type) +                        self.local_path = Path(component_base_path, directory, component) +                        break +                if self.local_path: +                    self.local = True return component @@ -283,7 +297,7 @@ def generate_component_info_help(self): renderables.append(outputs_table) # Installation command - if self.remote_location: + if self.remote_location and not self.local: cmd_base = f"nf-core {self.component_type}" if self.remote_location != NF_CORE_MODULES_REMOTE: cmd_base = f"nf-core 
{self.component_type} --git-remote {self.remote_location}" @@ -291,4 +305,29 @@ def generate_component_info_help(self): Text.from_markup(f"\n :computer: Installation command: [magenta]{cmd_base} install {self.component}\n") ) + # Print include statement + if self.local_path: + install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) + component_name = "_".join(self.component.upper().split("/")) + renderables.append( + Text.from_markup(f"\n [blue]Use the following statement to include this {self.component_type[:-1]}:") + ) + renderables.append( + Syntax( + f"include {{ {component_name} }} from '../{Path(install_folder, self.component).relative_to(self.dir)}/main'", + "groovy", + theme="ansi_dark", + padding=1, + ) + ) + if self.component_type == "subworkflows": + subworkflow_config = Path(install_folder, self.component, "nextflow.config").relative_to(self.dir) + if os.path.isfile(subworkflow_config): + renderables.append( + Text.from_markup("\n [blue]Add the following config statement to use this subworkflow:") + ) + renderables.append( + Syntax(f"includeConfig '{subworkflow_config}'", "groovy", theme="ansi_dark", padding=1) + ) + return Group(*renderables) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f9a16f73cf..f7a5fe6680 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -1,6 +1,5 @@ import logging import os -import re from pathlib import Path import questionary @@ -59,6 +58,12 @@ def install(self, component, silent=False): if not silent: modules_json.check_up_to_date() + # Verify that the remote repo's org_path does not match the org_path of any alternate repo among the installed modules + if self.check_alternate_remotes(modules_json): + err_msg = f"You are trying to install {self.component_type} from different repositories with the same organization name '{self.modules_repo.repo_path}' (set in the `.nf-core.yml` file in the `org_path` field).\nThis is not supported, and 
will likely cause problems. org_path should be set to the github account/organization name." + log.error(err_msg) + return False + # Verify SHA if not self.modules_repo.verify_sha(self.prompt, self.sha): return False @@ -74,10 +79,10 @@ def install(self, component, silent=False): ) # Set the install folder based on the repository name - install_folder = os.path.join(self.dir, self.component_type, self.modules_repo.repo_path) + install_folder = Path(self.dir, self.component_type, self.modules_repo.repo_path) # Compute the component directory - component_dir = os.path.join(install_folder, component) + component_dir = Path(install_folder, component) # Check that the component is not already installed component_not_installed = self.check_component_installed( @@ -169,19 +174,19 @@ def collect_and_verify_name(self, component, modules_repo): if component is None: component = questionary.autocomplete( f"{'Tool' if self.component_type == 'modules' else 'Subworkflow'} name:", - choices=sorted(modules_repo.get_avail_components(self.component_type)), + choices=sorted(modules_repo.get_avail_components(self.component_type, commit=self.sha)), style=nf_core.utils.nfcore_question_style, ).unsafe_ask() # Check that the supplied name is an available module/subworkflow - if component and component not in modules_repo.get_avail_components(self.component_type): + if component and component not in modules_repo.get_avail_components(self.component_type, commit=self.sha): log.error( f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." 
) log.info(f"Use the command 'nf-core {self.component_type} list' to view available software") return False - if not modules_repo.component_exists(component, self.component_type): + if not modules_repo.component_exists(component, self.component_type, commit=self.sha): warn_msg = f"{self.component_type[:-1].title()} '{component}' not found in remote '{modules_repo.remote_url}' ({modules_repo.branch})" log.warning(warn_msg) return False @@ -265,3 +270,20 @@ def clean_modules_json(self, component, modules_repo, modules_json): self.component_type, component, repo_to_remove, modules_repo.repo_path ) return component_values["installed_by"] + + def check_alternate_remotes(self, modules_json): + """ + Check whether there are previously installed components with the same org_path but different remote urls + Log error if multiple remotes exist. + + Return: + True: if problematic components are found + False: if problematic components are not found + """ + modules_json.load() + for repo_url, repo_content in modules_json.modules_json.get("repos", dict()).items(): + for component_type in repo_content: + for dir in repo_content.get(component_type, dict()).keys(): + if dir == self.modules_repo.repo_path and repo_url != self.modules_repo.remote_url: + return True + return False diff --git a/nf_core/components/remove.py b/nf_core/components/remove.py index 528f032124..99df757992 100644 --- a/nf_core/components/remove.py +++ b/nf_core/components/remove.py @@ -127,7 +127,7 @@ def remove(self, component, removed_by=None, removed_components=None, force=Fals component, silent=True ): log.warning( - f"Could not install the {self.component_type[:-1]} '{component}', please install it manually with 'nf-core {component_type} install {component}'." + f"Could not install the {self.component_type[:-1]} '{component}', please install it manually with 'nf-core {self.component_type} install {component}'." 
) removed_components.append(component) return removed diff --git a/nf_core/components/update.py b/nf_core/components/update.py index ef645a5a1d..5f8a2129c5 100644 --- a/nf_core/components/update.py +++ b/nf_core/components/update.py @@ -386,7 +386,7 @@ def get_single_component_info(self, component): ) # Check that the supplied name is an available module/subworkflow - if component and component not in self.modules_repo.get_avail_components(self.component_type): + if component and component not in self.modules_repo.get_avail_components(self.component_type, commit=self.sha): raise LookupError( f"{self.component_type[:-1].title()} '{component}' not found in list of available {self.component_type}." f"Use the command 'nf-core {self.component_type} list remote' to view available software" @@ -879,25 +879,23 @@ def update_linked_components(self, modules_to_update, subworkflows_to_update, up if m_update in updated: continue original_component_type, original_update_all = self._change_component_type("modules") - self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) - self._reset_component_type(original_component_type, original_update_all) + try: + self.update(m_update, silent=True, updated=updated, check_diff_exist=check_diff_exist) + except LookupError as e: + # If the module to be updated is not available, check if there has been a name change + if "not found in list of available" in str(e): + # Skip update, we check for name changes with manage_changes_in_linked_components + pass + else: + raise + finally: + self._reset_component_type(original_component_type, original_update_all) def manage_changes_in_linked_components(self, component, modules_to_update, subworkflows_to_update): """Check for linked components added or removed in the new subworkflow version""" if self.component_type == "subworkflows": subworkflow_directory = Path(self.dir, self.component_type, self.modules_repo.repo_path, component) included_modules, included_subworkflows 
= get_components_to_install(subworkflow_directory) - # If a new module/subworkflow is included in the subworklfow and wasn't included before - for module in included_modules: - if module not in modules_to_update: - log.info(f"Installing newly included module '{module}' for '{component}'") - install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) - install_module_object.install(module, silent=True) - for subworkflow in included_subworkflows: - if subworkflow not in subworkflows_to_update: - log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") - install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) - install_subworkflow_object.install(subworkflow, silent=True) # If a module/subworkflow has been removed from the subworkflow for module in modules_to_update: if module not in included_modules: @@ -909,6 +907,17 @@ def manage_changes_in_linked_components(self, component, modules_to_update, subw log.info(f"Removing subworkflow '{subworkflow}' which is not included in '{component}' anymore.") remove_subworkflow_object = ComponentRemove("subworkflows", self.dir) remove_subworkflow_object.remove(subworkflow, removed_by=component) + # If a new module/subworkflow is included in the subworkflow and wasn't included before + for module in included_modules: + if module not in modules_to_update: + log.info(f"Installing newly included module '{module}' for '{component}'") + install_module_object = ComponentInstall(self.dir, "modules", installed_by=component) + install_module_object.install(module, silent=True) + for subworkflow in included_subworkflows: + if subworkflow not in subworkflows_to_update: + log.info(f"Installing newly included subworkflow '{subworkflow}' for '{component}'") + install_subworkflow_object = ComponentInstall(self.dir, "subworkflows", installed_by=component) + install_subworkflow_object.install(subworkflow, silent=True) def _change_component_type(self, 
new_component_type): original_component_type = self.component_type diff --git a/nf_core/create.py b/nf_core/create.py index 045c35d1b4..9e3b38102f 100644 --- a/nf_core/create.py +++ b/nf_core/create.py @@ -56,7 +56,7 @@ def __init__( plain=False, default_branch=None, ): - self.template_params, skip_paths_keys = self.create_param_dict( + self.template_params, skip_paths_keys, self.template_yaml = self.create_param_dict( name, description, author, version, template_yaml_path, plain ) @@ -172,13 +172,13 @@ def create_param_dict(self, name, description, author, version, template_yaml_pa and "nextflow_config" in config_yml["lint"] and "manifest.name" in config_yml["lint"]["nextflow_config"] ): - return param_dict, skip_paths + return param_dict, skip_paths, template_yaml if param_dict["prefix"] == "nf-core": # Check that the pipeline name matches the requirements if not re.match(r"^[a-z]+$", param_dict["short_name"]): raise UserWarning("[red]Invalid workflow name: must be lowercase without punctuation.") - return param_dict, skip_paths + return param_dict, skip_paths, template_yaml def customize_template(self, template_areas): """Customizes the template parameters. @@ -283,7 +283,6 @@ def render_template(self): # Set the paths to skip according to customization for template_fn_path_obj in template_files: - template_fn_path = str(template_fn_path_obj) # Skip files that are in the self.skip_paths list @@ -349,6 +348,11 @@ def render_template(self): # Update the .nf-core.yml with linting configurations self.fix_linting() + log.debug("Dumping pipeline template yml to file") + if self.template_yaml: + with open(self.outdir / "pipeline_template.yml", "w") as fh: + yaml.safe_dump(self.template_yaml, fh) + def update_nextflow_schema(self): """ Removes unused parameters from the nextflow schema. 
diff --git a/nf_core/download.py b/nf_core/download.py index 2f964b3afd..cd36c65c4a 100644 --- a/nf_core/download.py +++ b/nf_core/download.py @@ -432,7 +432,7 @@ def find_container_images(self): Later DSL2: container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' : - 'quay.io/biocontainers/fastqc:0.11.9--0' }" + 'biocontainers/fastqc:0.11.9--0' }" DSL1 / Special case DSL2: container "nfcore/cellranger:6.0.2" @@ -506,7 +506,6 @@ def get_singularity_images(self): containers_download = [] containers_pull = [] for container in self.containers: - # Fetch the output and cached filenames for this container out_path, cache_path = self.singularity_image_filenames(container) diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile index 07beed96ec..417208a20d 100644 --- a/nf_core/gitpod/gitpod.Dockerfile +++ b/nf_core/gitpod/gitpod.Dockerfile @@ -2,6 +2,19 @@ FROM gitpod/workspace-base USER root +# Install util tools. 
+RUN apt-get update --quiet && \ + apt-get install --quiet --yes \ + apt-transport-https \ + apt-utils \ + sudo \ + git \ + less \ + wget \ + curl \ + tree \ + graphviz + # Install Conda RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ bash Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda && \ @@ -18,23 +31,20 @@ RUN chown -R gitpod:gitpod /opt/conda /usr/src/nf_core # Change user to gitpod USER gitpod - # Install nextflow, nf-core, Mamba, and pytest-workflow -RUN conda update -n base -c defaults conda && \ - conda config --add channels defaults && \ +RUN conda config --add channels defaults && \ conda config --add channels bioconda && \ conda config --add channels conda-forge && \ - conda install \ - openjdk=17.0.3 \ - nextflow=22.10.1 \ - nf-test=0.7.1-0 \ - pytest-workflow=1.6.0 \ - mamba=0.27.0 \ - pip=22.3 \ - black=22.10.0 \ - prettier=2.7.1 \ - -n base && \ - conda clean --all -f -y + conda config --set channel_priority strict && \ + conda install --quiet --yes --name base mamba && \ + mamba install --quiet --yes --name base \ + nextflow=22.10.1 \ + nf-core \ + nf-test \ + black \ + prettier \ + pytest-workflow && \ + mamba clean --all -f -y # Install nf-core RUN python -m pip install . 
diff --git a/nf_core/launch.py b/nf_core/launch.py index 87150172f7..648c8775f8 100644 --- a/nf_core/launch.py +++ b/nf_core/launch.py @@ -98,7 +98,6 @@ def __init__( self.cli_launch = True def launch_pipeline(self): - # Prompt for pipeline if not supplied and no web launch ID if self.pipeline is None and self.web_id is None: launch_type = questionary.select( @@ -461,7 +460,6 @@ def prompt_group(self, group_id, group_obj): answers = {} error_msgs = [] while not while_break: - if len(error_msgs) == 0: self.print_param_header(group_id, group_obj, True) @@ -698,7 +696,6 @@ def build_command(self): # Pipeline parameters if len(self.schema_obj.input_params) > 0: - # Write the user selection to a file and run nextflow with that if self.use_params_file: dump_json_with_prettier(self.params_out, self.schema_obj.input_params) diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py index b46399eb97..a998c964a0 100644 --- a/nf_core/lint/__init__.py +++ b/nf_core/lint/__init__.py @@ -181,6 +181,7 @@ class PipelineLint(nf_core.utils.Pipeline): from .schema_description import schema_description from .schema_lint import schema_lint from .schema_params import schema_params + from .system_exit import system_exit from .template_strings import template_strings from .version_consistency import version_consistency @@ -223,6 +224,7 @@ def _get_all_lint_tests(release_mode): "template_strings", "schema_lint", "schema_params", + "system_exit", "schema_description", "actions_schema_validation", "merge_markers", @@ -434,7 +436,6 @@ def format_result(test_results): ) def _print_summary(self): - # Summary table summary_colour = "red" if len(self.failed) > 0 else "green" table = Table(box=rich.box.ROUNDED, style=summary_colour) diff --git a/nf_core/lint/files_unchanged.py b/nf_core/lint/files_unchanged.py index cadced5483..c0be64d0d7 100644 --- a/nf_core/lint/files_unchanged.py +++ b/nf_core/lint/files_unchanged.py @@ -147,7 +147,6 @@ def _tf(file_path): # Files that must be completely 
unchanged from template for files in files_exact: - # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): @@ -177,7 +176,6 @@ def _tf(file_path): # Files that can be added to, but that must contain the template contents for files in files_partial: - # Ignore if file specified in linting config ignore_files = self.lint_config.get("files_unchanged", []) if any([f in ignore_files for f in files]): diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 79bce3e7f1..af018331f0 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -92,20 +92,22 @@ def nextflow_config(self): * Process-level configuration syntax still using the old Nextflow syntax, for example: ``process.$fastqc`` instead of ``process withName:'fastqc'``. .. tip:: You can choose to ignore tests for the presence or absence of specific config variables - by creating a file called ``.nf-core-lint.yml`` in the root of your pipeline and creating + by creating a file called ``.nf-core.yml`` in the root of your pipeline and creating a list the config variables that should be ignored. For example: .. code-block:: yaml - nextflow_config: - - params.input + lint: + nextflow_config: + - params.input The other checks in this test (depreciated syntax etc) can not be individually identified, but you can skip the entire test block if you wish: .. code-block:: yaml - nextflow_config: False + lint: + nextflow_config: False """ passed = [] warned = [] diff --git a/nf_core/lint/readme.py b/nf_core/lint/readme.py index daf92f7932..ae5c542837 100644 --- a/nf_core/lint/readme.py +++ b/nf_core/lint/readme.py @@ -17,16 +17,12 @@ def readme(self): [![Nextflow](https://img.shields.io/badge/nextflow-%E2%89%A50.27.6-brightgreen.svg)](https://www.nextflow.io/) - * Bioconda badge + .. 
note:: This badge is a markdown image ``![alt-text]()`` *inside* a markdown link ``[markdown image]()``, so a bit fiddly to write. - * If your pipeline contains a file called ``environment.yml`` in the root directory, a bioconda badge is required - * Required badge code: + * Zenodo release - .. code-block:: md - - [![install with bioconda](https://img.shields.io/badge/install%20with-bioconda-brightgreen.svg)](https://bioconda.github.io/) + * If pipeline is released but still contains a 'zenodo.XXXXXXX' tag, the test fails - .. note:: These badges are a markdown image ``![alt-text]()`` *inside* a markdown link ``[markdown image]()``, so a bit fiddly to write. """ passed = [] warned = [] @@ -62,24 +58,16 @@ def readme(self): else: warned.append("README did not have a Nextflow minimum version badge.") - # Check that the minimum version mentioned in the quick start section is consistent - # Looking for: "1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`)" - nf_version_re = r"1\.\s*Install\s*\[`Nextflow`\]\(https://www.nextflow.io/docs/latest/getstarted.html#installation\)\s*\(`>=(\d*\.\d*\.\d*)`\)" - match = re.search(nf_version_re, content) - if match: - nf_quickstart_version = match.group(1) - try: - if nf_quickstart_version != self.minNextflowVersion: - raise AssertionError() - except (AssertionError, KeyError): - failed.append( - f"README Nextflow minimium version in Quick Start section does not match config. README: `{nf_quickstart_version}`, Config `{self.minNextflowVersion}`" + if "zenodo_doi" not in ignore_configs: + # Check that zenodo.XXXXXXX has been replaced with the zenodo DOI + zenodo_re = r"/zenodo\.X+" + match = re.search(zenodo_re, content) + if match: + warned.append( + "README contains the placeholder `zenodo.XXXXXXX`. " + "This should be replaced with the zenodo doi (after the first release)." ) else: - passed.append( - f"README Nextflow minimum version in Quick Start section matched config. 
README: `{nf_quickstart_version}`, Config: `{self.minNextflowVersion}`" - ) - else: - warned.append("README did not have a Nextflow minimum version mentioned in Quick Start section.") + passed.append("README Zenodo placeholder was replaced with DOI.") return {"passed": passed, "warned": warned, "failed": failed} diff --git a/nf_core/lint/system_exit.py b/nf_core/lint/system_exit.py new file mode 100644 index 0000000000..56a526d97b --- /dev/null +++ b/nf_core/lint/system_exit.py @@ -0,0 +1,37 @@ +import logging +from pathlib import Path + +log = logging.getLogger(__name__) + + +def system_exit(self): + """Check for System.exit calls in groovy/nextflow code + + Calls to System.exit(1) should be replaced by throwing errors + + This lint test looks for all calls to `System.exit` + in any file with the `.nf` or `.groovy` extension + """ + passed = [] + warned = [] + + root_dir = Path(self.wf_path) + + # Get all groovy and nf files + groovy_files = [f for f in root_dir.rglob("*.groovy")] + nf_files = [f for f in root_dir.rglob("*.nf")] + to_check = nf_files + groovy_files + + for file in to_check: + try: + with file.open() as fh: + for i, l in enumerate(fh.readlines(), start=1): + if "System.exit" in l and not "System.exit(0)" in l: + warned.append(f"`System.exit` in {file.name}: _{l.strip()}_ [line {i}]") + except FileNotFoundError: + log.debug(f"Could not open file {file.name} in system_exit lint test") + + if len(warned) == 0: + passed.append("No `System.exit` calls found") + + return {"passed": passed, "warned": warned} diff --git a/nf_core/lint/template_strings.py b/nf_core/lint/template_strings.py index 436abe7b2b..fb1f0f32e5 100644 --- a/nf_core/lint/template_strings.py +++ b/nf_core/lint/template_strings.py @@ -24,7 +24,6 @@ def template_strings(self): # Loop through files, searching for string num_matches = 0 for fn in self.files: - # Skip binary files binary_ftypes = ["image", "application/java-archive"] (ftype, encoding) = mimetypes.guess_type(fn) diff --git 
a/nf_core/list.py b/nf_core/list.py index 53307ac9bd..77a9ac3919 100644 --- a/nf_core/list.py +++ b/nf_core/list.py @@ -328,7 +328,6 @@ def get_local_nf_workflow_details(self): """Get full details about a local cached workflow""" if self.local_path is None: - # Try to guess the local cache directory if len(os.environ.get("NXF_ASSETS", "")) > 0: nf_wfdir = os.path.join(os.environ.get("NXF_ASSETS"), self.full_name) diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/modules/main.nf index e8f043f083..83cdf90b92 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/modules/main.nf @@ -1,3 +1,4 @@ +{%- if not_empty_template -%} // TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :) // https://github.com/nf-core/modules/tree/master/modules/nf-core/ // You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace: @@ -14,21 +15,25 @@ // bwa mem | samtools view -B -T ref.fasta // TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty // list (`[]`) instead of a file can be used to work around this issue. +{%- endif %} -process {{ tool_name_underscore|upper }} { +process {{ component_name_underscore|upper }} { tag {{ '"$meta.id"' if has_meta else "'$bam'" }} label '{{ process_label }}' + {% if not_empty_template -%} // TODO nf-core: List required Conda package(s). // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. + {% endif -%} conda "{{ bioconda if bioconda else 'YOUR-TOOL-HERE' }}" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
'{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}': - '{{ docker_container if docker_container else 'quay.io/biocontainers/YOUR-TOOL-HERE' }}' }" + '{{ docker_container if docker_container else 'biocontainers/YOUR-TOOL-HERE' }}' }" input: + {% if not_empty_template -%} // TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group" // MUST be provided as an input via a Groovy Map called "meta". // This information may not be required in some instances e.g. indexing reference genome files: @@ -36,11 +41,20 @@ process {{ tool_name_underscore|upper }} { // TODO nf-core: Where applicable please provide/convert compressed files as input/output // e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc. {{ 'tuple val(meta), path(bam)' if has_meta else 'path bam' }} + {%- else -%} + {{ 'tuple val(meta), path(input)' if has_meta else 'path input' }} + {%- endif %} output: + {% if not_empty_template -%} // TODO nf-core: Named file extensions MUST be emitted for ALL output channels {{ 'tuple val(meta), path("*.bam")' if has_meta else 'path "*.bam"' }}, emit: bam + {%- else -%} + {{ 'tuple val(meta), path("*")' if has_meta else 'path "*"' }}, emit: output + {%- endif %} + {% if not_empty_template -%} // TODO nf-core: List additional required output channels/values here + {%- endif %} path "versions.yml" , emit: versions when: @@ -51,6 +65,7 @@ process {{ tool_name_underscore|upper }} { {% if has_meta -%} def prefix = task.ext.prefix ?: "${meta.id}" {%- endif %} + {% if not_empty_template -%} // TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10 // If the software is unable to output a version number on the command-line then it can be manually specified // e.g. 
https://github.com/nf-core/modules/blob/master/modules/nf-core/homer/annotatepeaks/main.nf @@ -60,7 +75,9 @@ process {{ tool_name_underscore|upper }} { // using the Nextflow "task" variable e.g. "--threads $task.cpus" // TODO nf-core: Please replace the example samtools command below with your module's command // TODO nf-core: Please indent the command appropriately (4 spaces!!) to help with readability ;) + {%- endif %} """ + {% if not_empty_template -%} samtools \\ sort \\ $args \\ @@ -70,6 +87,7 @@ process {{ tool_name_underscore|upper }} { -T $prefix \\ {%- endif %} $bam + {%- endif %} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/modules/meta.yml index c95e3e1d84..2c8197dcba 100644 --- a/nf_core/module-template/modules/meta.yml +++ b/nf_core/module-template/modules/meta.yml @@ -1,11 +1,19 @@ -name: "{{ tool_name_underscore }}" +--- +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json +name: "{{ component_name_underscore }}" +{% if not_empty_template -%} ## TODO nf-core: Add a description of the module and list keywords +{% endif -%} description: write your description here keywords: - sort + - example + - genomics tools: - - "{{ tool }}": + - "{{ component }}": + {% if not_empty_template -%} ## TODO nf-core: Add a description and other details for the software below + {% endif -%} description: "{{ tool_description }}" homepage: "{{ tool_doc_url }}" documentation: "{{ tool_doc_url }}" @@ -13,7 +21,9 @@ tools: doi: "" licence: "{{ tool_licence }}" +{% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as input +{% endif -%} input: #{% if has_meta %} Only when we have meta - meta: @@ -21,14 +31,18 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - #{% endif %} + {% endif %} + {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example input - - bam: + {%- endif %} + - {{ 'bam:' if not_empty_template else "input:" }} type: file - description: BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} +{% if not_empty_template -%} ## TODO nf-core: Add a description of all of the variables used as output +{% endif -%} output: #{% if has_meta -%} Only when we have meta - meta: @@ -36,16 +50,18 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - #{% endif %} + {% endif %} - versions: type: file description: File containing software versions pattern: "versions.yml" + {% if not_empty_template -%} ## TODO nf-core: Delete / customise this example output - - bam: + {%- endif %} + - {{ 'bam:' if not_empty_template else "output:" }} type: file - description: Sorted BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + description: {{ 'Sorted BAM/CRAM/SAM file' if not_empty_template else "" }} + pattern: {{ '"*.{bam,cram,sam}"' if not_empty_template else "" }} authors: - "{{ author }}" diff --git a/nf_core/module-template/tests/main.nf b/nf_core/module-template/tests/main.nf index 351de72385..fcb7195fe4 100644 --- a/nf_core/module-template/tests/main.nf +++ b/nf_core/module-template/tests/main.nf @@ -2,9 +2,9 @@ nextflow.enable.dsl = 2 -include { {{ tool_name_underscore|upper }} } from '../../../../{{ "../" if subtool else "" }}modules/{{ org }}/{{ tool_dir }}/main.nf' +include { {{ component_name_underscore|upper }} } from '../../../../{{ "../" if subtool else "" }}modules/{{ org }}/{{ component_dir }}/main.nf' -workflow test_{{ tool_name_underscore }} { +workflow test_{{ component_name_underscore }} { {% if has_meta %} input = [ [ id:'test', single_end:false ], // meta map @@ -14,5 
+14,5 @@ workflow test_{{ tool_name_underscore }} { input = file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) {%- endif %} - {{ tool_name_underscore|upper }} ( input ) + {{ component_name_underscore|upper }} ( input ) } diff --git a/nf_core/module-template/tests/test.yml b/nf_core/module-template/tests/test.yml index a65b127c97..4f38dec298 100644 --- a/nf_core/module-template/tests/test.yml +++ b/nf_core/module-template/tests/test.yml @@ -1,12 +1,18 @@ +{%- if not_empty_template -%} ## TODO nf-core: Please run the following command to build this file: # nf-core modules create-test-yml {{ tool }}{%- if subtool %}/{{ subtool }}{%- endif %} -- name: "{{ tool }}{{ ' '+subtool if subtool else '' }}" - command: nextflow run ./tests/modules/{{ org }}/{{ tool_dir }} -entry test_{{ tool_name_underscore }} -c ./tests/config/nextflow.config -c ./tests/modules/{{ org }}/{{ tool_dir }}/nextflow.config +{% endif -%} +- name: "{{ component }}{{ ' '+subtool if subtool else '' }}" + command: nextflow run ./tests/modules/{{ org }}/{{ component_dir }} -entry test_{{ component_name_underscore }} -c ./tests/config/nextflow.config -c ./tests/modules/{{ org }}/{{ component_dir }}/nextflow.config tags: - - "{{ tool }}{% if subtool -%}" - - "{{ tool }}/{{ subtool }}{%- endif %}" + - "{{ component }}{% if subtool -%}" + - "{{ component }}/{{ subtool }}{%- endif %}" files: - - path: "output/{{ tool }}/test.bam" + {% if not_empty_template -%} + - path: "output/{{ component }}/test.bam" md5sum: e667c7caad0bc4b7ac383fd023c654fc - - path: "output/{{ tool }}/versions.yml" + - path: "output/{{ component }}/versions.yml" md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b + {% else -%} + - path: "" + {%- endif %} diff --git a/nf_core/modules/__init__.py b/nf_core/modules/__init__.py index 060b39124b..47af637d02 100644 --- a/nf_core/modules/__init__.py +++ b/nf_core/modules/__init__.py @@ -8,7 +8,6 @@ from .modules_repo import ModulesRepo from .modules_test import 
ModulesTest from .modules_utils import ModuleException -from .mulled import MulledImageNameGenerator from .patch import ModulePatch from .remove import ModuleRemove from .test_yml_builder import ModulesTestYmlBuilder diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py index 7b9702622e..c925e497fa 100644 --- a/nf_core/modules/bump_versions.py +++ b/nf_core/modules/bump_versions.py @@ -172,7 +172,7 @@ def bump_module_version(self, module): patterns = [ (bioconda_packages[0], f"'bioconda::{bioconda_tool_name}={last_ver}'"), - (rf"quay.io/biocontainers/{bioconda_tool_name}:[^'\"\s]+", docker_img), + (rf"biocontainers/{bioconda_tool_name}:[^'\"\s]+", docker_img), ( rf"https://depot.galaxyproject.org/singularity/{bioconda_tool_name}:[^'\"\s]+", singularity_img, @@ -187,7 +187,6 @@ def bump_module_version(self, module): found_match = False newcontent = [] for line in content.splitlines(): - # Match the pattern matches_pattern = re.findall(rf"^.*{pattern[0]}.*$", line) if matches_pattern: diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py index 019a77c71f..b5368130ce 100644 --- a/nf_core/modules/create.py +++ b/nf_core/modules/create.py @@ -1,242 +1,32 @@ -""" -The ModuleCreate class handles generating of module templates -""" - -from __future__ import print_function - import logging -import os -import questionary -import rich -import yaml -from packaging.version import parse as parse_version - -import nf_core -import nf_core.components.components_create -import nf_core.utils -from nf_core.components.components_command import ComponentCommand +from nf_core.components.create import ComponentCreate log = logging.getLogger(__name__) -class ModuleCreate(ComponentCommand): +class ModuleCreate(ComponentCreate): def __init__( self, - directory=".", - tool="", + pipeline_dir, + component="", author=None, process_label=None, has_meta=None, force=False, conda_name=None, conda_version=None, + empty_template=False, ): - 
super().__init__("modules", directory) - self.directory = directory - self.tool = tool - self.author = author - self.process_label = process_label - self.has_meta = has_meta - self.force_overwrite = force - self.subtool = None - self.tool_conda_name = conda_name - self.tool_conda_version = conda_version - self.tool_licence = None - self.tool_licence = "" - self.tool_description = "" - self.tool_doc_url = "" - self.tool_dev_url = "" - self.bioconda = None - self.singularity_container = None - self.docker_container = None - self.file_paths = {} - - def create(self): - """ - Create a new DSL2 module from the nf-core template. - - Tool should be named just or - e.g fastqc or samtools/sort, respectively. - - If is a pipeline, this function creates a file called: - '/modules/local/tool.nf' - OR - '/modules/local/tool_subtool.nf' - - If is a clone of nf-core/modules, it creates or modifies the following files: - - modules/modules/nf-core/tool/subtool/ - * main.nf - * meta.yml - modules/tests/modules/nf-core/tool/subtool/ - * main.nf - * test.yml - * nextflow.config - tests/config/pytest_modules.yml - - The function will attempt to automatically find a Bioconda package called - and matching Docker / Singularity images from BioContainers. - """ - - # Check modules directory structure - self.check_modules_structure() - - log.info(f"Repository type: [blue]{self.repo_type}") - if self.directory != ".": - log.info(f"Base directory: '{self.directory}'") - - log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets)[/] [yellow]or type your own responses. " - "ctrl+click [link=https://youtu.be/dQw4w9WgXcQ]underlined text[/link] to open links." 
+ super().__init__( + "modules", + pipeline_dir, + component, + author, + process_label, + has_meta, + force, + conda_name, + conda_version, + empty_template, ) - - # Collect module info via prompt if empty or invalid - self.tool, self.subtool = nf_core.components.components_create.collect_name_prompt( - self.tool, self.component_type - ) - - # Determine the tool name - self.tool_name = self.tool - self.tool_dir = self.tool - - if self.subtool: - self.tool_name = f"{self.tool}/{self.subtool}" - self.tool_dir = os.path.join(self.tool, self.subtool) - - self.tool_name_underscore = self.tool_name.replace("/", "_") - - # Check existence of directories early for fast-fail - self.file_paths = nf_core.components.components_create.get_component_dirs( - self.component_type, - self.repo_type, - self.directory, - self.org, - self.tool_name, - self.tool, - self.subtool, - self.tool_dir, - self.force_overwrite, - ) - - # Try to find a bioconda package for 'tool' - self._get_bioconda_tool() - - # Prompt for GitHub username - self.author = nf_core.components.components_create.get_username(self.author) - - self._get_module_structure_components() - - # Create module template with jinja2 - nf_core.components.components_create.render_template(self.component_type, vars(self), self.file_paths) - - if self.repo_type == "modules": - # Add entry to pytest_modules.yml - try: - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh: - pytest_modules_yml = yaml.safe_load(fh) - if self.subtool: - pytest_modules_yml[self.tool_name] = [ - f"modules/{self.org}/{self.tool}/{self.subtool}/**", - f"tests/modules/{self.org}/{self.tool}/{self.subtool}/**", - ] - else: - pytest_modules_yml[self.tool_name] = [ - f"modules/{self.org}/{self.tool}/**", - f"tests/modules/{self.org}/{self.tool}/**", - ] - pytest_modules_yml = dict(sorted(pytest_modules_yml.items())) - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: - 
yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) - except FileNotFoundError: - raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") - - new_files = list(self.file_paths.values()) - if self.repo_type == "modules": - new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml")) - log.info("Created / edited following files:\n " + "\n ".join(new_files)) - - def _get_bioconda_tool(self): - """ - Try to find a bioconda package for 'tool' - """ - while True: - try: - if self.tool_conda_name: - anaconda_response = nf_core.utils.anaconda_package(self.tool_conda_name, ["bioconda"]) - else: - anaconda_response = nf_core.utils.anaconda_package(self.tool, ["bioconda"]) - - if not self.tool_conda_version: - version = anaconda_response.get("latest_version") - if not version: - version = str(max([parse_version(v) for v in anaconda_response["versions"]])) - else: - version = self.tool_conda_version - - self.tool_licence = nf_core.utils.parse_anaconda_licence(anaconda_response, version) - self.tool_description = anaconda_response.get("summary", "") - self.tool_doc_url = anaconda_response.get("doc_url", "") - self.tool_dev_url = anaconda_response.get("dev_url", "") - if self.tool_conda_name: - self.bioconda = "bioconda::" + self.tool_conda_name + "=" + version - else: - self.bioconda = "bioconda::" + self.tool + "=" + version - log.info(f"Using Bioconda package: '{self.bioconda}'") - break - except (ValueError, LookupError) as e: - log.warning( - f"Could not find Conda dependency using the Anaconda API: '{self.tool_conda_name if self.tool_conda_name else self.tool}'" - ) - if rich.prompt.Confirm.ask("[violet]Do you want to enter a different Bioconda package name?"): - self.tool_conda_name = rich.prompt.Prompt.ask("[violet]Name of Bioconda package").strip() - continue - else: - log.warning( - f"{e}\nBuilding module without tool software and meta, you will need to enter this information 
manually." - ) - break - - # Try to get the container tag (only if bioconda package was found) - if self.bioconda: - try: - if self.tool_conda_name: - self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( - self.tool_conda_name, version - ) - else: - self.docker_container, self.singularity_container = nf_core.utils.get_biocontainer_tag( - self.tool, version - ) - log.info(f"Using Docker container: '{self.docker_container}'") - log.info(f"Using Singularity container: '{self.singularity_container}'") - except (ValueError, LookupError) as e: - log.info(f"Could not find a Docker/Singularity container ({e})") - - def _get_module_structure_components(self): - process_label_defaults = ["process_single", "process_low", "process_medium", "process_high", "process_long"] - if self.process_label is None: - log.info( - "Provide an appropriate resource label for the process, taken from the " - "[link=https://github.com/nf-core/tools/blob/master/nf_core/pipeline-template/conf/base.config#L29]nf-core pipeline template[/link].\n" - "For example: {}".format(", ".join(process_label_defaults)) - ) - while self.process_label is None: - self.process_label = questionary.autocomplete( - "Process resource label:", - choices=process_label_defaults, - style=nf_core.utils.nfcore_question_style, - default="process_single", - ).unsafe_ask() - - if self.has_meta is None: - log.info( - "Where applicable all sample-specific information e.g. 'id', 'single_end', 'read_group' " - "MUST be provided as an input via a Groovy Map called 'meta'. " - "This information may [italic]not[/] be required in some instances, for example " - "[link=https://github.com/nf-core/modules/blob/master/modules/nf-core/bwa/index/main.nf]indexing reference genome files[/link]." 
- ) - while self.has_meta is None: - self.has_meta = rich.prompt.Confirm.ask( - "[violet]Will the module require a meta map of sample information?", default=True - ) diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index 0f4ece6c49..24d673b1c2 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -91,6 +91,8 @@ def __init__( modules_json.check_up_to_date() self.all_remote_modules = [] for repo_url, components in modules_json.get_all_components(self.component_type).items(): + if remote_url is not None and remote_url != repo_url: + continue for org, comp in components: self.all_remote_modules.append( NFCoreModule( diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index d44fe90f1e..bacc7d7fbd 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -235,31 +235,31 @@ def check_process_section(self, lines, fix_version, progress_bar): self.failed.append(("process_capitals", "Process name is not in capital letters", self.main_nf)) # Check that process labels are correct - correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] - process_label = [l for l in lines if l.lstrip().startswith("label")] - if len(process_label) > 0: - try: - process_label = re.search("process_[A-Za-z]+", process_label[0]).group(0) - except AttributeError: - process_label = re.search("'([A-Za-z_-]+)'", process_label[0]).group(0) - finally: - if not process_label in correct_process_labels: - self.warned.append( + check_process_labels(self, lines) + + # Deprecated enable_conda + for i, l in enumerate(lines): + url = None + l = l.strip(" '\"") + if _container_type(l) == "conda": + bioconda_packages = [b for b in l.split() if "bioconda::" in b] + match = re.search(r"params\.enable_conda", l) + if match is None: + self.passed.append( ( - "process_standard_label", - f"Process label ({process_label}) is not among standard labels: 
`{'`,`'.join(correct_process_labels)}`", + "deprecated_enable_conda", + f"Deprecated parameter 'params.enable_conda' correctly not found in the conda definition", self.main_nf, ) ) else: - self.passed.append(("process_standard_label", "Correct process label", self.main_nf)) - else: - self.warned.append(("process_standard_label", "Process label unspecified", self.main_nf)) - for i, l in enumerate(lines): - url = None - if _container_type(l) == "bioconda": - bioconda_packages = [b for b in l.split() if "bioconda::" in b] - l = l.strip(" '\"") + self.failed.append( + ( + "deprecated_enable_conda", + f"Found deprecated parameter 'params.enable_conda' in the conda definition", + self.main_nf, + ) + ) if _container_type(l) == "singularity": # e.g. "https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' :" -> v1.2.0_cv1 # e.g. "https://depot.galaxyproject.org/singularity/fastqc:0.11.9--0' :" -> 0.11.9--0 @@ -290,6 +290,9 @@ def check_process_section(self, lines, fix_version, progress_bar): else: self.failed.append(("docker_tag", "Unable to parse docker tag", self.main_nf)) docker_tag = None + if l.startswith("biocontainers/"): + # When we think it is a biocontainer, assume we are querying quay.io/biocontainers and insert quay.io as prefix + l = "quay.io/" + l url = urlparse(l.split("'")[0]) # lint double quotes if l.count('"') > 2: @@ -392,6 +395,56 @@ def check_process_section(self, lines, fix_version, progress_bar): return docker_tag == singularity_tag +def check_process_labels(self, lines): + correct_process_labels = ["process_single", "process_low", "process_medium", "process_high", "process_long"] + all_labels = [l.strip() for l in lines if l.lstrip().startswith("label ")] + bad_labels = [] + good_labels = [] + if len(all_labels) > 0: + for label in all_labels: + try: + label = re.match(r"^label\s+'?([a-zA-Z0-9_-]+)'?$", label).group(1) + except AttributeError: + self.warned.append( + ( + "process_standard_label", 
+ f"Specified label appears to contain non-alphanumerics: {label}", + self.main_nf, + ) + ) + continue + if label not in correct_process_labels: + bad_labels.append(label) + else: + good_labels.append(label) + if len(good_labels) > 1: + self.warned.append( + ( + "process_standard_label", + f"Conflicting process labels found: `{'`,`'.join(good_labels)}`", + self.main_nf, + ) + ) + elif len(good_labels) == 1: + self.passed.append(("process_standard_label", "Correct process label", self.main_nf)) + else: + self.warned.append(("process_standard_label", "Standard process label not found", self.main_nf)) + if len(bad_labels) > 0: + self.warned.append( + ("process_standard_label", f"Non-standard labels found: `{'`,`'.join(bad_labels)}`", self.main_nf) + ) + if len(all_labels) > len(set(all_labels)): + self.warned.append( + ( + "process_standard_label", + f"Duplicate labels found: `{'`,`'.join(sorted(all_labels))}`", + self.main_nf, + ) + ) + else: + self.warned.append(("process_standard_label", "Process label not specified", self.main_nf)) + + def _parse_input(self, line_raw): """ Return list of input channel names from an input line. @@ -471,7 +524,7 @@ def _fix_module_version(self, current_version, latest_version, singularity_tag, for line in lines: l = line.strip(" '\"") build_type = _container_type(l) - if build_type == "bioconda": + if build_type == "conda": new_lines.append(re.sub(rf"{current_version}", f"{latest_version}", line)) elif build_type in ("singularity", "docker"): # Check that the new url is valid @@ -516,8 +569,8 @@ def _get_build(response): def _container_type(line): """Returns the container type of a build.""" - if re.search("bioconda::", line): - return "bioconda" + if line.startswith("conda"): + return "conda" if line.startswith("https://containers") or line.startswith("https://depot"): # Look for a http download URL. 
# Thanks Stack Overflow for the regex: https://stackoverflow.com/a/3809435/713980 @@ -528,5 +581,9 @@ def _container_type(line): if url_match: return "singularity" return None - if line.startswith("biocontainers/") or line.startswith("quay.io/"): + if ( + line.startswith("biocontainers/") + or line.startswith("quay.io/") + or (line.count("/") == 1 and line.count(":") == 1) + ): return "docker" diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py index d6ec296999..dd5e954f25 100644 --- a/nf_core/modules/lint/meta_yml.py +++ b/nf_core/modules/lint/meta_yml.py @@ -1,5 +1,7 @@ +import json from pathlib import Path +import jsonschema.validators import yaml from nf_core.modules.modules_differ import ModulesDiffer @@ -10,17 +12,15 @@ def meta_yml(module_lint_object, module): Lint a ``meta.yml`` file The lint test checks that the module has - a ``meta.yml`` file and that it contains - the required keys: ``name``, input`` and - ``output``. + a ``meta.yml`` file and that it follows the + JSON schema defined in the ``modules/yaml-schema.json`` + file in the nf-core/modules repository. In addition it checks that the module name and module input is consistent between the ``meta.yml`` and the ``main.nf``. 
""" - required_keys = ["name", "output"] - required_keys_lists = ["input", "output"] # Check if we have a patch file, get original file in that case meta_yaml = None if module.is_patched: @@ -42,21 +42,31 @@ def meta_yml(module_lint_object, module): module.failed.append(("meta_yml_exists", "Module `meta.yml` does not exist", module.meta_yml)) return - # Confirm that all required keys are given - contains_required_keys = True - all_list_children = True - for rk in required_keys: - if rk not in meta_yaml.keys(): - module.failed.append(("meta_required_keys", f"`{rk}` not specified in YAML", module.meta_yml)) - contains_required_keys = False - elif rk in meta_yaml.keys() and not isinstance(meta_yaml[rk], list) and rk in required_keys_lists: - module.failed.append(("meta_required_keys", f"`{rk}` is not a list", module.meta_yml)) - all_list_children = False - if contains_required_keys: - module.passed.append(("meta_required_keys", "`meta.yml` contains all required keys", module.meta_yml)) + # Confirm that the meta.yml file is valid according to the JSON schema + valid_meta_yml = True + try: + with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/yaml-schema.json"), "r") as fh: + schema = json.load(fh) + jsonschema.validators.validate(instance=meta_yaml, schema=schema) + module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml)) + except jsonschema.exceptions.ValidationError as e: + valid_meta_yml = False + hint = "" + if len(e.path) > 0: + hint = f"\nCheck the entry for `{e.path[0]}`." + if e.message.startswith("None is not of type 'object'") and len(e.path) > 2: + hint = f"\nCheck that the child entries of {e.path[0]+'.'+e.path[2]} are indented correctly." 
+ module.failed.append( + ( + "meta_yml_valid", + f"The `meta.yml` of the module {module.module_name} is not valid: {e.message}.{hint}", + module.meta_yml, + ) + ) + return # Confirm that all input and output channels are specified - if contains_required_keys and all_list_children: + if valid_meta_yml: if "input" in meta_yaml: meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] for input in module.inputs: diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py index 2d95afd10c..e9e77f65c7 100644 --- a/nf_core/modules/modules_json.py +++ b/nf_core/modules/modules_json.py @@ -419,16 +419,16 @@ def unsynced_components(self): def parse_dirs(self, dirs, missing_installation, component_type): untracked_dirs = [] - for dir in dirs: + for dir_ in dirs: # Check if the module/subworkflows directory exists in modules.json - install_dir = dir.parts[0] - component = str(Path(*dir.parts[1:])) + install_dir = dir_.parts[0] + component = str(Path(*dir_.parts[1:])) component_in_file = False git_url = None for repo in missing_installation: if component_type in missing_installation[repo]: - for dir_name in missing_installation[repo][component_type]: - if component in missing_installation[repo][component_type][dir_name]: + if install_dir in missing_installation[repo][component_type]: + if component in missing_installation[repo][component_type][install_dir]: component_in_file = True git_url = repo break @@ -494,7 +494,7 @@ def reinstall_repo(self, install_dir, remote_url, module_entries): Args: install_dir (str): The name of directory where modules are installed remote_url (str): The git url of the remote repository - modules ([ dict[str, dict[str, str]] ]): Module entries with + module_entries ([ dict[str, dict[str, str]] ]): Module entries with branch and git sha info Returns: @@ -745,6 +745,16 @@ def add_patch_entry(self, module_name, repo_url, install_dir, patch_filename, wr if write_file: self.dump() + def remove_patch_entry(self, module_name, 
repo_url, install_dir, write_file=True): + if self.modules_json is None: + self.load() + try: + del self.modules_json["repos"][repo_url]["modules"][install_dir][module_name]["patch"] + except KeyError: + log.warning("No patch entry in 'modules.json' to remove") + if write_file: + self.dump() + def get_patch_fn(self, module_name, repo_url, install_dir): """ Get the patch filename of a module diff --git a/nf_core/modules/modules_repo.py b/nf_core/modules/modules_repo.py index 606514e55e..5f77148867 100644 --- a/nf_core/modules/modules_repo.py +++ b/nf_core/modules/modules_repo.py @@ -302,7 +302,7 @@ def checkout(self, commit): """ self.repo.git.checkout(commit) - def component_exists(self, component_name, component_type, checkout=True): + def component_exists(self, component_name, component_type, checkout=True, commit=None): """ Check if a module/subworkflow exists in the branch of the repo @@ -312,7 +312,7 @@ def component_exists(self, component_name, component_type, checkout=True): Returns: (bool): Whether the module/subworkflow exists in this branch of the repository """ - return component_name in self.get_avail_components(component_type, checkout=checkout) + return component_name in self.get_avail_components(component_type, checkout=checkout, commit=commit) def get_component_dir(self, component_name, component_type): """ @@ -449,7 +449,7 @@ def get_commit_info(self, sha): return message, date raise LookupError(f"Commit '{sha}' not found in the '{self.remote_url}'") - def get_avail_components(self, component_type, checkout=True): + def get_avail_components(self, component_type, checkout=True, commit=None): """ Gets the names of the modules/subworkflows in the repository. 
They are detected by checking which directories have a 'main.nf' file @@ -459,6 +459,8 @@ def get_avail_components(self, component_type, checkout=True): """ if checkout: self.checkout_branch() + if commit is not None: + self.checkout(commit) # Get directory if component_type == "modules": directory = self.modules_dir diff --git a/nf_core/modules/mulled.py b/nf_core/modules/mulled.py deleted file mode 100644 index fc1d1a3555..0000000000 --- a/nf_core/modules/mulled.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Generate the name of a BioContainers mulled image version 2.""" - - -import logging -import re -from typing import Iterable, List, Tuple - -import requests -from galaxy.tool_util.deps.mulled.util import build_target, v2_image_name -from packaging.version import InvalidVersion, Version - -log = logging.getLogger(__name__) - - -class MulledImageNameGenerator: - """ - Define a service class for generating BioContainers version 2 mulled image names. - - Adapted from https://gist.github.com/natefoo/19cefeedd1942c30f9d88027a61b3f83. - - """ - - _split_pattern = re.compile(r"==?") - - @classmethod - def parse_targets(cls, specifications: Iterable[str]) -> List[Tuple[str, str]]: - """ - Parse tool, version pairs from specification strings. - - Args: - specifications: An iterable of strings that contain tools and their versions. - - """ - result = [] - for spec in specifications: - try: - tool, version = cls._split_pattern.split(spec, maxsplit=1) - except ValueError: - raise ValueError( - f"The specification {spec} does not have the expected format or ." - ) from None - try: - Version(version) - except InvalidVersion: - raise ValueError(f"Not a PEP440 version spec: '{version}' in '{spec}'") from None - result.append((tool.strip(), version.strip())) - return result - - @classmethod - def generate_image_name(cls, targets: Iterable[Tuple[str, str]], build_number: int = 0) -> str: - """ - Generate the name of a BioContainers mulled image version 2. 
- - Args: - targets: One or more tool, version pairs of the multi-tool container image. - build_number: The build number for this image. This is an incremental value that starts at zero. - - """ - return v2_image_name([build_target(name, version) for name, version in targets], image_build=str(build_number)) - - @classmethod - def image_exists(cls, image_name: str) -> bool: - """Check whether a given BioContainers image name exists via a call to the quay.io API.""" - quay_url = f"https://quay.io/biocontainers/{image_name}/" - response = requests.get(quay_url, allow_redirects=True) - log.debug(f"Got response code '{response.status_code}' for URL {quay_url}") - if response.status_code == 200: - log.info(f"Found [link={quay_url}]docker image[/link] on quay.io! :sparkles:") - return True - else: - log.error(f"Was not able to find [link={quay_url}]docker image[/link] on quay.io") - return False diff --git a/nf_core/modules/patch.py b/nf_core/modules/patch.py index fa51640c06..4890345052 100644 --- a/nf_core/modules/patch.py +++ b/nf_core/modules/patch.py @@ -130,3 +130,80 @@ def patch(self, module=None): # Finally move the created patch file to its final location shutil.move(patch_temp_path, patch_path) log.info(f"Patch file of '{module_fullname}' written to '{patch_path}'") + + def remove(self, module): + # Check modules directory structure + self.check_modules_structure() + + self.modules_json.check_up_to_date() + self.param_check(module) + modules = self.modules_json.get_all_components(self.component_type)[self.modules_repo.remote_url] + + if module is None: + choices = [ + module if directory == self.modules_repo.repo_path else f"{directory}/{module}" + for directory, module in modules + ] + module = questionary.autocomplete( + "Tool:", + choices, + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + module_dir = [dir for dir, m in modules if m == module][0] + module_fullname = str(Path("modules", module_dir, module)) + + # Verify that the module has an 
entry in the modules.json file + if not self.modules_json.module_present(module, self.modules_repo.remote_url, module_dir): + raise UserWarning( + f"The '{module_fullname}' module does not have an entry in the 'modules.json' file. Cannot compute patch" + ) + + module_version = self.modules_json.get_module_version(module, self.modules_repo.remote_url, module_dir) + if module_version is None: + raise UserWarning( + f"The '{module_fullname}' module does not have a valid version in the 'modules.json' file. Cannot compute patch" + ) + # Get the module branch and reset it in the ModulesRepo object + module_branch = self.modules_json.get_component_branch( + self.component_type, module, self.modules_repo.remote_url, module_dir + ) + if module_branch != self.modules_repo.branch: + self.modules_repo.setup_branch(module_branch) + + # Set the diff filename based on the module name + patch_filename = f"{module.replace('/', '-')}.diff" + module_relpath = Path("modules", module_dir, module) + patch_relpath = Path(module_relpath, patch_filename) + patch_path = Path(self.dir, patch_relpath) + module_path = Path(self.dir, module_relpath) + + if patch_path.exists(): + remove = questionary.confirm( + f"Patch exists for module '{module_fullname}'. 
Are you sure you want to remove?", + style=nf_core.utils.nfcore_question_style, + ).unsafe_ask() + if not remove: + return + + # Try to apply the patch in reverse and move resulting files to module dir + temp_module_dir = self.modules_json.try_apply_patch_reverse( + module, self.modules_repo.repo_path, patch_relpath, module_path + ) + try: + for file in Path(temp_module_dir).glob("*"): + file.rename(module_path.joinpath(file.name)) + os.rmdir(temp_module_dir) + except Exception as err: + raise UserWarning(f"There was a problem reverting the patched file: {err}") + + log.info(f"Patch for {module} reverted!") + # Remove patch file if we could revert the patch + patch_path.unlink() + # Write changes to module.json + self.modules_json.remove_patch_entry(module, self.modules_repo.remote_url, module_dir) + + if not all(self.modules_repo.module_files_identical(module, module_path, module_version).values()): + log.error( + f"Module files do not appear to match the remote for the commit sha in the 'module.json': {module_version}\n" + f"Recommend reinstalling with 'nf-core modules install --force --sha {module_version} {module}' " + ) diff --git a/nf_core/modules/test_yml_builder.py b/nf_core/modules/test_yml_builder.py index c9a6273a58..7ab9e464d4 100644 --- a/nf_core/modules/test_yml_builder.py +++ b/nf_core/modules/test_yml_builder.py @@ -213,16 +213,9 @@ def check_if_empty_file(self, fname): g_f = gzip.GzipFile(fileobj=fh, mode="rb") if g_f.read() == b"": return True - except Exception as e: - # Python 3.8+ - if hasattr(gzip, "BadGzipFile"): - if isinstance(e, gzip.BadGzipFile): - pass - # Python 3.7 - elif isinstance(e, OSError): - pass - else: - raise e + except gzip.BadGzipFile: + pass + return False def _md5(self, fname): diff --git a/nf_core/pipeline-template/.editorconfig b/nf_core/pipeline-template/.editorconfig index b78de6e655..b6b3190776 100644 --- a/nf_core/pipeline-template/.editorconfig +++ b/nf_core/pipeline-template/.editorconfig @@ -8,7 +8,7 @@ 
trim_trailing_whitespace = true indent_size = 4 indent_style = space -[*.{md,yml,yaml,html,css,scss,js,cff}] +[*.{md,yml,yaml,html,css,scss,js}] indent_size = 2 # These files are edited and tested upstream in nf-core/modules diff --git a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml index 27866452f0..f93cd55d59 100644 --- a/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/nf_core/pipeline-template/.github/ISSUE_TEMPLATE/bug_report.yml @@ -45,6 +45,6 @@ body: * Nextflow version _(eg. 22.10.1)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ - * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ + * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_ * OS _(eg. CentOS Linux, macOS, Linux Mint)_ * Version of {{ name }} _(eg. 1.1, 1.5, 1.8.2)_ diff --git a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md index 3278a33b1e..0f81ebaa4c 100644 --- a/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md +++ b/nf_core/pipeline-template/.github/PULL_REQUEST_TEMPLATE.md @@ -16,8 +16,9 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/{{ name }}/t - [ ] This comment contains a description of changes (with reason). - [ ] If you've fixed a bug or added code that should be tested, add tests! 
- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/{{ name }}/tree/master/.github/CONTRIBUTING.md) - {%- if branded -%} -- [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.{% endif %} + {%- if branded %} +- [ ] If necessary, also make a PR on the {{ name }} _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository. + {%- endif %} - [ ] Make sure your code lints (`nf-core lint`). - [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir `). - [ ] Usage Documentation in `docs/usage.md` is updated. diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 4b96fa80ee..4942167b12 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -14,7 +14,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Launch workflow via tower - uses: nf-core/tower-action@v3 + uses: seqeralabs/action-tower-launch@v1 # TODO nf-core: You can customise AWS full pipeline tests as required # Add full size test data (but still relatively small datasets for few samples) # on the `test_full.config` test runs with only one set of parameters {%- raw %} diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 0f261fcb42..7f80cf1bb5 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -12,7 +12,7 @@ jobs: steps: # Launch workflow using Tower CLI tool action {%- raw %} - name: Launch workflow via tower - uses: nf-core/tower-action@v3 + uses: seqeralabs/action-tower-launch@v1 with: workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }} access_token: ${{ 
secrets.TOWER_ACCESS_TOKEN }} diff --git a/nf_core/pipeline-template/.github/workflows/branch.yml b/nf_core/pipeline-template/.github/workflows/branch.yml index e22310ba6f..8edfa540c9 100644 --- a/nf_core/pipeline-template/.github/workflows/branch.yml +++ b/nf_core/pipeline-template/.github/workflows/branch.yml @@ -13,7 +13,7 @@ jobs: - name: Check PRs if: github.repository == '{{ name }}' run: | - { [[ {% raw %}${{github.event.pull_request.head.repo.full_name }}{% endraw %} == {{ name }} ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] + { [[ {% raw %}${{github.event.pull_request.head.repo.full_name }}{% endraw %} == {{ name }} ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]] # If the above check failed, post a comment on the PR explaining the failure {%- raw %} # NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets diff --git a/nf_core/pipeline-template/.github/workflows/clean-up.yml b/nf_core/pipeline-template/.github/workflows/clean-up.yml new file mode 100644 index 0000000000..427aad5087 --- /dev/null +++ b/nf_core/pipeline-template/.github/workflows/clean-up.yml @@ -0,0 +1,24 @@ +name: "Close user-tagged issues and PRs" +on: + schedule: + - cron: "0 0 * * 0" # Once a week + +jobs: + clean-up: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v7 + with: + stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days." + stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful." 
+ close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity." + days-before-stale: 30 + days-before-close: 20 + days-before-pr-close: -1 + any-of-labels: "awaiting-changes,awaiting-feedback" + exempt-issue-labels: "WIP" + exempt-pr-labels: "WIP" + repo-token: "{% raw %}${{ secrets.GITHUB_TOKEN }}{% endraw %}" diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 46249e9a79..8cf670e808 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -78,7 +78,7 @@ jobs: - uses: actions/setup-python@v4 with: - python-version: "3.7" + python-version: "3.8" architecture: "x64" - name: Install dependencies diff --git a/nf_core/pipeline-template/.pre-commit-config.yaml b/nf_core/pipeline-template/.pre-commit-config.yaml new file mode 100644 index 0000000000..0c31cdb99f --- /dev/null +++ b/nf_core/pipeline-template/.pre-commit-config.yaml @@ -0,0 +1,5 @@ +repos: + - repo: https://github.com/pre-commit/mirrors-prettier + rev: "v2.7.1" + hooks: + - id: prettier diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 0845f6aca0..e2ca15a8e6 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -19,63 +19,76 @@ {% endif -%} {%- if branded -%}[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23{{ short_name }}-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/{{ short_name }}){% endif -%} {%- if branded -%}[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core){% endif -%} +{%- if branded -%}[![Follow on 
Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core){% endif -%} {%- if branded -%}[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core) {% endif -%} ## Introduction - +**{{ name }}** is a bioinformatics pipeline that ... -**{{ name }}** is a bioinformatics best-practice analysis pipeline for {{ description }}. - -The pipeline is built using [Nextflow](https://www.nextflow.io), a workflow tool to run tasks across multiple compute infrastructures in a very portable manner. It uses Docker/Singularity containers making installation trivial and results highly reproducible. The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. Where possible, these processes have been submitted to and installed from [nf-core/modules](https://github.com/nf-core/modules) in order to make them available to all nf-core pipelines, and to everyone within the Nextflow community! - - - -On release, automated continuous integration tests run the pipeline on a full-sized dataset on the AWS cloud infrastructure. This ensures that the pipeline runs on AWS, has sensible resource allocation defaults set to run on real-world datasets, and permits the persistent storage of results to benchmark between pipeline releases and other analysis sources. -{%- if branded -%} -The results obtained from the full-sized test can be viewed on the [nf-core website](https://nf-co.re/{{ short_name }}/results).{% endif %} - -## Pipeline summary + + 1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/)) 2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/)) -## Quick Start +## Usage -1. 
Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`) +> **Note** +> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how +> to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) +> with `-profile test` before running the workflow on actual data. -2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) (you can follow [this tutorial](https://singularity-tutorial.github.io/01-installation/)), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(you can use [`Conda`](https://conda.io/miniconda.html) both to install Nextflow itself and also to manage software within pipelines. Please only use it within pipelines as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_. + - +Now, you can run the pipeline using: - ```bash - nextflow run {{ name }} --input samplesheet.csv --outdir --genome GRCh37 -profile - ``` + + +```bash +nextflow run {{ name }} \ + -profile \ + --input samplesheet.csv \ + --outdir +``` + +> **Warning:** +> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those +> provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; +> see [docs](https://nf-co.re/usage/configuration#custom-configuration-files). {% if branded -%} -## Documentation +For more details, please refer to the [usage documentation](https://nf-co.re/{{ short_name }}/usage) and the [parameter documentation](https://nf-co.re/{{ short_name }}/parameters). 
+ +## Pipeline output -The {{ name }} pipeline comes with documentation about the pipeline [usage](https://nf-co.re/{{ short_name }}/usage), [parameters](https://nf-co.re/{{ short_name }}/parameters) and [output](https://nf-co.re/{{ short_name }}/output). +To see the results of a test run with a full size dataset refer to the [results](https://nf-co.re/{{ short_name }}/results) tab on the nf-core website pipeline page. +For more details about the output files and reports, please refer to the +[output documentation](https://nf-co.re/{{ short_name }}/output). {% endif -%} diff --git a/nf_core/pipeline-template/bin/check_samplesheet.py b/nf_core/pipeline-template/bin/check_samplesheet.py index 11b155723a..4a758fe003 100755 --- a/nf_core/pipeline-template/bin/check_samplesheet.py +++ b/nf_core/pipeline-template/bin/check_samplesheet.py @@ -158,9 +158,6 @@ def sniff_format(handle): peek = read_head(handle) handle.seek(0) sniffer = csv.Sniffer() - if not sniffer.has_header(peek): - logger.critical("The given sample sheet does not appear to contain a header.") - sys.exit(1) dialect = sniffer.sniff(peek) return dialect diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config index c5c691057d..f73c5afaa4 100644 --- a/nf_core/pipeline-template/conf/base.config +++ b/nf_core/pipeline-template/conf/base.config @@ -15,7 +15,7 @@ process { memory = { check_max( 6.GB * task.attempt, 'memory' ) } time = { check_max( 4.h * task.attempt, 'time' ) } - errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' } + errorStrategy = { task.exitStatus in ((130..145) + 104) ?
'retry' : 'finish' } maxRetries = 1 maxErrors = '-1' diff --git a/nf_core/pipeline-template/conf/igenomes.config b/nf_core/pipeline-template/conf/igenomes.config index 7a1b3ac6d3..3f11437759 100644 --- a/nf_core/pipeline-template/conf/igenomes.config +++ b/nf_core/pipeline-template/conf/igenomes.config @@ -36,6 +36,14 @@ params { macs_gsize = "2.7e9" blacklist = "${projectDir}/assets/blacklists/hg38-blacklist.bed" } + 'CHM13' { + fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/WholeGenomeFasta/genome.fa" + bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/BWAIndex/" + bwamem2 = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/BWAmem2Index/" + gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/CHM13/Annotation/Genes/genes.gtf" + gff = "ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/009/914/755/GCF_009914755.1_T2T-CHM13v2.0/GCF_009914755.1_T2T-CHM13v2.0_genomic.gff.gz" + mito_name = "chrM" + } 'GRCm38' { fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa" bwa = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/BWAIndex/version0.6.0/" diff --git a/nf_core/pipeline-template/conf/test_full.config b/nf_core/pipeline-template/conf/test_full.config index d92692fa94..46b165a910 100644 --- a/nf_core/pipeline-template/conf/test_full.config +++ b/nf_core/pipeline-template/conf/test_full.config @@ -10,6 +10,8 @@ ---------------------------------------------------------------------------------------- */ +cleanup = true + params { config_profile_name = 'Full test profile' config_profile_description = 'Full test dataset to check pipeline function' diff --git a/nf_core/pipeline-template/docs/usage.md b/nf_core/pipeline-template/docs/usage.md index 9a171f5aad..73e1132541 100644 --- a/nf_core/pipeline-template/docs/usage.md +++ b/nf_core/pipeline-template/docs/usage.md @@ -75,6 +75,29 @@ work # Directory containing the nextflow working files # Other nextflow hidden files, eg. 
history of pipeline runs and old logs. ``` +If you wish to repeatedly use the same parameters for multiple runs, rather than specifying each flag in the command, you can specify these in a params file. + +Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `. + +> ⚠️ Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args). +> The above pipeline run specified with a params file in yaml format: + +```bash +nextflow run {{ name }} -profile docker -params-file params.yaml +``` + +with `params.yaml` containing: + +```yaml +input: './samplesheet.csv' +outdir: './results/' +genome: 'GRCh37' +fasta: './genome.fasta' +<...> +``` + +You can also generate such `YAML`/`JSON` files via [nf-core/launch](https://nf-co.re/launch). + ### Updating the pipeline  When you run the above command, Nextflow automatically pulls the pipeline code from GitHub and stores it as a cached version. When running the pipeline after this, it will always use the cached version if available - even if the pipeline has been updated since. To make sure that you're running the latest version of the pipeline, make sure that you regularly update the cached version of the pipeline: @@ -91,6 +114,10 @@ First, go to the [{{ name }} releases page](https://github.com/{{ name }}/releas  This version number will be logged in reports when you run the pipeline, so that you'll know what you used when you look back in the future. For example, at the bottom of the MultiQC reports.  +To further assist in reproducibility, you can share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter.
+ +> 💡 If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles. + ## Core Nextflow arguments > **NB:** These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen). @@ -99,7 +126,7 @@ This version number will be logged in reports when you run the pipeline, so that Use this parameter to choose a configuration profile. Profiles can give configuration presets for different compute environments. -Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Conda) - see below. +Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below. > We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported. @@ -126,8 +153,10 @@ If `-profile` is not specified, the pipeline will run locally and expect all sof - A generic configuration profile to be used with [Shifter](https://nersc.gitlab.io/development/shifter/how-to-use/) - `charliecloud` - A generic configuration profile to be used with [Charliecloud](https://hpc.github.io/charliecloud/) +- `apptainer` + - A generic configuration profile to be used with [Apptainer](https://apptainer.org/) - `conda` - - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter or Charliecloud. + - A generic configuration profile to be used with [Conda](https://conda.io/docs/). Please only use Conda as a last resort i.e. 
when it's not possible to run the pipeline with Docker, Singularity, Podman, Shifter, Charliecloud, or Apptainer. ### `-resume` @@ -145,104 +174,20 @@ Specify the path to a specific config file (this is a core Nextflow command). Se Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped. -For example, if the nf-core/rnaseq pipeline is failing after multiple re-submissions of the `STAR_ALIGN` process due to an exit code of `137` this would indicate that there is an out of memory issue: - -```console -[62/149eb0] NOTE: Process `NFCORE_RNASEQ:RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)` terminated with an error exit status (137) -- Execution is retried (1) -Error executing process > 'NFCORE_RNASEQ:RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)' - -Caused by: - Process `NFCORE_RNASEQ:RNASEQ:ALIGN_STAR:STAR_ALIGN (WT_REP1)` terminated with an error exit status (137) - -Command executed: - STAR \ - --genomeDir star \ - --readFilesIn WT_REP1_trimmed.fq.gz \ - --runThreadN 2 \ - --outFileNamePrefix WT_REP1. \ - - -Command exit status: - 137 - -Command output: - (empty) - -Command error: - .command.sh: line 9: 30 Killed STAR --genomeDir star --readFilesIn WT_REP1_trimmed.fq.gz --runThreadN 2 --outFileNamePrefix WT_REP1. 
-Work dir: - /home/pipelinetest/work/9d/172ca5881234073e8d76f2a19c88fb - -Tip: you can replicate the issue by changing to the process work dir and entering the command `bash .command.run` -``` - -#### For beginners - -A first step to bypass this error, you could try to increase the amount of CPUs, memory, and time for the whole pipeline. Therefor you can try to increase the resource for the parameters `--max_cpus`, `--max_memory`, and `--max_time`. Based on the error above, you have to increase the amount of memory. Therefore you can go to the [parameter documentation of rnaseq](https://nf-co.re/rnaseq/3.9/parameters) and scroll down to the `show hidden parameter` button to get the default value for `--max_memory`. In this case 128GB, you than can try to run your pipeline again with `--max_memory 200GB -resume` to skip all process, that were already calculated. If you can not increase the resource of the complete pipeline, you can try to adapt the resource for a single process as mentioned below. - -#### Advanced option on process level - -To bypass this error you would need to find exactly which resources are set by the `STAR_ALIGN` process. The quickest way is to search for `process STAR_ALIGN` in the [nf-core/rnaseq Github repo](https://github.com/nf-core/rnaseq/search?q=process+STAR_ALIGN). -We have standardised the structure of Nextflow DSL2 pipelines such that all module files will be present in the `modules/` directory and so, based on the search results, the file we want is `modules/nf-core/star/align/main.nf`. -If you click on the link to that file you will notice that there is a `label` directive at the top of the module that is set to [`label process_high`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/modules/nf-core/software/star/align/main.nf#L9). 
-The [Nextflow `label`](https://www.nextflow.io/docs/latest/process.html#label) directive allows us to organise workflow processes in separate groups which can be referenced in a configuration file to select and configure subset of processes having similar computing requirements. -The default values for the `process_high` label are set in the pipeline's [`base.config`](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L33-L37) which in this case is defined as 72GB. -Providing you haven't set any other standard nf-core parameters to **cap** the [maximum resources](https://nf-co.re/usage/configuration#max-resources) used by the pipeline then we can try and bypass the `STAR_ALIGN` process failure by creating a custom config file that sets at least 72GB of memory, in this case increased to 100GB. -The custom config below can then be provided to the pipeline via the [`-c`](#-c) parameter as highlighted in previous sections. - -```nextflow -process { - withName: 'NFCORE_RNASEQ:RNASEQ:ALIGN_STAR:STAR_ALIGN' { - memory = 100.GB - } -} -``` - -> **NB:** We specify the full process name i.e. `NFCORE_RNASEQ:RNASEQ:ALIGN_STAR:STAR_ALIGN` in the config file because this takes priority over the short name (`STAR_ALIGN`) and allows existing configuration using the full process name to be correctly overridden. -> -> If you get a warning suggesting that the process selector isn't recognised check that the process name has been specified correctly. - -### Updating containers (advanced users) - -The [Nextflow DSL2](https://www.nextflow.io/docs/latest/dsl2.html) implementation of this pipeline uses one container per process which makes it much easier to maintain and update software dependencies. If for some reason you need to use a different version of a particular tool with the pipeline then you just need to identify the `process` name and override the Nextflow `container` definition for that process using the `withName` declaration. 
For example, in the [nf-core/viralrecon](https://nf-co.re/viralrecon) pipeline a tool called [Pangolin](https://github.com/cov-lineages/pangolin) has been used during the COVID-19 pandemic to assign lineages to SARS-CoV-2 genome sequenced samples. Given that the lineage assignments change quite frequently it doesn't make sense to re-release the nf-core/viralrecon everytime a new version of Pangolin has been released. However, you can override the default container used by the pipeline by creating a custom config file and passing it as a command-line argument via `-c custom.config`. - -1. Check the default version used by the pipeline in the module file for [Pangolin](https://github.com/nf-core/viralrecon/blob/a85d5969f9025409e3618d6c280ef15ce417df65/modules/nf-core/software/pangolin/main.nf#L14-L19) -2. Find the latest version of the Biocontainer available on [Quay.io](https://quay.io/repository/biocontainers/pangolin?tag=latest&tab=tags) -3. Create the custom config accordingly: - - - For Docker: - - ```nextflow - process { - withName: PANGOLIN { - container = 'quay.io/biocontainers/pangolin:3.0.5--pyhdfd78af_0' - } - } - ``` +To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website. - - For Singularity: +### Custom Containers - ```nextflow - process { - withName: PANGOLIN { - container = 'https://depot.galaxyproject.org/singularity/pangolin:3.0.5--pyhdfd78af_0' - } - } - ``` +In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version may be out of date.
- - For Conda: +To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website. - - ```nextflow - process { - withName: PANGOLIN { - conda = 'bioconda::pangolin=3.0.5' - } - } - ``` +### Custom Tool Arguments -> **NB:** If you wish to periodically update individual tool-specific results (e.g. Pangolin) generated by the pipeline then you must ensure to keep the `work/` directory otherwise the `-resume` ability of the pipeline will be compromised and it will restart from scratch. +A pipeline might not always support every possible argument or option of a particular tool used in the pipeline. Fortunately, nf-core pipelines provide some freedom to users to insert additional parameters that the pipeline does not include by default. {% if nf_core_configs -%} +To learn how to provide additional arguments to a particular tool of the pipeline, please see the [customising tool arguments](https://nf-co.re/docs/usage/configuration#customising-tool-arguments) section of the nf-core website. ### nf-core/configs diff --git a/nf_core/pipeline-template/lib/NfcoreSchema.groovy b/nf_core/pipeline-template/lib/NfcoreSchema.groovy index 33cd4f6e8d..9b34804d6d 100755 --- a/nf_core/pipeline-template/lib/NfcoreSchema.groovy +++ b/nf_core/pipeline-template/lib/NfcoreSchema.groovy @@ -2,6 +2,7 @@ // This file holds several functions used to perform JSON parameter validation, help and summary rendering for the nf-core pipeline template.
// +import nextflow.Nextflow import org.everit.json.schema.Schema import org.everit.json.schema.loader.SchemaLoader import org.everit.json.schema.ValidationException @@ -83,6 +84,7 @@ class NfcoreSchema { 'stub-run', 'test', 'w', + 'with-apptainer', 'with-charliecloud', 'with-conda', 'with-dag', @@ -177,7 +179,7 @@ class NfcoreSchema { } if (has_error) { - System.exit(1) + Nextflow.error('Exiting!') } } diff --git a/nf_core/pipeline-template/lib/WorkflowMain.groovy b/nf_core/pipeline-template/lib/WorkflowMain.groovy index 05db418b2d..4cb7409fb9 100755 --- a/nf_core/pipeline-template/lib/WorkflowMain.groovy +++ b/nf_core/pipeline-template/lib/WorkflowMain.groovy @@ -2,6 +2,8 @@ // This file holds several functions specific to the main.nf workflow in the {{ name }} pipeline // +import nextflow.Nextflow + class WorkflowMain { // @@ -21,7 +23,7 @@ class WorkflowMain { // // Generate help string // - public static String help(workflow, params, log) { + public static String help(workflow, params) { {% if igenomes -%} def command = "nextflow run ${workflow.manifest.name} --input samplesheet.csv --genome GRCh37 -profile docker" {% else -%} @@ -38,7 +40,7 @@ class WorkflowMain { // // Generate parameter summary log string // - public static String paramsSummaryLog(workflow, params, log) { + public static String paramsSummaryLog(workflow, params) { def summary_log = '' summary_log += NfcoreTemplate.logo(workflow, params.monochrome_logs) summary_log += NfcoreSchema.paramsSummaryLog(workflow, params) @@ -53,7 +55,7 @@ class WorkflowMain { public static void initialise(workflow, params, log) { // Print help to screen if required if (params.help) { - log.info help(workflow, params, log) + log.info help(workflow, params) System.exit(0) } @@ -65,7 +67,7 @@ class WorkflowMain { } // Print parameter summary log to screen - log.info paramsSummaryLog(workflow, params, log) + log.info paramsSummaryLog(workflow, params) // Validate workflow parameters via the JSON schema if 
(params.validate_params) { @@ -85,8 +87,7 @@ class WorkflowMain { // Check input has been provided if (!params.input) { - log.error "Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'" - System.exit(1) + Nextflow.error("Please provide an input samplesheet to the pipeline e.g. '--input samplesheet.csv'") } } {% if igenomes -%} diff --git a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy index 252f127d80..6e577a669c 100755 --- a/nf_core/pipeline-template/lib/WorkflowPipeline.groovy +++ b/nf_core/pipeline-template/lib/WorkflowPipeline.groovy @@ -2,6 +2,7 @@ // This file holds several functions specific to the workflow/{{ short_name }}.nf in the {{ name }} pipeline // +import nextflow.Nextflow import groovy.text.SimpleTemplateEngine class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { @@ -15,8 +16,7 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { {% endif %} if (!params.fasta) { - log.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file." - System.exit(1) + Nextflow.error "Genome fasta file not specified with e.g. '--fasta genome.fa' or via a detectable config file." 
} } @@ -63,19 +63,19 @@ class Workflow{{ short_name[0]|upper }}{{ short_name[1:] }} { return description_html } + {%- if igenomes %} - {%- if igenomes -%} // // Exit pipeline if incorrect --genome key provided // private static void genomeExistsError(params, log) { if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) { - log.error "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + + def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" + " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" + " Currently, the available genome keys are:\n" + " ${params.genomes.keySet().join(", ")}\n" + "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - System.exit(1) + Nextflow.error(error_string) } } {% endif -%}} diff --git a/nf_core/pipeline-template/main.nf b/nf_core/pipeline-template/main.nf index 74dddd590d..52d8b1bb38 100644 --- a/nf_core/pipeline-template/main.nf +++ b/nf_core/pipeline-template/main.nf @@ -4,7 +4,7 @@ {{ name }} ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Github : https://github.com/{{ name }} -{% if branded %} +{%- if branded %} Website: https://nf-co.re/{{ short_name }} Slack : https://nfcore.slack.com/channels/{{ short_name }} {% endif -%} diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 08116ecbac..b5c31d0939 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -7,7 +7,7 @@ "nf-core": { "custom/dumpsoftwareversions": { "branch": "master", - "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", + "git_sha": "76cc4938c1f6ea5c7d83fed1eeffc146787f9543", "installed_by": ["modules"] }, "fastqc": { @@ -17,7 +17,7 @@ }, "multiqc": { "branch": "master", - "git_sha": "c8e35eb2055c099720a75538d1b8adb3fb5a464c", + "git_sha": 
"f2d63bd5b68925f98f572eed70993d205cc694b7", "installed_by": ["modules"] } } diff --git a/nf_core/pipeline-template/modules/local/samplesheet_check.nf b/nf_core/pipeline-template/modules/local/samplesheet_check.nf index 5d25800775..77be6dfff4 100644 --- a/nf_core/pipeline-template/modules/local/samplesheet_check.nf +++ b/nf_core/pipeline-template/modules/local/samplesheet_check.nf @@ -5,7 +5,7 @@ process SAMPLESHEET_CHECK { conda "conda-forge::python=3.8.3" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/python:3.8.3' : - 'quay.io/biocontainers/python:3.8.3' }" + 'biocontainers/python:3.8.3' }" input: path samplesheet diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf index 3df21765b9..800a60991a 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/main.nf @@ -2,10 +2,10 @@ process CUSTOM_DUMPSOFTWAREVERSIONS { label 'process_single' // Requires `pyyaml` which does not have a dedicated container but is in the MultiQC container - conda "bioconda::multiqc=1.13" + conda "bioconda::multiqc=1.14" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.14--pyhdfd78af_0' }" input: path versions diff --git a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml index 60b546a012..c32657de7a 100644 --- a/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/custom/dumpsoftwareversions/meta.yml @@ -1,7 +1,9 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json name: custom_dumpsoftwareversions description: Custom module used to dump software versions within the nf-core pipeline template keywords: - custom + - dump - version tools: - custom: diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 68f66bea74..4b604749f5 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -1,10 +1,10 @@ process MULTIQC { label 'process_single' - conda "bioconda::multiqc=1.13" + conda "bioconda::multiqc=1.14" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/multiqc:1.13--pyhdfd78af_0' : - 'quay.io/biocontainers/multiqc:1.13--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/multiqc:1.14--pyhdfd78af_0' : + 'quay.io/biocontainers/multiqc:1.14--pyhdfd78af_0' }" input: path multiqc_files, stageAs: "?/*" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml index ebc29b279d..f93b5ee519 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/meta.yml @@ -1,3 +1,4 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/yaml-schema.json name: MultiQC description: Aggregate results from bioinformatics analyses across many samples into a single report keywords: @@ -37,7 +38,7 @@ output: description: MultiQC report file pattern: "multiqc_report.html" - data: - type: dir + type: directory description: MultiQC data dir pattern: "multiqc_data" - plots: diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 5cbb076b46..4ef0fcd5e8 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -81,7 +81,11 @@ try { {% endif %} profiles { - debug { process.beforeScript = 'echo $HOSTNAME' } + debug { + dumpHashes = true + process.beforeScript = 'echo $HOSTNAME' + cleanup = false + } conda { conda.enabled = true docker.enabled = false @@ -89,6 +93,7 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + apptainer.enabled = false } mamba { conda.enabled = true @@ -98,14 +103,18 @@ profiles { podman.enabled = false shifter.enabled = false charliecloud.enabled = false + apptainer.enabled = false } docker { docker.enabled = true + docker.registry = 'quay.io' docker.userEmulation = true + conda.enabled = false singularity.enabled = false podman.enabled = false 
shifter.enabled = false charliecloud.enabled = false + apptainer.enabled = false } arm { docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' @@ -113,31 +122,49 @@ profiles { singularity { singularity.enabled = true singularity.autoMounts = true + conda.enabled = false docker.enabled = false podman.enabled = false shifter.enabled = false charliecloud.enabled = false + apptainer.enabled = false } podman { podman.enabled = true + podman.registry = 'quay.io' + conda.enabled = false docker.enabled = false singularity.enabled = false shifter.enabled = false charliecloud.enabled = false + apptainer.enabled = false } shifter { shifter.enabled = true + conda.enabled = false docker.enabled = false singularity.enabled = false podman.enabled = false charliecloud.enabled = false + apptainer.enabled = false } charliecloud { charliecloud.enabled = true + conda.enabled = false docker.enabled = false singularity.enabled = false podman.enabled = false shifter.enabled = false + apptainer.enabled = false + } + apptainer { + apptainer.enabled = true + conda.enabled = false + docker.enabled = false + singularity.enabled = false + podman.enabled = false + shifter.enabled = false + charliecloud.enabled = false } gitpod { executor.name = 'local' diff --git a/nf_core/pipeline-template/tower.yml b/nf_core/pipeline-template/tower.yml new file mode 100644 index 0000000000..787aedfe92 --- /dev/null +++ b/nf_core/pipeline-template/tower.yml @@ -0,0 +1,5 @@ +reports: + multiqc_report.html: + display: "MultiQC HTML report" + samplesheet.csv: + display: "Auto-created samplesheet with collated metadata and FASTQ paths" diff --git a/nf_core/refgenie.py b/nf_core/refgenie.py index a10e4fecdf..b666844699 100644 --- a/nf_core/refgenie.py +++ b/nf_core/refgenie.py @@ -2,6 +2,7 @@ Update a nextflow.config file with refgenie genomes """ +import json import logging import os import re @@ -10,6 +11,7 @@ import rich import rich.traceback +import yaml import nf_core.utils @@ -45,6 +47,7 @@ def 
_print_nf_config(rgc): """ abg = rgc.list_assets_by_genome() genomes_str = "" + alias_translations = _get_alias_translation_file(rgc) for genome, asset_list in abg.items(): genomes_str += f" '{genome}' {{\n" for asset in asset_list: @@ -54,6 +57,10 @@ def _print_nf_config(rgc): except Exception: log.warning(f"{genome}/{asset} is incomplete, ignoring...") else: + # Translate an alias name to the alias used in the pipeline + if asset in alias_translations.keys(): + log.info(f"Translating refgenie asset alias {asset} to {alias_translations[asset]}.") + asset = alias_translations[asset] genomes_str += f' {asset.ljust(20, " ")} = "{pth}"\n' genomes_str += " }\n" @@ -100,6 +107,38 @@ def _update_nextflow_home_config(refgenie_genomes_config_file, nxf_home): log.info(f"Created new nextflow config file: {nxf_home_config}") +def _get_alias_translation_file(rgc): + """ + Read a file containing alias translations. + + Alias translation file should be located in the same folder as the refgenie `genome_config.yaml` file, + the path is set to $REFGENIE environment variable by `refgenie init`. + Alias translation file should be named `alias_translations.yaml` + + Input file contains the name of refgenie server aliases as keys and the name of the respective nf-core pipeline aliases as values. 
+ Such as: + ensembl_gtf: gtf + star_index: star + """ + translations = {} + + if "REFGENIE" in os.environ: + refgenie_genomes_config_path = os.environ.get("REFGENIE") + refgenie_genomes_config_directory = Path(refgenie_genomes_config_path).parents[0] + elif "genome_folder" in rgc: + refgenie_genomes_config_directory = Path(rgc["genome_folder"]) + else: + return translations + + try: + with open(refgenie_genomes_config_directory / "alias_translations.yaml") as yaml_file: + translations = yaml.load(yaml_file, Loader=yaml.Loader) + except FileNotFoundError: + pass + + return translations + + def update_config(rgc): """ Update the genomes.config file after a local refgenie database has been updated diff --git a/nf_core/schema.py b/nf_core/schema.py index a50a1ed789..ba88e762ea 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -6,17 +6,20 @@ import json import logging import os +import tempfile import webbrowser import jinja2 import jsonschema import markdown +import rich.console import yaml from rich.prompt import Confirm +from rich.syntax import Syntax import nf_core.list import nf_core.utils -from nf_core.lint_utils import dump_json_with_prettier +from nf_core.lint_utils import dump_json_with_prettier, run_prettier_on_file log = logging.getLogger(__name__) @@ -464,13 +467,21 @@ def print_documentation( if format == "html": output = self.markdown_to_html(output) - # Print to file - if output_fn: + with tempfile.NamedTemporaryFile(mode="w+") as fh: + fh.write(output) + run_prettier_on_file(fh.name) + fh.seek(0) + prettified_docs = fh.read() + + if not output_fn: + console = rich.console.Console() + console.print("\n", Syntax(prettified_docs, format), "\n") + else: if os.path.exists(output_fn) and not force: log.error(f"File '{output_fn}' exists! 
Please delete first, or use '--force'") return - with open(output_fn, "w") as file: - file.write(output) + with open(output_fn, "w") as fh: + fh.write(prettified_docs) log.info(f"Documentation written to '{output_fn}'") # Return as a string @@ -486,46 +497,55 @@ def schema_to_markdown(self, columns): for definition in self.schema.get("definitions", {}).values(): out += f"\n## {definition.get('title', {})}\n\n" out += f"{definition.get('description', '')}\n\n" - out += "".join([f"| {column.title()} " for column in columns]) - out += "|\n" - out += "".join(["|-----------" for columns in columns]) - out += "|\n" - for p_key, param in definition.get("properties", {}).items(): - for column in columns: - if column == "parameter": - out += f"| `{p_key}` " - elif column == "description": - out += f"| {param.get('description', '')} " - if param.get("help_text", "") != "": - out += f"
Help
{param['help_text']}" - elif column == "type": - out += f"| `{param.get('type', '')}` " - else: - out += f"| {param.get(column, '')} " - out += "|\n" + required = definition.get("required", []) + properties = definition.get("properties", {}) + param_table = self.markdown_param_table(properties, required, columns) + out += param_table # Top-level ungrouped parameters if len(self.schema.get("properties", {})) > 0: out += "\n## Other parameters\n\n" - out += "".join([f"| {column.title()} " for column in columns]) - out += "|\n" - out += "".join(["|-----------" for columns in columns]) - out += "|\n" + required = self.schema.get("required", []) + properties = self.schema.get("properties", {}) + param_table = self.markdown_param_table(properties, required, columns) + out += param_table - for p_key, param in self.schema.get("properties", {}).items(): - for column in columns: - if column == "parameter": - out += f"| `{p_key}` " - elif column == "description": - out += f"| {param.get('description', '')} " - if param.get("help_text", "") != "": - out += f"
Help{param['help_text']}
" - elif column == "type": - out += f"| `{param.get('type', '')}` " - else: - out += f"| {param.get(column, '')} " - out += "|\n" + return out + + def markdown_param_table(self, properties, required, columns): + """Creates a markdown table for params from jsonschema properties section + + Args: + properties (dict): A jsonschema properties dictionary + required (list): A list of the required fields. + Should come from the same level of the jsonschema as properties + columns (list): A list of columns to write + Returns: + str: A string with the markdown table + """ + out = "" + out += "".join([f"| {column.title()} " for column in columns]) + out += "|\n" + out += "".join(["|-----------" for _ in columns]) + out += "|\n" + for p_key, param in properties.items(): + for column in columns: + if column == "parameter": + out += f"| `{p_key}` " + elif column == "description": + desc = param.get("description", "").replace("\n", "
") + out += f"| {desc} " + if param.get("help_text", "") != "": + help_txt = param["help_text"].replace("\n", "
") + out += f"
Help{help_txt}
" + elif column == "type": + out += f"| `{param.get('type', '')}` " + elif column == "required": + out += f"| {p_key in required or ''} " + else: + out += f"| {param.get(column, '')} " + out += "|\n" return out def markdown_to_html(self, markdown_str): diff --git a/nf_core/subworkflow-template/subworkflows/meta.yml b/nf_core/subworkflow-template/subworkflows/meta.yml index 3db57b6fb1..4c5b454ddf 100644 --- a/nf_core/subworkflow-template/subworkflows/meta.yml +++ b/nf_core/subworkflow-template/subworkflows/meta.yml @@ -1,3 +1,4 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/subworkflows/yaml-schema.json name: "{{ subworkflow_name }}" ## TODO nf-core: Add a description of the subworkflow and list keywords description: Sort SAM/BAM/CRAM file diff --git a/nf_core/subworkflow-template/tests/main.nf b/nf_core/subworkflow-template/tests/main.nf index 2bf63da2f3..f8c9b10dcb 100644 --- a/nf_core/subworkflow-template/tests/main.nf +++ b/nf_core/subworkflow-template/tests/main.nf @@ -4,7 +4,7 @@ nextflow.enable.dsl = 2 include { {{ subworkflow_name|upper }} } from '../../../../subworkflows/{{ org }}/{{ subworkflow_dir }}/main.nf' -workflow test_{{ subworkflow_name }} { +workflow test_{{ component_name_underscore }} { {% if has_meta %} input = [ [ id:'test' ], // meta map diff --git a/nf_core/subworkflows/create.py b/nf_core/subworkflows/create.py index e61f0c6c8d..963076455e 100644 --- a/nf_core/subworkflows/create.py +++ b/nf_core/subworkflows/create.py @@ -1,114 +1,22 @@ -""" -The SubworkflowCreate class handles generating of subworkflow templates -""" - -from __future__ import print_function - import logging -import os - -import yaml -import nf_core -import nf_core.components.components_create -import nf_core.utils -from nf_core.components.components_command import ComponentCommand +from nf_core.components.create import ComponentCreate log = logging.getLogger(__name__) -class SubworkflowCreate(ComponentCommand): +class 
SubworkflowCreate(ComponentCreate): def __init__( self, - directory=".", - subworkflow="", + pipeline_dir, + component="", author=None, force=False, ): - super().__init__("subworkflows", directory) - self.directory = directory - self.subworkflow = subworkflow - self.author = author - self.force_overwrite = force - self.file_paths = {} - - def create(self): - """ - Create a new subworkflow from the nf-core template. - - The subworkflow should be named as the main file type it operates on and a short description of the task performed - e.g bam_sort or bam_sort_samtools, respectively. - - If is a pipeline, this function creates a file called: - '/subworkflows/local/subworkflow_name.nf' - - If is a clone of nf-core/modules, it creates or modifies the following files: - - subworkflows/nf-core/subworkflow_name/ - * main.nf - * meta.yml - tests/subworkflows/nf-core/subworkflow_name/ - * main.nf - * test.yml - * nextflow.config - tests/config/pytest_modules.yml - - """ - - # Check whether the given directory is a nf-core pipeline or a clone of nf-core/modules - log.info(f"Repository type: [blue]{self.repo_type}") - if self.directory != ".": - log.info(f"Base directory: '{self.directory}'") - - log.info( - "[yellow]Press enter to use default values [cyan bold](shown in brackets)[/] [yellow]or type your own responses. " - "ctrl+click [link=https://youtu.be/dQw4w9WgXcQ]underlined text[/link] to open links." 
- ) - - # Collect module info via prompt if empty or invalid - self.subworkflow = nf_core.components.components_create.collect_name_prompt( - self.subworkflow, self.component_type - ) - - # Determine the tool name - self.subworkflow_name = self.subworkflow - self.subworkflow_dir = self.subworkflow - - # Check existence of directories early for fast-fail - self.file_paths = nf_core.components.components_create.get_component_dirs( - self.component_type, - self.repo_type, - self.directory, - self.org, - self.subworkflow_name, - None, - None, - self.subworkflow_dir, - self.force_overwrite, + super().__init__( + "subworkflows", + pipeline_dir, + component, + author, + force=force, ) - - # Prompt for GitHub username - self.author = nf_core.components.components_create.get_username(self.author) - - # Create subworkflow template with jinja2 - nf_core.components.components_create.render_template(self.component_type, vars(self), self.file_paths) - - if self.repo_type == "modules": - # Add entry to pytest_modules.yml - try: - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "r") as fh: - pytest_modules_yml = yaml.safe_load(fh) - pytest_modules_yml["subworkflows/" + self.subworkflow] = [ - f"subworkflows/{self.org}/{self.subworkflow}/**", - f"tests/subworkflows/{self.org}/{self.subworkflow}/**", - ] - pytest_modules_yml = dict(sorted(pytest_modules_yml.items())) - with open(os.path.join(self.directory, "tests", "config", "pytest_modules.yml"), "w") as fh: - yaml.dump(pytest_modules_yml, fh, sort_keys=True, Dumper=nf_core.utils.custom_yaml_dumper()) - except FileNotFoundError: - raise UserWarning("Could not open 'tests/config/pytest_modules.yml' file!") - - new_files = list(self.file_paths.values()) - if self.repo_type == "modules": - new_files.append(os.path.join(self.directory, "tests", "config", "pytest_modules.yml")) - log.info("Created / edited following files:\n " + "\n ".join(new_files)) diff --git 
a/nf_core/subworkflows/test_yml_builder.py b/nf_core/subworkflows/test_yml_builder.py index 3090b22c05..2ad50d4e25 100644 --- a/nf_core/subworkflows/test_yml_builder.py +++ b/nf_core/subworkflows/test_yml_builder.py @@ -139,7 +139,7 @@ def scrape_workflow_entry_points(self): if match: self.entry_points.append(match.group(1)) if len(self.entry_points) == 0: - raise UserWarning("No workflow entry points found in 'self.module_test_main'") + raise UserWarning(f"No workflow entry points found in '{self.subworkflow_test_main}'") def build_all_tests(self): """ @@ -195,7 +195,7 @@ def build_single_test(self, entry_point): ).strip() ep_test["tags"] = [t.strip() for t in prompt_tags.split(",")] - ep_test["files"] = self.get_md5_sums(entry_point, ep_test["command"]) + ep_test["files"] = self.get_md5_sums(ep_test["command"]) return ep_test @@ -230,16 +230,9 @@ def check_if_empty_file(self, fname): g_f = gzip.GzipFile(fileobj=fh, mode="rb") if g_f.read() == b"": return True - except Exception as e: - # Python 3.8+ - if hasattr(gzip, "BadGzipFile"): - if isinstance(e, gzip.BadGzipFile): - pass - # Python 3.7 - elif isinstance(e, OSError): - pass - else: - raise e + except gzip.BadGzipFile: + pass + return False def _md5(self, fname): @@ -279,7 +272,7 @@ def create_test_file_dict(self, results_dir, is_repeat=False): return test_files - def get_md5_sums(self, entry_point, command, results_dir=None, results_dir_repeat=None): + def get_md5_sums(self, command, results_dir=None, results_dir_repeat=None): """ Recursively go through directories and subdirectories and generate tuples of (, ) diff --git a/nf_core/utils.py b/nf_core/utils.py index b60f61fff0..36c39db50a 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -727,7 +727,8 @@ def get_tag_date(tag_date): docker_image = all_docker[k]["image"] singularity_image = all_singularity[k]["image"] current_date = date - return docker_image["image_name"], singularity_image["image_name"] + docker_image_name = 
docker_image["image_name"].lstrip("quay.io/") + return docker_image_name, singularity_image["image_name"] except TypeError: raise LookupError(f"Could not find docker or singularity container for {package}") elif response.status_code != 404: @@ -872,7 +873,6 @@ def get_repo_releases_branches(pipeline, wfs): # Repo is a nf-core pipeline for wf in wfs.remote_workflows: if wf.full_name == pipeline or wf.name == pipeline: - # Set to full name just in case it didn't have the nf-core/ prefix pipeline = wf.full_name @@ -883,7 +883,6 @@ def get_repo_releases_branches(pipeline, wfs): # Arbitrary GitHub repo else: if pipeline.count("/") == 1: - # Looks like a GitHub address - try working with this repo log.debug( f"Pipeline '{pipeline}' not in nf-core, but looks like a GitHub address - fetching releases from API" diff --git a/requirements-dev.txt b/requirements-dev.txt index 42ce780ce4..360f6ff87f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,7 +3,6 @@ isort myst_parser pytest-cov pytest-datafiles -requests-mock +responses Sphinx sphinx-rtd-theme -requests_mock diff --git a/requirements.txt b/requirements.txt index b3d1f251bf..9cc7fc6be5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ click filetype -galaxy-tool-util GitPython jinja2 jsonschema>=3.0 @@ -15,6 +14,6 @@ questionary>=1.8.0 refgenie requests requests_cache -rich-click>=1.0.0 -rich>=10.7.0 +rich-click>=1.6.1 +rich>=13.3.1 tabulate diff --git a/setup.py b/setup.py index cc83e90eed..9b0d9fb6af 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "2.7.2" +version = "2.8" with open("README.md") as f: readme = f.read() @@ -35,7 +35,7 @@ "console_scripts": ["nf-core=nf_core.__main__:run_nf_core"], "refgenie.hooks.post_update": ["nf-core-refgenie=nf_core.refgenie:update_config"], }, - python_requires=">=3.7, <4", + python_requires=">=3.8, <4", install_requires=required, packages=find_packages(exclude=("docs")), 
include_package_data=True, diff --git a/tests/data/pipeline_create_template.yml b/tests/data/pipeline_create_template.yml new file mode 100644 index 0000000000..12e48e9c27 --- /dev/null +++ b/tests/data/pipeline_create_template.yml @@ -0,0 +1 @@ +prefix: testprefix diff --git a/tests/modules/create.py b/tests/modules/create.py index 61a8777b14..98e498c1b0 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,55 +1,65 @@ import os import pytest -import requests_mock +import requests_cache +import responses import nf_core.modules -from tests.utils import mock_api_calls +from tests.utils import mock_anaconda_api_calls, mock_biocontainers_api_calls def test_modules_create_succeed(self): """Succeed at creating the TrimGalore! module""" - with requests_mock.Mocker() as mock: - mock_api_calls(mock, "trim-galore", "0.6.7") + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") module_create = nf_core.modules.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", True, True, conda_name="trim-galore" ) - module_create.create() + with requests_cache.disabled(): + module_create.create() assert os.path.exists(os.path.join(self.pipeline_dir, "modules", "local", "trimgalore.nf")) def test_modules_create_fail_exists(self): """Fail at creating the same module twice""" - with requests_mock.Mocker() as mock: - mock_api_calls(mock, "trim-galore", "0.6.7") + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "trim-galore", "0.6.7") + mock_biocontainers_api_calls(rsps, "trim-galore", "0.6.7") module_create = nf_core.modules.ModuleCreate( self.pipeline_dir, "trimgalore", "@author", "process_single", False, False, conda_name="trim-galore" ) - module_create.create() - with pytest.raises(UserWarning) as excinfo: + with requests_cache.disabled(): module_create.create() + with pytest.raises(UserWarning) as excinfo: + with 
requests_cache.disabled(): + module_create.create() assert "Module file exists already" in str(excinfo.value) def test_modules_create_nfcore_modules(self): """Create a module in nf-core/modules clone""" - with requests_mock.Mocker() as mock: - mock_api_calls(mock, "fastqc", "0.11.9") + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "fastqc", "0.11.9") + mock_biocontainers_api_calls(rsps, "fastqc", "0.11.9") module_create = nf_core.modules.ModuleCreate( self.nfcore_modules, "fastqc", "@author", "process_low", False, False ) - module_create.create() + with requests_cache.disabled(): + module_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "fastqc", "main.nf")) assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "fastqc", "main.nf")) def test_modules_create_nfcore_modules_subtool(self): """Create a tool/subtool module in a nf-core/modules clone""" - with requests_mock.Mocker() as mock: - mock_api_calls(mock, "star", "2.8.10a") + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "star", "2.8.10a") + mock_biocontainers_api_calls(rsps, "star", "2.8.10a") module_create = nf_core.modules.ModuleCreate( self.nfcore_modules, "star/index", "@author", "process_medium", False, False ) - module_create.create() + with requests_cache.disabled(): + module_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "main.nf")) assert os.path.exists(os.path.join(self.nfcore_modules, "tests", "modules", "nf-core", "star", "index", "main.nf")) diff --git a/tests/modules/create_test_yml.py b/tests/modules/create_test_yml.py index d444ff841a..243378af78 100644 --- a/tests/modules/create_test_yml.py +++ b/tests/modules/create_test_yml.py @@ -11,19 +11,19 @@ @with_temporary_folder def test_modules_custom_yml_dumper(self, out_dir): """Try to create a yml file with the custom yml dumper""" - yml_output_path = 
os.path.join(out_dir, "test.yml") + yml_output_path = Path(out_dir, "test.yml") meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) meta_builder.test_yml_output_path = yml_output_path meta_builder.tests = [{"testname": "myname"}] meta_builder.print_test_yml() - assert os.path.isfile(yml_output_path) + assert Path(yml_output_path).is_file() @with_temporary_folder def test_modules_test_file_dict(self, test_file_dir): """Create dict of test files and create md5 sums""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") test_files = meta_builder.create_test_file_dict(test_file_dir) assert len(test_files) == 1 @@ -34,7 +34,7 @@ def test_modules_test_file_dict(self, test_file_dir): def test_modules_create_test_yml_get_md5(self, test_file_dir): """Get md5 sums from a dummy output""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("test/tool", self.pipeline_dir, False, "./", False, True) - with open(os.path.join(test_file_dir, "test_file.txt"), "w") as fh: + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: fh.write("this line is just for testing") test_files = meta_builder.get_md5_sums(command="dummy", results_dir=test_file_dir, results_dir_repeat=test_file_dir) assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" @@ -43,9 +43,7 @@ def test_modules_create_test_yml_get_md5(self, test_file_dir): def test_modules_create_test_yml_entry_points(self): """Test extracting test entry points from a main.nf file""" meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", self.pipeline_dir, False, "./", False, True) - meta_builder.module_test_main = os.path.join( - self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf" - ) + 
meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", "nf-core", "bpipe", "test", "main.nf") meta_builder.scrape_workflow_entry_points() assert meta_builder.entry_points[0] == "test_bpipe_test" @@ -55,7 +53,7 @@ def test_modules_create_test_yml_check_inputs(self): cwd = os.getcwd() os.chdir(self.nfcore_modules) meta_builder = nf_core.modules.ModulesTestYmlBuilder("bpipe/test", ".", False, "./", False, True) - meta_builder.module_test_main = os.path.join(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") + meta_builder.module_test_main = Path(self.nfcore_modules, "tests", "modules", "bpipe", "test", "main.nf") with pytest.raises(UserWarning) as excinfo: meta_builder.check_inputs() os.chdir(cwd) diff --git a/tests/modules/info.py b/tests/modules/info.py index 6c5b1063f1..2dbd48b240 100644 --- a/tests/modules/info.py +++ b/tests/modules/info.py @@ -38,7 +38,6 @@ def test_modules_info_local(self): """Test getting info about a locally installed module""" self.mods_install.install("trimgalore") mods_info = nf_core.modules.ModuleInfo(self.pipeline_dir, "trimgalore") - mods_info.local = True mods_info_output = mods_info.get_component_info() console = Console(record=True) console.print(mods_info_output) @@ -47,6 +46,7 @@ def test_modules_info_local(self): assert "Module: trimgalore" in output assert "Inputs" in output assert "Outputs" in output + assert "Location" in output def test_modules_info_in_modules_repo(self): diff --git a/tests/modules/install.py b/tests/modules/install.py index d01459f142..deca31204e 100644 --- a/tests/modules/install.py +++ b/tests/modules/install.py @@ -6,6 +6,7 @@ from nf_core.modules.modules_json import ModulesJson from ..utils import ( + GITLAB_BRANCH_ORG_PATH_BRANCH, GITLAB_BRANCH_TEST_BRANCH, GITLAB_REPO, GITLAB_URL, @@ -83,3 +84,12 @@ def test_modules_install_tracking(self): assert mod_json["repos"]["https://github.com/nf-core/modules.git"]["modules"]["nf-core"]["trimgalore"][ "installed_by" ] == 
["modules"] + + +def test_modules_install_alternate_remote(self): + """Test installing a module from a different remote with the same organization path""" + install_obj = ModuleInstall(self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_ORG_PATH_BRANCH) + # Install fastqc from GitLab which is also installed from GitHub with the same org_path + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/modules/lint.py b/tests/modules/lint.py index 476481a109..b7aaf610ca 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -4,6 +4,7 @@ import pytest import nf_core.modules +from nf_core.modules.lint import main_nf from ..utils import GITLAB_URL, set_wd from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf @@ -43,7 +44,7 @@ def test_modules_lint_empty(self): def test_modules_lint_new_modules(self): - """lint all modules in nf-core/modules repo clone""" + """lint a new module""" module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) module_lint.lint(print_results=True, all_modules=True) assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" @@ -66,18 +67,17 @@ def test_modules_lint_gitlab_modules(self): self.mods_install_gitlab.install("multiqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) - assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 2 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 def test_modules_lint_multiple_remotes(self): """Lint modules from a different remote""" - self.mods_install.install("fastqc") self.mods_install_gitlab.install("multiqc") module_lint = nf_core.modules.ModuleLint(dir=self.pipeline_dir, remote_url=GITLAB_URL) module_lint.lint(print_results=False, all_modules=True) 
- assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 @@ -103,6 +103,116 @@ def test_modules_lint_patched_modules(self): all_modules=True, ) - assert len(module_lint.failed) == 0 + assert len(module_lint.failed) == 1 assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 + + +# A skeleton object with the passed/warned/failed list attrs +# Use this in place of a ModuleLint object to test behaviour of +# linting methods which don't need the full setup +class MockModuleLint: + def __init__(self): + self.passed = [] + self.warned = [] + self.failed = [] + + self.main_nf = "main_nf" + + +PROCESS_LABEL_GOOD = ( + """ + label 'process_high' + cpus 12 + """, + 1, + 0, + 0, +) +PROCESS_LABEL_NON_ALPHANUMERIC = ( + """ + label 'a:label:with:colons' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_CONFLICTING = ( + """ + label 'process_high' + label 'process_low' + cpus 12 + """, + 0, + 1, + 0, +) +PROCESS_LABEL_GOOD_DUPLICATES = ( + """ + label 'process_high' + label 'process_high' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_GOOD_AND_NONSTANDARD = ( + """ + label 'process_high' + label 'process_extra_label' + cpus 12 + """, + 1, + 1, + 0, +) +PROCESS_LABEL_NONSTANDARD = ( + """ + label 'process_extra_label' + cpus 12 + """, + 0, + 2, + 0, +) +PROCESS_LABEL_NONSTANDARD_DUPLICATES = ( + """ + label process_extra_label + label process_extra_label + cpus 12 + """, + 0, + 3, + 0, +) +PROCESS_LABEL_NONE_FOUND = ( + """ + cpus 12 + """, + 0, + 1, + 0, +) + +PROCESS_LABEL_TEST_CASES = [ + PROCESS_LABEL_GOOD, + PROCESS_LABEL_NON_ALPHANUMERIC, + PROCESS_LABEL_GOOD_CONFLICTING, + PROCESS_LABEL_GOOD_DUPLICATES, + PROCESS_LABEL_GOOD_AND_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD, + PROCESS_LABEL_NONSTANDARD_DUPLICATES, + PROCESS_LABEL_NONE_FOUND, +] + + +def test_modules_lint_check_process_labels(self): + for test_case in PROCESS_LABEL_TEST_CASES: + process, passed, 
warned, failed = test_case + mocked_ModuleLint = MockModuleLint() + main_nf.check_process_labels(mocked_ModuleLint, process.splitlines()) + assert len(mocked_ModuleLint.passed) == passed + assert len(mocked_ModuleLint.warned) == warned + assert len(mocked_ModuleLint.failed) == failed diff --git a/tests/modules/patch.py b/tests/modules/patch.py index 09b892e2c8..95cc2cad95 100644 --- a/tests/modules/patch.py +++ b/tests/modules/patch.py @@ -1,6 +1,7 @@ import os import tempfile from pathlib import Path +from unittest import mock import pytest @@ -16,12 +17,12 @@ testing if the update commands works correctly with patch files """ -ORG_SHA = "775fcd090fb776a0be695044f8ab1af8896c8452" -CORRECT_SHA = "335cd32405568ca3b6d4c05ab1e8a98c21e18a4d" -SUCCEED_SHA = "f1566140c752e9c68fffc189fbe8cb9ee942b3ca" -FAIL_SHA = "1fc8b0f953d915d66ee40d28bc337ff0998d05bd" +ORG_SHA = "002623ccc88a3b0cb302c7d8f13792a95354d9f2" +CORRECT_SHA = "0245a9277d51a47c8aa68d264d294cf45312fab8" +SUCCEED_SHA = "ba15c20c032c549d77c5773659f19c2927daf48e" +FAIL_SHA = "67b642d4471c4005220a342cad3818d5ba2b5a73" BISMARK_ALIGN = "bismark/align" -REPO_NAME = "nf-core" +REPO_NAME = "nf-core-test" PATCH_BRANCH = "patch-tester" REPO_URL = "https://gitlab.com/nf-core/modules-test.git" @@ -112,6 +113,7 @@ def test_create_patch_try_apply_successful(self): """ Test creating a patch file and applying it to a new version of the the files """ + setup_patch(self.pipeline_dir, True) module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) module_path = Path(self.pipeline_dir, module_relpath) @@ -178,6 +180,7 @@ def test_create_patch_try_apply_failed(self): """ Test creating a patch file and applying it to a new version of the the files """ + setup_patch(self.pipeline_dir, True) module_relpath = Path("modules", REPO_NAME, BISMARK_ALIGN) module_path = Path(self.pipeline_dir, module_relpath) @@ -216,6 +219,7 @@ def test_create_patch_update_success(self): Should have the same effect as 
'test_create_patch_try_apply_successful' but uses higher level api """ + setup_patch(self.pipeline_dir, True) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) @@ -277,6 +281,7 @@ def test_create_patch_update_fail(self): """ Test creating a patch file and updating a module when there is a diff conflict """ + setup_patch(self.pipeline_dir, True) module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) @@ -322,3 +327,34 @@ def test_create_patch_update_fail(self): with open(module_path / patch_fn, "r") as fh: new_patch_contents = fh.read() assert patch_contents == new_patch_contents + + +def test_remove_patch(self): + """Test creating a patch when there is no change to the module""" + setup_patch(self.pipeline_dir, True) + + # Try creating a patch file + patch_obj = nf_core.modules.ModulePatch(self.pipeline_dir, GITLAB_URL, PATCH_BRANCH) + patch_obj.patch(BISMARK_ALIGN) + + module_path = Path(self.pipeline_dir, "modules", REPO_NAME, BISMARK_ALIGN) + + # Check that a patch file with the correct name has been created + patch_fn = f"{'-'.join(BISMARK_ALIGN.split('/'))}.diff" + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml", patch_fn} + + # Check the 'modules.json' contains a patch file for the module + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) == Path( + "modules", REPO_NAME, BISMARK_ALIGN, patch_fn + ) + + with mock.patch.object(nf_core.create.questionary, "confirm") as mock_questionary: + mock_questionary.unsafe_ask.return_value = True + patch_obj.remove(BISMARK_ALIGN) + # Check that the diff file has been removed + assert set(os.listdir(module_path)) == {"main.nf", "meta.yml"} + + # Check that the 'modules.json' entry has been removed + modules_json_obj = nf_core.modules.modules_json.ModulesJson(self.pipeline_dir) + assert modules_json_obj.get_patch_fn(BISMARK_ALIGN, REPO_URL, REPO_NAME) is None diff 
--git a/tests/modules/remove.py b/tests/modules/remove.py index b343a02da7..e412fd35a8 100644 --- a/tests/modules/remove.py +++ b/tests/modules/remove.py @@ -17,6 +17,6 @@ def test_modules_remove_trimgalore_uninstalled(self): def test_modules_remove_multiqc_from_gitlab(self): """Test removing multiqc module after installing it from an alternative source""" self.mods_install_gitlab.install("multiqc") - module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core", "multiqc") + module_path = os.path.join(self.mods_install_gitlab.dir, "modules", "nf-core-test", "multiqc") assert self.mods_remove_gitlab.remove("multiqc", force=True) assert os.path.exists(module_path) is False diff --git a/tests/modules/update.py b/tests/modules/update.py index fcfd92fc39..399e9cc12c 100644 --- a/tests/modules/update.py +++ b/tests/modules/update.py @@ -187,7 +187,7 @@ def test_update_with_config_fix_all(self): def test_update_with_config_no_updates(self): """Don't update any nf-core modules""" - self.mods_install_old.install("trimgalore") + assert self.mods_install_old.install("trimgalore") old_mod_json = ModulesJson(self.pipeline_dir).get_modules_json() # Fix the version of all nf-core modules in the .nf-core.yml to an old version diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py index eac4929136..60ee6add9a 100644 --- a/tests/subworkflows/create.py +++ b/tests/subworkflows/create.py @@ -28,7 +28,7 @@ def test_subworkflows_create_fail_exists(self): def test_subworkflows_create_nfcore_modules(self): """Create a subworkflow in nf-core/modules clone""" subworkflow_create = nf_core.subworkflows.SubworkflowCreate( - self.nfcore_modules, "test_subworkflow", "@author", True + self.nfcore_modules, "test_subworkflow", "@author", force=True ) subworkflow_create.create() assert os.path.exists(os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "main.nf")) diff --git a/tests/subworkflows/create_test_yml.py 
b/tests/subworkflows/create_test_yml.py new file mode 100644 index 0000000000..40384b420f --- /dev/null +++ b/tests/subworkflows/create_test_yml.py @@ -0,0 +1,96 @@ +import os +from pathlib import Path +from unittest import mock + +import pytest + +import nf_core.subworkflows + +from ..utils import with_temporary_folder + + +@with_temporary_folder +def test_subworkflows_custom_yml_dumper(self, out_dir): + """Try to create a yml file with the custom yml dumper""" + yml_output_path = Path(out_dir, "test.yml") + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path=yml_output_path, + no_prompts=True, + ) + meta_builder.test_yml_output_path = yml_output_path + meta_builder.tests = [{"testname": "myname"}] + meta_builder.print_test_yml() + assert Path(yml_output_path).is_file() + + +@with_temporary_folder +def test_subworkflows_test_file_dict(self, test_file_dir): + """Create dict of test files and create md5 sums""" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.create_test_file_dict(test_file_dir) + assert len(test_files) == 1 + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + + +@with_temporary_folder +def test_subworkflows_create_test_yml_get_md5(self, test_file_dir): + """Get md5 sums from a dummy output""" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow="test/tool", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + with open(Path(test_file_dir, "test_file.txt"), "w") as fh: + fh.write("this line is just for testing") + test_files = meta_builder.get_md5_sums( + command="dummy", + results_dir=test_file_dir, + 
results_dir_repeat=test_file_dir, + ) + assert test_files[0]["md5sum"] == "2191e06b28b5ba82378bcc0672d01786" + + +def test_subworkflows_create_test_yml_entry_points(self): + """Test extracting test entry points from a main.nf file""" + subworkflow = "test_subworkflow" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow=f"{subworkflow}/test", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + meta_builder.subworkflow_test_main = Path( + self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" + ) + meta_builder.scrape_workflow_entry_points() + assert meta_builder.entry_points[0] == f"test_{subworkflow}" + + +def test_subworkflows_create_test_yml_check_inputs(self): + """Test the check_inputs() function - raise UserWarning because test.yml exists""" + cwd = os.getcwd() + os.chdir(self.nfcore_modules) + subworkflow = "test_subworkflow" + meta_builder = nf_core.subworkflows.SubworkflowTestYmlBuilder( + subworkflow=f"{subworkflow}", + directory=self.pipeline_dir, + test_yml_output_path="./", + no_prompts=True, + ) + meta_builder.subworkflow_test_main = Path( + self.nfcore_modules, "tests", "subworkflows", "nf-core", subworkflow, "main.nf" + ) + with pytest.raises(UserWarning) as excinfo: + meta_builder.check_inputs() + os.chdir(cwd) + assert "Test YAML file already exists!" 
in str(excinfo.value) diff --git a/tests/subworkflows/install.py b/tests/subworkflows/install.py index 6c04c9ad22..dfe71686fb 100644 --- a/tests/subworkflows/install.py +++ b/tests/subworkflows/install.py @@ -9,6 +9,7 @@ GITLAB_BRANCH_TEST_BRANCH, GITLAB_REPO, GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL, with_temporary_folder, ) @@ -140,3 +141,14 @@ def test_subworkflows_install_tracking_added_super_subworkflow(self): "installed_by" ] ) == sorted(["subworkflows", "bam_sort_stats_samtools"]) + + +def test_subworkflows_install_alternate_remote(self): + """Test installing a module from a different remote with the same organization path""" + install_obj = SubworkflowInstall( + self.pipeline_dir, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH + ) + # Install a subworkflow from GitLab which is also installed from GitHub with the same org_path + with pytest.raises(Exception) as excinfo: + install_obj.install("fastqc") + assert "Could not find a 'main.nf' or 'nextflow.config' file" in str(excinfo.value) diff --git a/tests/subworkflows/update.py b/tests/subworkflows/update.py index 29e6cb1179..698086e186 100644 --- a/tests/subworkflows/update.py +++ b/tests/subworkflows/update.py @@ -8,6 +8,7 @@ import nf_core.utils from nf_core.modules.modules_json import ModulesJson from nf_core.modules.modules_repo import NF_CORE_MODULES_NAME, NF_CORE_MODULES_REMOTE +from nf_core.modules.remove import ModuleRemove from nf_core.modules.update import ModuleUpdate from nf_core.subworkflows.update import SubworkflowUpdate @@ -321,6 +322,10 @@ def test_update_change_of_included_modules(self): # Check that tabix/tabix is there assert "tabix/tabix" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "tabix/tabix").is_dir() + # Check that ensemblvep is not there but instead we have ensemblvep/vep (due to a file re-naming) + assert "ensemblvep" not in
mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert "ensemblvep/vep" in mod_json["repos"][NF_CORE_MODULES_REMOTE]["modules"][NF_CORE_MODULES_NAME] + assert Path(self.pipeline_dir, "modules", NF_CORE_MODULES_NAME, "ensemblvep/vep").is_dir() def cmp_component(dir1, dir2): diff --git a/tests/test_cli.py b/tests/test_cli.py index 2bd8af5c59..58c4525a76 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -350,3 +350,20 @@ def test_lint_log_user_warning(self, mock_lint, mock_is_pipeline): assert result.exit_code == 1 assert error_txt in captured_logs.output[-1] assert captured_logs.records[-1].levelname == "ERROR" + + @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + def test_schema_lint(self, mock_get_schema_path): + """Test nf-core schema lint defaults to nextflow_schema.json""" + cmd = ["schema", "lint"] + result = self.invoke_cli(cmd) + assert mock_get_schema_path.called_with("nextflow_schema.json") + assert "nextflow_schema.json" in result.output + + @mock.patch("nf_core.schema.PipelineSchema.get_schema_path") + def test_schema_lint_filename(self, mock_get_schema_path): + """Test nf-core schema lint accepts a filename""" + cmd = ["schema", "lint", "some_other_filename"] + result = self.invoke_cli(cmd) + assert mock_get_schema_path.called_with("some_other_filename") + assert "some_other_filename" in result.output + assert "nextflow_schema.json" not in result.output diff --git a/tests/test_create.py b/tests/test_create.py index baac509d74..cc6bf8ba47 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -2,6 +2,8 @@ """ import os import unittest +from pathlib import Path +from unittest import mock import git @@ -9,17 +11,38 @@ from .utils import with_temporary_folder +TEST_DATA_DIR = Path(__file__).parent / "data" +PIPELINE_TEMPLATE_YML = TEST_DATA_DIR / "pipeline_create_template.yml" + class NfcoreCreateTest(unittest.TestCase): - @with_temporary_folder - def setUp(self, tmp_path): + def setUp(self): 
self.pipeline_name = "nf-core/test" self.pipeline_description = "just for 4w3s0m3 tests" self.pipeline_author = "Chuck Norris" self.pipeline_version = "1.0.0" self.default_branch = "default" - self.pipeline = nf_core.create.PipelineCreate( + def test_pipeline_creation(self): + pipeline = nf_core.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + plain=True, + default_branch=self.default_branch, + ) + + assert pipeline.template_params["name"] == self.pipeline_name + assert pipeline.template_params["description"] == self.pipeline_description + assert pipeline.template_params["author"] == self.pipeline_author + assert pipeline.template_params["version"] == self.pipeline_version + + @with_temporary_folder + def test_pipeline_creation_initiation(self, tmp_path): + pipeline = nf_core.create.PipelineCreate( name=self.pipeline_name, description=self.pipeline_description, author=self.pipeline_author, @@ -30,14 +53,57 @@ def setUp(self, tmp_path): plain=True, default_branch=self.default_branch, ) + pipeline.init_pipeline() + assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + assert not os.path.exists(os.path.join(pipeline.outdir, "pipeline_template.yml")) - def test_pipeline_creation(self): - assert self.pipeline.template_params["name"] == self.pipeline_name - assert self.pipeline.template_params["description"] == self.pipeline_description - assert self.pipeline.template_params["author"] == self.pipeline_author - assert self.pipeline.template_params["version"] == self.pipeline_version - - def test_pipeline_creation_initiation(self): - self.pipeline.init_pipeline() - assert os.path.isdir(os.path.join(self.pipeline.outdir, ".git")) - assert f" {self.default_branch}\n" in git.Repo.init(self.pipeline.outdir).git.branch() + @with_temporary_folder + def 
test_pipeline_creation_initiation_with_yml(self, tmp_path): + pipeline = nf_core.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + outdir=tmp_path, + template_yaml_path=PIPELINE_TEMPLATE_YML, + plain=True, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + + # Check pipeline yml has been dumped and matches input + pipeline_template = os.path.join(pipeline.outdir, "pipeline_template.yml") + assert os.path.exists(pipeline_template) + with open(pipeline_template) as fh: + assert fh.read() == PIPELINE_TEMPLATE_YML.read_text() + + @mock.patch.object(nf_core.create.PipelineCreate, "customize_template") + @mock.patch.object(nf_core.create.questionary, "confirm") + @with_temporary_folder + def test_pipeline_creation_initiation_customize_template(self, mock_questionary, mock_customize, tmp_path): + mock_questionary.unsafe_ask.return_value = True + mock_customize.return_value = {"prefix": "testprefix"} + pipeline = nf_core.create.PipelineCreate( + name=self.pipeline_name, + description=self.pipeline_description, + author=self.pipeline_author, + version=self.pipeline_version, + no_git=False, + force=True, + outdir=tmp_path, + default_branch=self.default_branch, + ) + pipeline.init_pipeline() + assert os.path.isdir(os.path.join(pipeline.outdir, ".git")) + assert f" {self.default_branch}\n" in git.Repo.init(pipeline.outdir).git.branch() + + # Check pipeline yml has been dumped and matches input + pipeline_template = os.path.join(pipeline.outdir, "pipeline_template.yml") + assert os.path.exists(pipeline_template) + with open(pipeline_template) as fh: + assert fh.read() == PIPELINE_TEMPLATE_YML.read_text() diff --git a/tests/test_download.py b/tests/test_download.py index 
4577a83992..e2ae882394 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -18,7 +18,6 @@ class DownloadTest(unittest.TestCase): - # # Tests for 'get_release_hash' # diff --git a/tests/test_modules.py b/tests/test_modules.py index c50c1f2ba8..047369b7c3 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -6,7 +6,8 @@ import tempfile import unittest -import requests_mock +import requests_cache +import responses import nf_core.create import nf_core.modules @@ -18,7 +19,8 @@ GITLAB_URL, OLD_TRIMGALORE_BRANCH, OLD_TRIMGALORE_SHA, - mock_api_calls, + mock_anaconda_api_calls, + mock_biocontainers_api_calls, ) @@ -35,11 +37,24 @@ def create_modules_repo_dummy(tmp_dir): fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) # mock biocontainers and anaconda response - with requests_mock.Mocker() as mock: - mock_api_calls(mock, "bpipe", "0.9.11--hdfd78af_0") + with responses.RequestsMock() as rsps: + mock_anaconda_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") + mock_biocontainers_api_calls(rsps, "bpipe", "0.9.11--hdfd78af_0") # bpipe is a valid package on bioconda that is very unlikely to ever be added to nf-core/modules module_create = nf_core.modules.ModuleCreate(root_dir, "bpipe/test", "@author", "process_single", False, False) - module_create.create() + with requests_cache.disabled(): + module_create.create() + + # Remove doi from meta.yml which makes lint fail + meta_yml = os.path.join(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") + with open(meta_yml, "r") as fh: + lines = fh.readlines() + for line_index in range(len(lines)): + if "doi" in lines[line_index]: + to_pop = line_index + lines.pop(to_pop) + with open(meta_yml, "w") as fh: + fh.writelines(lines) return root_dir @@ -55,13 +70,13 @@ def setUp(self): # Set up the schema root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_dir 
= os.path.join(self.tmp_dir, "mypipeline") + self.pipeline_name = "mypipeline" + self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) nf_core.create.PipelineCreate( - "mypipeline", "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True + self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True ).init_pipeline() # Set up install objects self.mods_install = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=False, force=True) - self.mods_install_alt = nf_core.modules.ModuleInstall(self.pipeline_dir, prompt=True, force=True) self.mods_install_old = nf_core.modules.ModuleInstall( self.pipeline_dir, prompt=False, @@ -73,21 +88,21 @@ def setUp(self): self.mods_install_trimgalore = nf_core.modules.ModuleInstall( self.pipeline_dir, prompt=False, - force=True, + force=False, remote_url=GITLAB_URL, branch=OLD_TRIMGALORE_BRANCH, ) self.mods_install_gitlab = nf_core.modules.ModuleInstall( self.pipeline_dir, prompt=False, - force=True, + force=False, remote_url=GITLAB_URL, branch=GITLAB_DEFAULT_BRANCH, ) self.mods_install_gitlab_old = nf_core.modules.ModuleInstall( self.pipeline_dir, prompt=False, - force=True, + force=False, remote_url=GITLAB_URL, branch=GITLAB_BRANCH_TEST_BRANCH, sha=GITLAB_BRANCH_TEST_OLD_SHA, @@ -146,6 +161,7 @@ def test_modulesrepo_class(self): test_modules_info_remote_gitlab, ) from .modules.install import ( + test_modules_install_alternate_remote, test_modules_install_different_branch_fail, test_modules_install_different_branch_succeed, test_modules_install_emptypipeline, @@ -157,6 +173,7 @@ def test_modulesrepo_class(self): test_modules_install_trimgalore_twice, ) from .modules.lint import ( + test_modules_lint_check_process_labels, test_modules_lint_empty, test_modules_lint_gitlab_modules, test_modules_lint_multiple_remotes, @@ -199,6 +216,7 @@ def test_modulesrepo_class(self): test_create_patch_try_apply_successful, test_create_patch_update_fail, test_create_patch_update_success, + 
test_remove_patch, ) from .modules.remove import ( test_modules_remove_multiqc_from_gitlab, diff --git a/tests/test_mulled.py b/tests/test_mulled.py deleted file mode 100644 index cf0a4fcfc0..0000000000 --- a/tests/test_mulled.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Test the mulled BioContainers image name generation.""" - -import pytest - -from nf_core.modules import MulledImageNameGenerator - - -@pytest.mark.parametrize( - "specs, expected", - [ - (["foo==0.1.2", "bar==1.1"], [("foo", "0.1.2"), ("bar", "1.1")]), - (["foo=0.1.2", "bar=1.1"], [("foo", "0.1.2"), ("bar", "1.1")]), - ], -) -def test_target_parsing(specs, expected): - """Test that valid specifications are correctly parsed into tool, version pairs.""" - assert MulledImageNameGenerator.parse_targets(specs) == expected - - -@pytest.mark.parametrize( - "specs", - [ - ["foo<0.1.2", "bar==1.1"], - ["foo=0.1.2", "bar>1.1"], - ], -) -def test_wrong_specification(specs): - """Test that unexpected version constraints fail.""" - with pytest.raises(ValueError, match="expected format"): - MulledImageNameGenerator.parse_targets(specs) - - -@pytest.mark.parametrize( - "specs", - [ - ["foo==0a.1.2", "bar==1.1"], - ["foo==0.1.2", "bar==1.b1b"], - ], -) -def test_noncompliant_version(specs): - """Test that version string that do not comply with PEP440 fail.""" - with pytest.raises(ValueError, match="PEP440"): - MulledImageNameGenerator.parse_targets(specs) - - -@pytest.mark.parametrize( - "specs, expected", - [ - ( - [("chromap", "0.2.1"), ("samtools", "1.15")], - "mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:bd74d08a359024829a7aec1638a28607bbcd8a58-0", - ), - ( - [("pysam", "0.16.0.1"), ("biopython", "1.78")], - "mulled-v2-3a59640f3fe1ed11819984087d31d68600200c3f:185a25ca79923df85b58f42deb48f5ac4481e91f-0", - ), - ( - [("samclip", "0.4.0"), ("samtools", "1.15")], - "mulled-v2-d057255d4027721f3ab57f6a599a2ae81cb3cbe3:13051b049b6ae536d76031ba94a0b8e78e364815-0", - ), - ], -) -def test_generate_image_name(specs, 
expected): - """Test that a known image name is generated from given targets.""" - assert MulledImageNameGenerator.generate_image_name(specs) == expected diff --git a/tests/test_refgenie.py b/tests/test_refgenie.py index 1ff2683416..73fbcb863f 100644 --- a/tests/test_refgenie.py +++ b/tests/test_refgenie.py @@ -7,6 +7,8 @@ import tempfile import unittest +import yaml + class TestRefgenie(unittest.TestCase): """Class for refgenie tests""" @@ -19,6 +21,7 @@ def setUp(self): self.NXF_HOME = os.path.join(self.tmp_dir, ".nextflow") self.NXF_REFGENIE_PATH = os.path.join(self.NXF_HOME, "nf-core", "refgenie_genomes.config") self.REFGENIE = os.path.join(self.tmp_dir, "genomes_config.yaml") + self.translation_file = os.path.join(self.tmp_dir, "alias_translations.yaml") # Set NXF_HOME environment variable # avoids adding includeConfig statement to config file outside the current tmpdir try: @@ -37,6 +40,10 @@ def setUp(self): with open(self.REFGENIE, "a") as fh: fh.write(f"nextflow_config: {os.path.join(self.NXF_REFGENIE_PATH)}\n") + # Add an alias translation to YAML file + with open(self.translation_file, "a") as fh: + fh.write("ensembl_gtf: gtf\n") + def tearDown(self) -> None: # Remove the tempdir again os.system(f"rm -rf {self.tmp_dir}") @@ -53,3 +60,13 @@ def test_update_refgenie_genomes_config(self): out = subprocess.check_output(shlex.split(cmd), stderr=subprocess.STDOUT) assert "Updated nf-core genomes config" in str(out) + + def test_asset_alias_translation(self): + """Test that asset aliases are translated correctly""" + # Populate the config with a genome + cmd = f"refgenie pull hg38/ensembl_gtf -c {self.REFGENIE}" + subprocess.check_output(shlex.split(cmd), stderr=subprocess.STDOUT) + cmd = f"cat {self.NXF_REFGENIE_PATH}" + out = subprocess.check_output(shlex.split(cmd), stderr=subprocess.STDOUT) + assert " gtf = " in str(out) + assert " ensembl_gtf = " not in str(out) diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 
552a2ab176..1c290cb882 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -6,13 +6,18 @@ import tempfile import unittest -import requests_mock +import responses import nf_core.create import nf_core.modules import nf_core.subworkflows -from .utils import GITLAB_SUBWORKFLOWS_BRANCH, GITLAB_URL, OLD_SUBWORKFLOWS_SHA +from .utils import ( + GITLAB_SUBWORKFLOWS_BRANCH, + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + OLD_SUBWORKFLOWS_SHA, +) def create_modules_repo_dummy(tmp_dir): @@ -30,9 +35,9 @@ def create_modules_repo_dummy(tmp_dir): with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - with requests_mock.Mocker() as mock: - subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True) - subworkflow_create.create() + # TODO Add a mock here + subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True) + subworkflow_create.create() return root_dir @@ -48,9 +53,10 @@ def setUp(self): # Set up the pipeline structure root_repo_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.template_dir = os.path.join(root_repo_dir, "nf_core", "pipeline-template") - self.pipeline_dir = os.path.join(self.tmp_dir, "mypipeline") + self.pipeline_name = "mypipeline" + self.pipeline_dir = os.path.join(self.tmp_dir, self.pipeline_name) nf_core.create.PipelineCreate( - "mypipeline", "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True + self.pipeline_name, "it is mine", "me", no_git=True, outdir=self.pipeline_dir, plain=True ).init_pipeline() # Set up the nf-core/modules repo dummy @@ -61,6 +67,13 @@ def setUp(self): self.subworkflow_install_gitlab = nf_core.subworkflows.SubworkflowInstall( self.pipeline_dir, prompt=False, force=False, remote_url=GITLAB_URL, branch=GITLAB_SUBWORKFLOWS_BRANCH ) + self.subworkflow_install_gitlab_same_org_path = 
nf_core.subworkflows.SubworkflowInstall( + self.pipeline_dir, + prompt=False, + force=False, + remote_url=GITLAB_URL, + branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + ) self.subworkflow_install_old = nf_core.subworkflows.SubworkflowInstall( self.pipeline_dir, prompt=False, @@ -93,6 +106,13 @@ def tearDown(self): test_subworkflows_create_nfcore_modules, test_subworkflows_create_succeed, ) + from .subworkflows.create_test_yml import ( + test_subworkflows_create_test_yml_check_inputs, + test_subworkflows_create_test_yml_entry_points, + test_subworkflows_create_test_yml_get_md5, + test_subworkflows_custom_yml_dumper, + test_subworkflows_test_file_dict, + ) from .subworkflows.info import ( test_subworkflows_info_in_modules_repo, test_subworkflows_info_local, @@ -101,6 +121,7 @@ def tearDown(self): ) from .subworkflows.install import ( test_subworkflow_install_nopipeline, + test_subworkflows_install_alternate_remote, test_subworkflows_install_bam_sort_stats_samtools, test_subworkflows_install_bam_sort_stats_samtools_twice, test_subworkflows_install_different_branch_fail, diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index ddf88ef74a..c4e3d49ae0 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -33,7 +33,6 @@ def test_tmp_folder_does_not_exist_after(): def test_set_wd(): - with tempfile.TemporaryDirectory() as tmpdirname: with set_wd(tmpdirname): context_wd = Path().resolve() diff --git a/tests/utils.py b/tests/utils.py index 77e94be464..d39d172a66 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,17 +8,23 @@ from contextlib import contextmanager from pathlib import Path -OLD_TRIMGALORE_SHA = "06348dffce2a732fc9e656bdc5c64c3e02d302cb" +import responses + +import nf_core.modules + +OLD_TRIMGALORE_SHA = "9b7a3bdefeaad5d42324aa7dd50f87bea1b04386" OLD_TRIMGALORE_BRANCH = "mimic-old-trimgalore" GITLAB_URL = "https://gitlab.com/nf-core/modules-test.git" -GITLAB_REPO = "nf-core" +GITLAB_REPO = "nf-core-test" GITLAB_DEFAULT_BRANCH = 
"main" GITLAB_SUBWORKFLOWS_BRANCH = "subworkflows" +GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH = "subworkflows-org-path" OLD_SUBWORKFLOWS_SHA = "f3c078809a2513f1c95de14f6633fe1f03572fdb" # Branch test stuff GITLAB_BRANCH_TEST_BRANCH = "branch-tester" -GITLAB_BRANCH_TEST_OLD_SHA = "bce3f17980b8d1beae5e917cfd3c65c0c69e04b5" -GITLAB_BRANCH_TEST_NEW_SHA = "2f5f180f6e705bb81d6e7742dc2f24bf4a0c721e" +GITLAB_BRANCH_ORG_PATH_BRANCH = "org-path" +GITLAB_BRANCH_TEST_OLD_SHA = "e772abc22c1ff26afdf377845c323172fb3c19ca" +GITLAB_BRANCH_TEST_NEW_SHA = "7d73e21f30041297ea44367f2b4fd4e045c0b991" def with_temporary_folder(func): @@ -68,23 +74,26 @@ def set_wd(path: Path): os.chdir(start_wd) -def mock_api_calls(mock, module, version): - """Mock biocontainers and anaconda api calls for module""" - biocontainers_api_url = ( - f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version.split('--')[0]}" - ) +def mock_anaconda_api_calls(rsps: responses.RequestsMock, module, version): + """Mock anaconda api calls for module""" anaconda_api_url = f"https://api.anaconda.org/package/bioconda/{module}" anaconda_mock = { - "status_code": 200, "latest_version": version.split("--")[0], "summary": "", - "doc_url": "", - "dev_url": "", + "doc_url": "http://test", + "dev_url": "http://test", "files": [{"version": version.split("--")[0]}], "license": "", } + rsps.get(anaconda_api_url, json=anaconda_mock, status=200) + + +def mock_biocontainers_api_calls(rsps: responses.RequestsMock, module, version): + """Mock biocontainers api calls for module""" + biocontainers_api_url = ( + f"https://api.biocontainers.pro/ga4gh/trs/v2/tools/{module}/versions/{module}-{version.split('--')[0]}" + ) biocontainers_mock = { - "status_code": 200, "images": [ { "image_type": "Singularity", @@ -93,10 +102,9 @@ def mock_api_calls(mock, module, version): }, { "image_type": "Docker", - "image_name": f"quay.io/biocontainers/{module}:{version}", + "image_name": f"biocontainers/{module}:{version}", 
"updated": "2021-09-04T00:00:00Z", }, ], } - mock.register_uri("GET", anaconda_api_url, json=anaconda_mock) - mock.register_uri("GET", biocontainers_api_url, json=biocontainers_mock) + rsps.get(biocontainers_api_url, json=biocontainers_mock, status=200)