diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml
index f07b31e9d..1a3e283d0 100644
--- a/.github/workflows/create-lint-wf.yml
+++ b/.github/workflows/create-lint-wf.yml
@@ -38,7 +38,7 @@ jobs:
     strategy:
       matrix:
         NXF_VER:
-          - "23.10.0"
+          - "24.04.2"
           - "latest-everything"
     steps:
       - name: go to subdirectory and change nextflow workdir
diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml
index 56c6c822a..09c5b01c3 100644
--- a/.github/workflows/create-test-wf.yml
+++ b/.github/workflows/create-test-wf.yml
@@ -39,7 +39,7 @@ jobs:
     strategy:
       matrix:
         NXF_VER:
-          - "23.10.0"
+          - "24.04.2"
           - "latest-everything"
     steps:
       - name: go to working directory
@@ -75,7 +75,7 @@ jobs:
           pwd
           # echo content of current directory
           ls -la
-          nextflow run nf-core-testpipeline -profile test,self_hosted_runner --outdir ./results
+          nextflow run nf-core-testpipeline -profile self_hosted_runner,test --outdir ./results

       - name: Upload log file artifact
         if: ${{ always() }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f2d494ab1..68399a9c0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -38,6 +38,7 @@
 - add option to exclude tower.yml from pipeline template ([#3134](https://github.com/nf-core/tools/pull/3134))
 - run nf-core lint `--release` on PRs to master ([#3148](https://github.com/nf-core/tools/pull/3148))
 - Add tests to ensure all files are part of a template customisation group and all groups are tested ([#3099](https://github.com/nf-core/tools/pull/3099))
+- Replaces the old custom `check_max()` function with the Nextflow native `resourceLimits` directive ([#3037](https://github.com/nf-core/tools/pull/3037))
 - Fixed release announcement hashtags for Mastodon ([#3099](https://github.com/nf-core/tools/pull/3176))

 ### Linting
diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml
index 63fa99cec..06371cc97 100644
--- a/nf_core/pipeline-template/.github/workflows/ci.yml
+++ b/nf_core/pipeline-template/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
     strategy:
       matrix:
         NXF_VER:
-          - "23.10.0"
+          - "24.04.2"
           - "latest-everything"
     steps:
       - name: Check out pipeline code
diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md
index beb45ed51..433145428 100644
--- a/nf_core/pipeline-template/README.md
+++ b/nf_core/pipeline-template/README.md
@@ -20,7 +20,7 @@
 [![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX)
 [![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com)
-[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/)
+[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/)
 [![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/)
 [![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/)
 [![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/)
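Context for the version bumps above: `resourceLimits` was only introduced in Nextflow 24.04, so the CI matrices, the README badge and (further down) `manifest.nextflowVersion` all move from 23.10.0 to 24.04.2. The `!` prefix in `'!>=24.04.2'` makes the manifest check a hard failure rather than a warning. As an illustrative extra that is not part of this diff, a pipeline script can also guard explicitly:

```groovy
// Illustrative guard in main.nf (not part of this PR): fail fast on older
// Nextflow, where resourceLimits is simply an unknown directive.
if (!nextflow.version.matches('>=24.04.2')) {
    error("This pipeline requires Nextflow >= 24.04.2 (resourceLimits support)")
}
```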
diff --git a/nf_core/pipeline-template/conf/base.config b/nf_core/pipeline-template/conf/base.config
index 9c62bf063..fa292339e 100644
--- a/nf_core/pipeline-template/conf/base.config
+++ b/nf_core/pipeline-template/conf/base.config
@@ -11,9 +11,9 @@ process {

     // TODO nf-core: Check the defaults for all processes
-    cpus   = { check_max( 1    * task.attempt, 'cpus'   ) }
-    memory = { check_max( 6.GB * task.attempt, 'memory' ) }
-    time   = { check_max( 4.h  * task.attempt, 'time'   ) }
+    cpus   = { 1    * task.attempt }
+    memory = { 6.GB * task.attempt }
+    time   = { 4.h  * task.attempt }

     errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' }
     maxRetries    = 1
@@ -27,30 +27,30 @@ process {
     // TODO nf-core: Customise requirements for specific processes.
     // See https://www.nextflow.io/docs/latest/config.html#config-process-selectors
     withLabel:process_single {
-        cpus   = { check_max( 1                  , 'cpus'   ) }
-        memory = { check_max( 6.GB * task.attempt, 'memory' ) }
-        time   = { check_max( 4.h  * task.attempt, 'time'   ) }
+        cpus   = { 1 }
+        memory = { 6.GB * task.attempt }
+        time   = { 4.h  * task.attempt }
     }
     withLabel:process_low {
-        cpus   = { check_max( 2     * task.attempt, 'cpus'   ) }
-        memory = { check_max( 12.GB * task.attempt, 'memory' ) }
-        time   = { check_max( 4.h   * task.attempt, 'time'   ) }
+        cpus   = { 2     * task.attempt }
+        memory = { 12.GB * task.attempt }
+        time   = { 4.h   * task.attempt }
     }
     withLabel:process_medium {
-        cpus   = { check_max( 6     * task.attempt, 'cpus'   ) }
-        memory = { check_max( 36.GB * task.attempt, 'memory' ) }
-        time   = { check_max( 8.h   * task.attempt, 'time'   ) }
+        cpus   = { 6     * task.attempt }
+        memory = { 36.GB * task.attempt }
+        time   = { 8.h   * task.attempt }
     }
     withLabel:process_high {
-        cpus   = { check_max( 12    * task.attempt, 'cpus'   ) }
-        memory = { check_max( 72.GB * task.attempt, 'memory' ) }
-        time   = { check_max( 16.h  * task.attempt, 'time'   ) }
+        cpus   = { 12    * task.attempt }
+        memory = { 72.GB * task.attempt }
+        time   = { 16.h  * task.attempt }
     }
     withLabel:process_long {
-        time   = { check_max( 20.h  * task.attempt, 'time'   ) }
+        time   = { 20.h  * task.attempt }
     }
     withLabel:process_high_memory {
-        memory = { check_max( 200.GB * task.attempt, 'memory' ) }
+        memory = { 200.GB * task.attempt }
     }
     withLabel:error_ignore {
         errorStrategy = 'ignore'
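With `check_max()` gone, the label blocks above submit raw `task.attempt`-scaled requests, so a retry doubles the ask and can exceed what the host actually offers. The cap that `check_max()` used to apply is now expected to come from a `resourceLimits` declaration instead. A minimal sketch (the limit values here are assumptions for a mid-size node, not part of the template):

```groovy
// Sketch: resourceLimits silently clamps each effective request to its ceiling.
// With process_medium, attempt 1 requests 36.GB; attempt 2 requests 72.GB,
// which Nextflow reduces to the 64.GB limit instead of failing the submission.
process {
    resourceLimits = [cpus: 8, memory: 64.GB, time: 48.h]

    withLabel: process_medium {
        cpus   = { 6     * task.attempt }
        memory = { 36.GB * task.attempt }
        time   = { 8.h   * task.attempt }
    }
}
```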
"${params.custom_config_base}/nfcore_custom.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") -} - -// Load {{ name }} custom profiles from different institutions. -try { - includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" -} catch (Exception e) { - System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") -} -{% endif -%} profiles { debug { @@ -205,6 +184,22 @@ profiles { {%- endif %} } +{% if nf_core_configs -%} +// Load nf-core custom profiles from different Institutions +try { + includeConfig "${params.custom_config_base}/nfcore_custom.config" +} catch (Exception e) { + System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config") +} + +// Load {{ name }} custom profiles from different institutions. +try { + includeConfig "${params.custom_config_base}/pipeline/{{ short_name }}.config" +} catch (Exception e) { + System.err.println("WARNING: Could not load nf-core/config/{{ short_name }} profiles: ${params.custom_config_base}/pipeline/{{ short_name }}.config") +} +{% endif -%} + // Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile // Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled // Set to your registry if you have a mirror of containers @@ -271,7 +266,7 @@ manifest { homePage = 'https://github.com/{{ name }}' description = """{{ description }}""" mainScript = 'main.nf' - nextflowVersion = '!>=23.10.0' + nextflowVersion = '!>=24.04.2' version = '{{ version }}' doi = '' } @@ -318,35 +313,3 @@ validation { // Load modules.config for DSL2 module specific options includeConfig 'conf/modules.config' {% endif %} -// Function to ensure that resource requirements don't go beyond -// a maximum limit -def check_max(obj, type) { - if (type == 'memory') { - try { - if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) == 1) - return params.max_memory as nextflow.util.MemoryUnit - else - return obj - } catch (all) { - println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'time') { - try { - if (obj.compareTo(params.max_time as nextflow.util.Duration) == 1) - return params.max_time as nextflow.util.Duration - else - return obj - } catch (all) { - println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj" - return obj - } - } else if (type == 'cpus') { - try { - return Math.min( obj, params.max_cpus as int ) - } catch (all) { - println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! 
Using default value: $obj" - return obj - } - } -} diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index 446da25ae..4136a0b49 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -136,41 +136,6 @@ } }, {%- endif %} - "max_job_request_options": { - "title": "Max job request options", - "type": "object", - "fa_icon": "fab fa-acquisitions-incorporated", - "description": "Set the top limit for requested resources for any single job.", - "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.", - "properties": { - "max_cpus": { - "type": "integer", - "description": "Maximum number of CPUs that can be requested for any single job.", - "default": 16, - "fa_icon": "fas fa-microchip", - "hidden": true, - "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`" - }, - "max_memory": { - "type": "string", - "description": "Maximum amount of memory that can be requested for any single job.", - "default": "128.GB", - "fa_icon": "fas fa-memory", - "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$", - "hidden": true, - "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`" - }, - "max_time": { - "type": "string", - "description": "Maximum amount of time that can be requested for any single job.", - "default": "240.h", - "fa_icon": "far fa-clock", - "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$", - "hidden": true, - "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`" - } - } - }, "generic_options": { "title": "Generic options", "type": "object", @@ -278,9 +243,6 @@ {% if nf_core_configs %}{ "$ref": "#/$defs/institutional_config_options" },{% endif %} - { - "$ref": "#/$defs/max_job_request_options" - }, { "$ref": "#/$defs/generic_options" } diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py index 790fc2179..14e91cc60 100644 --- a/nf_core/pipelines/lint/nextflow_config.py +++ b/nf_core/pipelines/lint/nextflow_config.py @@ -80,6 +80,9 @@ def nextflow_config(self) -> Dict[str, List[str]]: * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion`` * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container`` * ``igenomesIgnore``: Changed to ``igenomes_ignore`` + * ``params.max_cpus``: Old method of specifying the maximum number of CPUs a process can request. Replaced by native Nextflow `resourceLimits`directive in config files. + * ``params.max_memory``: Old method of specifying the maximum number of memory can request. Replaced by native Nextflow `resourceLimits`directive. + * ``params.max_time``: Old method of specifying the maximum number of CPUs can request. 
diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json
index 446da25ae..4136a0b49 100644
--- a/nf_core/pipeline-template/nextflow_schema.json
+++ b/nf_core/pipeline-template/nextflow_schema.json
@@ -136,41 +136,6 @@
             }
         },
         {%- endif %}
-        "max_job_request_options": {
-            "title": "Max job request options",
-            "type": "object",
-            "fa_icon": "fab fa-acquisitions-incorporated",
-            "description": "Set the top limit for requested resources for any single job.",
-            "help_text": "If you are running on a smaller system, a pipeline step requesting more resources than are available may cause the Nextflow to stop the run with an error. These options allow you to cap the maximum resources requested by any single job so that the pipeline will run on your system.\n\nNote that you can not _increase_ the resources requested by any job using these options. For that you will need your own configuration file. See [the nf-core website](https://nf-co.re/usage/configuration) for details.",
-            "properties": {
-                "max_cpus": {
-                    "type": "integer",
-                    "description": "Maximum number of CPUs that can be requested for any single job.",
-                    "default": 16,
-                    "fa_icon": "fas fa-microchip",
-                    "hidden": true,
-                    "help_text": "Use to set an upper-limit for the CPU requirement for each process. Should be an integer e.g. `--max_cpus 1`"
-                },
-                "max_memory": {
-                    "type": "string",
-                    "description": "Maximum amount of memory that can be requested for any single job.",
-                    "default": "128.GB",
-                    "fa_icon": "fas fa-memory",
-                    "pattern": "^\\d+(\\.\\d+)?\\.?\\s*(K|M|G|T)?B$",
-                    "hidden": true,
-                    "help_text": "Use to set an upper-limit for the memory requirement for each process. Should be a string in the format integer-unit e.g. `--max_memory '8.GB'`"
-                },
-                "max_time": {
-                    "type": "string",
-                    "description": "Maximum amount of time that can be requested for any single job.",
-                    "default": "240.h",
-                    "fa_icon": "far fa-clock",
-                    "pattern": "^(\\d+\\.?\\s*(s|m|h|d|day)\\s*)+$",
-                    "hidden": true,
-                    "help_text": "Use to set an upper-limit for the time requirement for each process. Should be a string in the format integer-unit e.g. `--max_time '2.h'`"
-                }
-            }
-        },
         "generic_options": {
             "title": "Generic options",
             "type": "object",
@@ -278,9 +243,6 @@
         {% if nf_core_configs %}{
             "$ref": "#/$defs/institutional_config_options"
         },{% endif %}
-        {
-            "$ref": "#/$defs/max_job_request_options"
-        },
         {
             "$ref": "#/$defs/generic_options"
         }
diff --git a/nf_core/pipelines/lint/nextflow_config.py b/nf_core/pipelines/lint/nextflow_config.py
index 790fc2179..14e91cc60 100644
--- a/nf_core/pipelines/lint/nextflow_config.py
+++ b/nf_core/pipelines/lint/nextflow_config.py
@@ -80,6 +80,9 @@ def nextflow_config(self) -> Dict[str, List[str]]:
    * ``params.nf_required_version``: The old method for specifying the minimum Nextflow version. Replaced by ``manifest.nextflowVersion``
    * ``params.container``: The old method for specifying the dockerhub container address. Replaced by ``process.container``
    * ``igenomesIgnore``: Changed to ``igenomes_ignore``
+    * ``params.max_cpus``: The old method for specifying the maximum number of CPUs a process can request. Replaced by the native Nextflow ``resourceLimits`` directive in config files.
+    * ``params.max_memory``: The old method for specifying the maximum amount of memory a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.
+    * ``params.max_time``: The old method for specifying the maximum amount of time a process can request. Replaced by the native Nextflow ``resourceLimits`` directive.

    .. tip:: The ``snake_case`` convention should now be used when defining pipeline parameters
@@ -146,7 +149,13 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         ["params.input"],
     ]
     # Throw a warning if these are missing
-    config_warn = [["manifest.mainScript"], ["timeline.file"], ["trace.file"], ["report.file"], ["dag.file"]]
+    config_warn = [
+        ["manifest.mainScript"],
+        ["timeline.file"],
+        ["trace.file"],
+        ["report.file"],
+        ["dag.file"],
+    ]
     # Old depreciated vars - fail if present
     config_fail_ifdefined = [
         "params.nf_required_version",
@@ -155,6 +164,9 @@ def nextflow_config(self) -> Dict[str, List[str]]:
         "params.igenomesIgnore",
         "params.name",
         "params.enable_conda",
+        "params.max_cpus",
+        "params.max_memory",
+        "params.max_time",
     ]

     # Lint for plugins
diff --git a/nf_core/pipelines/lint/readme.py b/nf_core/pipelines/lint/readme.py
index 1c0910425..bdfad5200 100644
--- a/nf_core/pipelines/lint/readme.py
+++ b/nf_core/pipelines/lint/readme.py
@@ -36,7 +36,7 @@ def readme(self):
     if "nextflow_badge" not in ignore_configs:
         # Check that there is a readme badge showing the minimum required version of Nextflow
-        # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A523.10.0-23aa62.svg)](https://www.nextflow.io/)
+        # [![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/)
         # and that it has the correct version
         nf_badge_re = r"\[!\[Nextflow\]\(https://img\.shields\.io/badge/nextflow%20DSL2-!?(?:%E2%89%A5|%3E%3D)([\d\.]+)-23aa62\.svg\)\]\(https://www\.nextflow\.io/\)"
         match = re.search(nf_badge_re, content)
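With the three parameters added to `config_fail_ifdefined`, `nf-core pipelines lint` now fails on any pipeline that still carries the old block:

```groovy
// Old-style nextflow.config that the linter now rejects:
params {
    max_cpus   = 16        // deprecated: delete these three and express the
    max_memory = '128.GB'  // cap as a process-scope resourceLimits declaration
    max_time   = '240.h'
}
```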
"params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) @@ -180,7 +182,9 @@ def test_default_values_float_fail(self): with open(nf_conf_file) as f: content = f.read() fail_content = re.sub( - r"validate_params\s*=\s*true", "params.validate_params = true\ndummy = 0.000000001", content + r"validate_params\s*=\s*true", + "params.validate_params = true\ndummy = 0.000000001", + content, ) with open(nf_conf_file, "w") as f: f.write(fail_content) diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py index d63e7b6dc..5e230528a 100644 --- a/tests/pipelines/test_launch.py +++ b/tests/pipelines/test_launch.py @@ -101,7 +101,12 @@ def test_ob_to_questionary_string(self): "default": "data/*{1,2}.fastq.gz", } result = self.launcher.single_param_to_questionary("input", sc_obj) - assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"} + assert result == { + "type": "input", + "name": "input", + "message": "", + "default": "data/*{1,2}.fastq.gz", + } @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}]) def test_prompt_web_gui_true(self, mock_prompt): @@ -123,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api): assert exc_info.value.args[0].startswith("Web launch response not recognised:") @mock.patch( - "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}] + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}], ) @mock.patch("webbrowser.open") @mock.patch("nf_core.utils.wait_cli_function") @@ -133,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa self.launcher.merge_nxf_flag_schema() assert self.launcher.launch_web_gui() is None - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "error", "message": "foo"}], + ) def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status error""" with pytest.raises(AssertionError) as exc_info: @@ -147,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api): self.launcher.get_web_launch_response() assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ") - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "waiting_for_user"}], + ) def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - status waiting_for_user""" assert self.launcher.get_web_launch_response() is False - @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}]) + @mock.patch( + "nf_core.utils.poll_nfcore_web_api", + side_effect=[{"status": "launch_params_complete"}], + ) def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api): """Test polling the website for a launch response - complete, but missing keys""" with pytest.raises(AssertionError) as exc_info: @@ -185,11 +200,9 @@ def test_sanitise_web_response(self): self.launcher.get_pipeline_schema() self.launcher.nxf_flags["-name"] = "" 
diff --git a/tests/pipelines/test_launch.py b/tests/pipelines/test_launch.py
index d63e7b6dc..5e230528a 100644
--- a/tests/pipelines/test_launch.py
+++ b/tests/pipelines/test_launch.py
@@ -101,7 +101,12 @@ def test_ob_to_questionary_string(self):
             "default": "data/*{1,2}.fastq.gz",
         }
         result = self.launcher.single_param_to_questionary("input", sc_obj)
-        assert result == {"type": "input", "name": "input", "message": "", "default": "data/*{1,2}.fastq.gz"}
+        assert result == {
+            "type": "input",
+            "name": "input",
+            "message": "",
+            "default": "data/*{1,2}.fastq.gz",
+        }

     @mock.patch("questionary.unsafe_prompt", side_effect=[{"use_web_gui": "Web based"}])
     def test_prompt_web_gui_true(self, mock_prompt):
@@ -123,7 +128,8 @@ def test_launch_web_gui_missing_keys(self, mock_poll_nfcore_web_api):
         assert exc_info.value.args[0].startswith("Web launch response not recognised:")

     @mock.patch(
-        "nf_core.utils.poll_nfcore_web_api", side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}]
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"api_url": "foo", "web_url": "bar", "status": "recieved"}],
     )
     @mock.patch("webbrowser.open")
     @mock.patch("nf_core.utils.wait_cli_function")
@@ -133,7 +139,10 @@ def test_launch_web_gui(self, mock_poll_nfcore_web_api, mock_webbrowser, mock_wa
         self.launcher.merge_nxf_flag_schema()
         assert self.launcher.launch_web_gui() is None

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "error", "message": "foo"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "error", "message": "foo"}],
+    )
     def test_get_web_launch_response_error(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - status error"""
         with pytest.raises(AssertionError) as exc_info:
@@ -147,12 +156,18 @@ def test_get_web_launch_response_unexpected(self, mock_poll_nfcore_web_api):
             self.launcher.get_web_launch_response()
         assert exc_info.value.args[0].startswith("Web launch GUI returned unexpected status (foo): ")

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "waiting_for_user"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "waiting_for_user"}],
+    )
     def test_get_web_launch_response_waiting(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - status waiting_for_user"""
         assert self.launcher.get_web_launch_response() is False

-    @mock.patch("nf_core.utils.poll_nfcore_web_api", side_effect=[{"status": "launch_params_complete"}])
+    @mock.patch(
+        "nf_core.utils.poll_nfcore_web_api",
+        side_effect=[{"status": "launch_params_complete"}],
+    )
     def test_get_web_launch_response_missing_keys(self, mock_poll_nfcore_web_api):
         """Test polling the website for a launch response - complete, but missing keys"""
         with pytest.raises(AssertionError) as exc_info:
@@ -185,11 +200,9 @@ def test_sanitise_web_response(self):
         self.launcher.get_pipeline_schema()
         self.launcher.nxf_flags["-name"] = ""
         self.launcher.schema_obj.input_params["igenomes_ignore"] = "true"
-        self.launcher.schema_obj.input_params["max_cpus"] = "12"
         self.launcher.sanitise_web_response()
         assert "-name" not in self.launcher.nxf_flags
         assert self.launcher.schema_obj.input_params["igenomes_ignore"] is True
-        assert self.launcher.schema_obj.input_params["max_cpus"] == 12

     def test_ob_to_questionary_bool(self):
         """Check converting a python dict to a pyenquirer format - booleans"""
@@ -262,7 +275,10 @@ def test_ob_to_questionary_enum(self):

     def test_ob_to_questionary_pattern(self):
         """Check converting a python dict to a questionary format - with pattern"""
-        sc_obj = {"type": "string", "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$"}
+        sc_obj = {
+            "type": "string",
+            "pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$",
+        }
         result = self.launcher.single_param_to_questionary("email", sc_obj)
         assert result["type"] == "input"
         assert result["validate"]("test@email.com") is True
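Taken together: pipelines adopting this template release drop `check_max()` and the three `max_*` parameters, and any site-specific ceiling becomes plain configuration. `resourceLimits` composes with the usual per-process tuning; an illustrative sketch (`FOO_BAR` is a made-up process name, not from this PR):

```groovy
// Illustrative only: FOO_BAR is hypothetical. The limit is applied after
// selectors and retry scaling have computed the request for each task.
process {
    resourceLimits = [cpus: 16, memory: 128.GB, time: 240.h]

    withName: 'FOO_BAR' {
        memory = { 96.GB * task.attempt }  // attempt 2: 192.GB requested, 128.GB granted
    }
}
```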