diff --git a/.github/workflows/builds.yml b/.github/workflows/builds.yml
index ca2c7d79d1c..288bb20187b 100644
--- a/.github/workflows/builds.yml
+++ b/.github/workflows/builds.yml
@@ -257,6 +257,10 @@ jobs:
- name: Python level tests
shell: bash
+ env:
+ PYTEST_MD_REPORT: true
+ PYTEST_MD_REPORT_VERBOSE: 0
+ PYTEST_MD_REPORT_OUTPUT: pytest.md
run: >
/usr/local/bin/geant4-config --install-datasets
&& source /usr/local/bin/thisroot.sh
@@ -266,7 +270,9 @@ jobs:
&& export PYTHONPATH=/usr/local/python:$PYTHONPATH
&& export LD_LIBRARY_PATH=$PWD/build/thirdparty/OpenDataDetector/factory:$LD_LIBRARY_PATH
&& pip3 install -r Examples/Python/tests/requirements.txt
+ && pip3 install pytest-md-report
&& pytest -rFsv -k "not exatrkx" -v
+ && cat ${PYTEST_MD_REPORT_OUTPUT} >> $GITHUB_STEP_SUMMARY
linux_physmon:
runs-on: ubuntu-latest
@@ -303,7 +309,7 @@ jobs:
run: >
echo "::group::Dependencies"
&& git config --global safe.directory "$GITHUB_WORKSPACE"
- && pip3 install histcmp==0.6.2 spyral-cli==1.1.0 matplotlib
+ && pip3 install histcmp==0.6.3 spyral-cli==1.1.1 matplotlib
&& pip3 install -r Examples/Scripts/requirements.txt
&& /usr/local/bin/geant4-config --install-datasets
&& source /usr/local/bin/thisroot.sh
@@ -314,6 +320,7 @@ jobs:
&& echo "::endgroup::"
&& export PYTHONPATH="${PYTHONPATH}":"${GITHUB_WORKSPACE}/Examples/Scripts/Python"
&& CI/physmon/phys_perf_mon.sh all physmon
+ && cat physmon/summary.md >> $GITHUB_STEP_SUMMARY
- uses: actions/upload-artifact@v3
if: always()
@@ -401,7 +408,7 @@ jobs:
-DACTS_BUILD_ODD=ON
-DACTS_BUILD_EXAMPLES_PYTHON_BINDINGS=ON
-DACTS_BUILD_EXAMPLES_EDM4HEP=ON
- -DACTS_FORCE_ASSERTIONS=ON
+ -DACTS_FORCE_ASSERTIONS=OFF
-DACTS_BUILD_ANALYSIS_APPS=ON
-DACTS_BUILD_PLUGIN_ACTSVG=ON
diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml
index 1e815677268..82272c617e5 100644
--- a/.github/workflows/checks.yml
+++ b/.github/workflows/checks.yml
@@ -97,6 +97,16 @@ jobs:
- name: Check
run: >
CI/check_spelling
+ missing_includes:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Install clang
+ run: >
+ sudo apt-get install -y clang libeigen3-dev libboost-dev
+ - name: Check
+ run: >
+ CI/missing_include_check.sh
fpe_masks:
runs-on: ubuntu-latest
steps:
diff --git a/.github/workflows/pr_commands.yml b/.github/workflows/pr_commands.yml
new file mode 100644
index 00000000000..8a5bfa08611
--- /dev/null
+++ b/.github/workflows/pr_commands.yml
@@ -0,0 +1,26 @@
+name: PR comment ops
+on: issue_comment
+
+jobs:
+ pr_commented:
+ # This job only runs for pull request comments
+ name: PR comment
+ if: "${{ github.event.issue.pull_request && startsWith(github.event.comment.body, '/') }}"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Install prerequisites
+ run: pip install -r CI/commands/requirements.txt
+ - name: Run command
+ env:
+          GITLAB_TRIGGER_TOKEN: ${{ secrets.GITLAB_ATHENA_BUILD_TRIGGER_TOKEN }}
+ GITLAB_TRIGGER_URL: https://gitlab.cern.ch/api/v4/projects/153873/trigger/pipeline
+ GITHUB_TOKEN: ${{ secrets.PR_COMMANDS_GH_TOKEN }}
+ run: |
+          jq -r '.comment.body' "$GITHUB_EVENT_PATH" > body.txt
+ cat body.txt
+ CI/commands/pr_commands.py \
+ --pr ${{ github.event.issue.pull_request.url }} \
+ --body body.txt \
+ --sender ${{ github.event.comment.user.login }} \
+ --repository ${{ github.event.repository.full_name }}
diff --git a/.github/workflows/report.yml b/.github/workflows/report.yml
index 8ad7f1a893a..374ab1c8921 100644
--- a/.github/workflows/report.yml
+++ b/.github/workflows/report.yml
@@ -91,9 +91,14 @@ jobs:
- name: Render comment
if: steps.get-pr-number.outputs.result != 'false'
run: |
- pip install Jinja2
ls -al $GITHUB_WORKSPACE/physmon
- CI/physmon/generate_comment.py $GITHUB_WORKSPACE/physmon comment.md
+ echo "# 📊: Physics performance monitoring for ${PR_SHA}" >> comment.md
+ echo "[Full contents](${ARTIFACT_URL})" >> comment.md
+ if [ -f "$GITHUB_WORKSPACE/physmon/summary.md" ]; then
+ cat $GITHUB_WORKSPACE/physmon/summary.md >> comment.md
+ else
+ echo "🟥 summary not found!" >> comment.md
+ fi
cat comment.md
- name: Find Comment
diff --git a/.github/workflows/trigger_athena.yml b/.github/workflows/trigger_athena.yml
index d66775227de..7e01a3dd9f7 100644
--- a/.github/workflows/trigger_athena.yml
+++ b/.github/workflows/trigger_athena.yml
@@ -12,4 +12,5 @@ jobs:
curl -X POST --fail
-F token=${{ secrets.GITLAB_ATHENA_BUILD_TRIGGER_TOKEN}}
-F ref=main
+ --form variables[SOURCE_SHA]="${{ github.sha }}"
https://gitlab.cern.ch/api/v4/projects/153873/trigger/pipeline
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ab7767e3605..94ee96f8d24 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -37,10 +37,8 @@ clang_tidy:
-DACTS_BUILD_EVERYTHING=on
-DACTS_RUN_CLANG_TIDY=on
- - mkdir clang-tidy
-
# Main clang-tidy run during cmake compilation
- - CI/clang_tidy/run_clang_tidy.sh build | tee clang-tidy/clang-tidy.log
+ - CI/clang_tidy/run_clang_tidy.sh clang-tidy build
# Install dependencies for processing scripts
- pip install -r CI/clang_tidy/requirements.txt
@@ -74,7 +72,6 @@ build_exatrkx:
- build/
exclude:
- build/**/*.o
- - build/bin/ActsUnitTest*
- build/bin/ActsIntegrationTest*
script:
@@ -96,13 +93,24 @@ build_exatrkx:
-DCMAKE_CUDA_ARCHITECTURES="75;86"
-DACTS_BUILD_PLUGIN_EXATRKX=ON
-DACTS_BUILD_EXAMPLES_EXATRKX=ON
+ -DACTS_BUILD_UNITTESTS=ON
-DACTS_EXATRKX_ENABLE_TORCH=ON
-DACTS_EXATRKX_ENABLE_ONNX=ON
-DACTS_BUILD_EXAMPLES_PYTHON_BINDINGS=ON
-DACTS_ENABLE_LOG_FAILURE_THRESHOLD=ON
- cmake --build build -- -j3
-test_exatrkx:
+test_exatrkx_unittests:
+ stage: test
+ needs:
+ - build_exatrkx
+ image: ghcr.io/acts-project/ubuntu2004_exatrkx:v41
+ tags:
+ - docker-gpu-nvidia
+ script:
+ - ctest --test-dir build -R ExaTrkX
+
+test_exatrkx_python:
stage: test
needs:
- build_exatrkx
@@ -110,7 +118,7 @@ test_exatrkx:
tags:
- docker-gpu-nvidia
script:
- - apt-get update -y
+ - apt-get update -y || true # TODO revert
- apt-get install -y python3 libxxhash0
- source /usr/local/bin/thisroot.sh
- source build/python/setup.sh
diff --git a/.policy.yml b/.policy.yml
index 96c8f8b0274..ccadc6dccf0 100644
--- a/.policy.yml
+++ b/.policy.yml
@@ -12,7 +12,7 @@ approval_rules:
options:
allow_author: false # just for completeness
allow_contributor: true # Update button 'contributions' should be ignored
- invalidate_on_push: false
+ invalidate_on_push: true
ignore_update_merges: true
if:
targets_branch:
diff --git a/CI/clang_tidy/run_clang_tidy.sh b/CI/clang_tidy/run_clang_tidy.sh
index b30d712ebf5..ecc3054d6c8 100755
--- a/CI/clang_tidy/run_clang_tidy.sh
+++ b/CI/clang_tidy/run_clang_tidy.sh
@@ -1,8 +1,27 @@
#!/bin/bash
+
+set -e
+
+output_dir=$1
+shift
build_dir=$1
+shift
-export NINJA_STATUS="[ninja][%f/%t] "
+mkdir -p $output_dir
+output_dir=$(realpath $output_dir)
pushd $build_dir
-ninja | grep -v '\[ninja\]'
+NINJA_STATUS="[ninja] [%f/%t] " ninja $@ | tee $output_dir/ninja.log
popd
+
+# grep fails if it does not find anything
+set +e
+rm -f $output_dir/clang-tidy.log
+cat $output_dir/ninja.log | grep -v '\[ninja\]' > $output_dir/clang-tidy.log
+set -e
+
+if [ ! -f $output_dir/clang-tidy.log ]; then
+ exit 1
+fi
+
+exit 0
diff --git a/CI/commands/pr_commands.py b/CI/commands/pr_commands.py
new file mode 100755
index 00000000000..eaee61cbe2f
--- /dev/null
+++ b/CI/commands/pr_commands.py
@@ -0,0 +1,196 @@
+#!/usr/bin/env python3
+from dataclasses import dataclass
+from typing import List, Dict, Any
+from pathlib import Path
+import shlex
+import asyncio
+import functools
+import os
+import click
+
+import typer
+import gidgethub
+from gidgethub.aiohttp import GitHubAPI
+import aiohttp
+
+
+def wrap_async(fn):
+ @functools.wraps(fn)
+ def wrapper(*args, **kwargs):
+ return asyncio.run(fn(*args, **kwargs))
+
+ return wrapper
+
+
+class CommandError(Exception):
+ pass
+
+
+@dataclass
+class Context:
+ pr: Dict[str, Any]
+ sender: str
+ github_token: str
+
+
+@click.group()
+def app():
+ pass
+
+
+@app.group()
+def run_experiment():
+ pass
+
+
+@run_experiment.command()
+@click.option("--revert-sha", "-r", multiple=True)
+@click.pass_obj
+@wrap_async
+async def atlas(ctx: Context, revert_sha: List[str]):
+ gitlab_trigger_token = os.environ["GITLAB_TRIGGER_TOKEN"]
+ gitlab_trigger_url = os.environ["GITLAB_TRIGGER_URL"]
+ async with aiohttp.ClientSession() as session:
+ gh = GitHubAPI(session, "acts-commands", oauth_token=ctx.github_token)
+
+ pr = ctx.pr
+
+ head_clone_url = pr["head"]["repo"]["clone_url"]
+ head_branch = pr["head"]["ref"]
+ head_sha = pr["head"]["sha"]
+
+ variable_summary = f"""
+| Variable | Value |
+|------|------|
+| `ACTS_GIT_REPO` | {head_clone_url} |
+| `ACTS_REF` | `{head_branch}` |
+| `SOURCE_SHA` | {head_sha} |
+| `REVERT_SHAS` | {",".join(revert_sha)} |
+ """
+
+ body = f"""
+@{ctx.sender}
+🟡 I'm going to trigger an ATLAS experiment pipeline for you:
+
+{variable_summary}
+ """
+ comment = await gh.post(pr["comments_url"], data={"body": body})
+
+ variables = {
+ "ACTS_GIT_REPO": head_clone_url,
+ "ACTS_REF": head_branch,
+ "SOURCE_SHA": head_sha,
+ "PR_URL": pr["url"],
+ "REVERT_SHAS": ",".join(revert_sha),
+ "REPORT_COMMENT_URL": comment["url"],
+ }
+ data = {
+ "token": gitlab_trigger_token,
+ "ref": "main",
+ **{f"variables[{k}]": v for k, v in variables.items()},
+ }
+ print(gitlab_trigger_url)
+ print(data)
+ async with session.post(
+ url=gitlab_trigger_url,
+ data=data,
+ ) as resp:
+ if resp.status != 201:
+ body = f"""
+@{ctx.sender}
+🔴 I'm sorry, I couldn't run your command because of an error:
+```
+{await resp.text()}
+```
+{variable_summary}
+ """
+ await gh.post(comment["url"], data={"body": body})
+
+ return
+
+ data = await resp.json()
+ pipeline_url = data["web_url"]
+
+ body = f"""
+@{ctx.sender}
+🟡 I triggered an ATLAS experiment [pipeline]({pipeline_url}) for you
+
+{variable_summary}
+ """
+ await gh.post(comment["url"], data={"body": body})
+
+
+async def get_author_in_team(gh: GitHubAPI, author: str, allow_team: str) -> bool:
+ allow_org, allow_team = allow_team.split("/", 1)
+
+ try:
+ membership = await gh.getitem(
+ f"/orgs/{allow_org}/teams/{allow_team}/memberships/{author}"
+ )
+ return True
+ except gidgethub.BadRequest as e:
+ if e.status_code != 404:
+ raise e
+
+ return False
+
+
+async def preflight(
+ token: str, pr_url: str, sender: str, repository: str, allow_team: str
+):
+ async with aiohttp.ClientSession() as session:
+ gh = GitHubAPI(session, "acts-commands", oauth_token=token)
+
+ if not await get_author_in_team(gh, sender, allow_team):
+ raise RuntimeError(f"{sender} is not in {allow_team}")
+
+ return await gh.getitem(pr_url)
+
+
+async def report_error(token: str, pr: Dict[str, Any], sender: str, error: Exception):
+ async with aiohttp.ClientSession() as session:
+ gh = GitHubAPI(session, "acts-commands", oauth_token=token)
+
+ body = f"""
+@{sender}
+🔴 I'm sorry, I couldn't run your command because of an error:
+```
+{error}
+```
+"""
+ await gh.post(pr["comments_url"], data={"body": body})
+
+
+def main(
+ pr: str = typer.Option(),
+ body: str = typer.Option(),
+ sender: str = typer.Option(),
+ repository: str = typer.Option(),
+ allow_team: str = typer.Option("acts-project/ci-perms", envvar="ALLOW_TEAM"),
+):
+ if Path(body).exists():
+ body = Path(body).read_text().strip()
+
+ if len(body.split("\n")) > 1:
+ raise typer.BadParameter("Body must be a single line")
+
+ if not body.startswith("/"):
+ raise typer.BadParameter("Body must start with a slash")
+ body = body[1:]
+
+ args = shlex.split(body)
+
+ token = os.environ["GITHUB_TOKEN"]
+ pr = asyncio.run(preflight(token, pr, sender, repository, allow_team))
+
+ try:
+ app(
+ args,
+ obj=Context(pr=pr, github_token=token, sender=sender),
+ standalone_mode=False,
+ )
+ except (CommandError, click.exceptions.ClickException) as e:
+ asyncio.run(report_error(token, pr, sender, e))
+
+
+typer.run(main)
diff --git a/CI/commands/requirements.in b/CI/commands/requirements.in
new file mode 100644
index 00000000000..9ed565536ec
--- /dev/null
+++ b/CI/commands/requirements.in
@@ -0,0 +1,3 @@
+typer
+aiohttp
+gidgethub
diff --git a/CI/commands/requirements.txt b/CI/commands/requirements.txt
new file mode 100644
index 00000000000..3a1add6221f
--- /dev/null
+++ b/CI/commands/requirements.txt
@@ -0,0 +1,46 @@
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile CI/commands/requirements.in
+#
+aiohttp==3.8.5
+ # via -r CI/commands/requirements.in
+aiosignal==1.3.1
+ # via aiohttp
+async-timeout==4.0.3
+ # via aiohttp
+attrs==23.1.0
+ # via aiohttp
+cffi==1.15.1
+ # via cryptography
+charset-normalizer==3.2.0
+ # via aiohttp
+click==8.1.7
+ # via typer
+cryptography==41.0.3
+ # via pyjwt
+frozenlist==1.4.0
+ # via
+ # aiohttp
+ # aiosignal
+gidgethub==5.3.0
+ # via -r CI/commands/requirements.in
+idna==3.4
+ # via yarl
+multidict==6.0.4
+ # via
+ # aiohttp
+ # yarl
+pycparser==2.21
+ # via cffi
+pyjwt[crypto]==2.8.0
+ # via gidgethub
+typer==0.9.0
+ # via -r CI/commands/requirements.in
+typing-extensions==4.7.1
+ # via typer
+uritemplate==4.1.1
+ # via gidgethub
+yarl==1.9.2
+ # via aiohttp
diff --git a/CI/missing_include_check.sh b/CI/missing_include_check.sh
new file mode 100755
index 00000000000..970c4a396c4
--- /dev/null
+++ b/CI/missing_include_check.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+RET=0
+ERRORS=0
+
+FILES=$(find Core/include/ -name "*.hpp" | grep -v "/detail/")
+N_FILES=$(echo "$FILES" | wc -l)
+echo "Check $N_FILES files"
+
+ITER=0
+
+for file in $FILES; do
+ ITER=$((ITER+1))
+ echo "$(date +%H:%M:%S) $((100*ITER/N_FILES))% check $file"
+ out=$(printf "#include <${file:13}>\nint main() { return 0; }" | clang++ -std=c++17 -O0 -c -I "Core/include" -I "/usr/include/eigen3" -x c++ - 2>&1)
+ if [[ "$?" -ne "0" ]]; then
+ echo "------------------------------------"
+ echo "$out"
+ echo "------------------------------------"
+ RET=1
+ ERRORS=$((ERRORS+1))
+ fi
+done
+
+echo "Total errors: $ERRORS"
+exit $RET
diff --git a/CI/physmon/comment_template.md b/CI/physmon/comment_template.md
deleted file mode 100644
index 6717aa480ad..00000000000
--- a/CI/physmon/comment_template.md
+++ /dev/null
@@ -1,167 +0,0 @@
-## :bar_chart: Physics performance monitoring for {{ commit }}
-{% if has_errors %}
-> :red_square: **ERROR** The result has missing elements!
-> This is likely a physmon job failure
-{% endif %}
-
-[Summary]({{ url }}/summary.html)
-[Full report]({{ url }}/)
-Seeding: {{ make_url("seeded", "seeding_seeded.html") }}, {{ make_url("truth estimated", "seeding_truth_estimated.html") }}, {{ make_url("orthogonal", "seeding_orthogonal.html") }}
-CKF: {{ make_url("seeded", "ckf_seeded.html") }}, {{ make_url("truth smeared", "ckf_truth_smeared.html") }}, {{ make_url("truth estimated", "ckf_truth_estimated.html") }}, {{ make_url("orthogonal", "ckf_orthogonal.html") }}
-IVF: {{ make_url("seeded", "ivf_seeded.html") }}, {{ make_url("truth smeared", "ivf_truth_smeared.html") }}, {{ make_url("truth estimated", "ivf_truth_estimated.html") }}, {{ make_url("orthogonal", "ivf_orthogonal.html") }}
-AMVF: {{ make_url("seeded", "amvf_seeded.html") }}, {{ make_url("truth smeared", "amvf_truth_smeared.html") }}, {{ make_url("truth estimated", "amvf_truth_estimated.html") }}, {{ make_url("orthogonal", "amvf_orthogonal.html") }}
-Ambiguity resolution: {{ make_url("seeded", "ambi_seeded.html") }}, {{ make_url("orthogonal", "ambi_orthogonal.html") }}
-{{ make_url("Truth tracking", "truth_tracking.html") }}
-{{ make_url("Truth tracking (GSF)", "gsf.html")}}
-
-### Vertexing {{ "" if all_exist(
- "vertexing_mu_scan.pdf",
- "ivf_seeded_plots",
- "ivf_truth_smeared_plots",
- "ivf_truth_estimated_plots",
- "ivf_orthogonal_plots",
- "amvf_seeded_plots",
- "amvf_truth_smeared_plots",
- "amvf_truth_estimated_plots",
- "amvf_orthogonal_plots",
-) else ":x: "}}
-
-{% call detail_block("Vertexing vs. mu", "vertexing_mu_scan.pdf") %}
-{{ make_image("vertexing_mu_scan.pdf", 350) }}
-{% endcall %}
-
-{% for mode in ["seeded", "truth_smeared", "truth_estimated", "orthogonal"] %}
-
-{% call detail_block("IVF "+mode, "ivf_"+mode+"_plots") %}
-
-{% for url in [
- "covXX.pdf",
- "covYY.pdf",
- "covZZ.pdf",
- "resX.pdf",
- "resY.pdf",
- "resZ.pdf",
- "recoOverTrue.pdf",
-] -%}
-{{- make_image("ivf_"+mode+"_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-{% endfor %}
-
-{% for mode in ["seeded", "truth_smeared", "truth_estimated", "orthogonal"] %}
-
-{% call detail_block("AMVF "+mode, "amvf_"+mode+"_plots") %}
-
-{% for url in [
- "covXX.pdf",
- "covYY.pdf",
- "covZZ.pdf",
- "resX.pdf",
- "resY.pdf",
- "resZ.pdf",
- "recoOverTrue.pdf",
-] -%}
-{{- make_image("amvf_"+mode+"_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-{% endfor %}
-
-### Seeding {{ "" if all_exist(
- "seeding_seeded_plots",
- "seeding_truth_estimated_plots",
- "seeding_orthogonal_plots",
-) else ":x: "}}
-
-{% for mode in ["seeded", "truth_estimated", "orthogonal"] %}
-
-{% call detail_block("Seeding "+mode, "seeding_"+mode+"_plots") %}
-
-{% for url in [
- "trackeff_vs_eta.pdf",
- "trackeff_vs_pT.pdf",
- "nDuplicated_vs_eta.pdf",
- "nDuplicated_vs_pT.pdf",
-] -%}
-{{- make_image("seeding_"+mode+"_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-{% endfor %}
-
-### CKF {{ "" if all_exist(
- "ckf_seeded_plots",
- "ckf_truth_smeared_plots",
- "ckf_truth_estimated_plots",
- "ckf_orthogonal_plots",
-) else ":x: "}}
-
-{% for mode in ["seeded", "truth_smeared", "truth_estimated", "orthogonal"] %}
-
-{% call detail_block("CKF "+mode, "ckf_"+mode+"_plots") %}
-
-{% for url in [
- "trackeff_vs_eta.pdf",
- "trackeff_vs_pT.pdf",
- "nHoles_vs_eta.pdf",
- "nMeasurements_vs_eta.pdf",
-] -%}
-{{- make_image("ckf_"+mode+"_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-{% endfor %}
-
-### Ambiguity resolution {{ "" if exists("ambi_seeded_plots") else ":x: "}}
-
-{% call detail_block("seeded", "ambi_seeded_plots") %}
-
-{% for url in [
- "trackeff_vs_eta.pdf",
- "trackeff_vs_pT.pdf",
- "nHoles_vs_eta.pdf",
- "nMeasurements_vs_eta.pdf",
-] -%}
-{{- make_image("ambi_seeded_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-### Truth tracking (Kalman Filter) {{ "" if exists("truth_tracking_plots") else ":x: "}}
-
-{% call detail_block("Truth tracking", "truth_tracking_plots") %}
-
-{% for url in [
- "nHoles_vs_eta.pdf",
- "nMeasurements_vs_eta.pdf",
- "pull_d0.pdf",
- "pull_z0.pdf",
- "pull_theta.pdf",
- "pull_phi.pdf",
- "pull_qop.pdf",
- "pull_t.pdf",
-] -%}
-{{- make_image("truth_tracking_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
-
-### Truth tracking (GSF) {{ "" if exists("truth_tracking_plots") else ":x: "}}
-
-{% call detail_block("Truth tracking", "truth_tracking_plots") %}
-
-{% for url in [
- "pull_d0.pdf",
- "res_d0.pdf",
- "pull_qop.pdf",
- "res_qop.pdf",
-] -%}
-{{- make_image("gsf_plots/"+url, "50%") -}}
-{%- endfor %}
-
-{% endcall %}
diff --git a/CI/physmon/generate_comment.py b/CI/physmon/generate_comment.py
deleted file mode 100755
index 00854a3107c..00000000000
--- a/CI/physmon/generate_comment.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python3
-
-
-### CHANGE BELOW WITH CAUTION ###
-
-import sys
-import os
-from pathlib import Path, PurePath
-
-import jinja2
-
-template_source = (Path(__file__).parent / "comment_template.md").read_text()
-
-macro_source = """
-{% macro detail_block(title, check) %}
-{% if exists(check) %}
-{{ title }}
- {{ caller() }}
-
\s*(.*)\s*<\/p>', re.RegexFlag.MULTILINE)
re_check = re.compile(r'physmon summary
- """
- )
+
-
+ """
+ )
+
+ for s in summary:
+ f.write(
+ f"""
+
+
+
+ """
)
- f.write(
- """
-