diff --git a/.pyproject_generation/service_template.toml b/.pyproject_generation/service_template.toml deleted file mode 100644 index f3d823f8..00000000 --- a/.pyproject_generation/service_template.toml +++ /dev/null @@ -1,6 +0,0 @@ -[build-system] -requires = ["setuptools>=67.7.2"] -build-backend = "setuptools.build_meta" - -[tool.setuptools.packages.find] -where = ["src"] diff --git a/.readme_generation/description.md b/.readme_generation/description.md index 2ad6dc4f..cb9ccaf2 100644 --- a/.readme_generation/description.md +++ b/.readme_generation/description.md @@ -1,3 +1 @@ - - -Here you should provide a short summary of the purpose of this microservice. +This is a monorepo containing all GHGA file backend microservices. diff --git a/.readme_generation/design.md b/.readme_generation/design.md deleted file mode 100644 index c0afce49..00000000 --- a/.readme_generation/design.md +++ /dev/null @@ -1,7 +0,0 @@ - - -This is monorepo containing Python-based services following the Triple Hexagonal Architecture pattern. -It uses protocol/provider pairs and dependency injection mechanisms provided by the -[hexkit](https://github.com/ghga-de/hexkit) library. 
diff --git a/.readme_generation/readme_template_monorepo.md b/.readme_generation/readme_template_monorepo.md index c7e27d25..8de834be 100644 --- a/.readme_generation/readme_template_monorepo.md +++ b/.readme_generation/readme_template_monorepo.md @@ -9,10 +9,6 @@ $summary $description -## Architecture and Design: - -$design_description - ## Services: $service_readmes diff --git a/.readme_generation/readme_template_service.md b/.readme_generation/readme_template_service.md index 98a00c09..4ee577ab 100644 --- a/.readme_generation/readme_template_service.md +++ b/.readme_generation/readme_template_service.md @@ -1,6 +1,3 @@ -[![tests](https://github.com/ghga-de/$repo_name/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/$repo_name/actions/workflows/tests.yaml) -[![Coverage Status](https://coveralls.io/repos/github/ghga-de/$repo_name/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/$repo_name?branch=main) - # $title $summary diff --git a/README.md b/README.md index 322fb9cf..7129e3e6 100644 --- a/README.md +++ b/README.md @@ -7,27 +7,14 @@ File Services Backend - monorepo housing file services ## Description - - -Here you should provide a short summary of the purpose of this microservice. - - -## Architecture and Design: - - - -This is monorepo containing Python-based services following the Triple Hexagonal Architecture pattern. -It uses protocol/provider pairs and dependency injection mechanisms provided by the -[hexkit](https://github.com/ghga-de/hexkit) library. +This is a monorepo containing all GHGA file backend microservices. 
## Services: -[Internal-File-Registry-Service - This service acts as a registry for the internal location and representation of files.](services/ifrs/README.md) +[Internal File Registry Service](services/ifrs/README.md) [Interrogation Room Service](services/irs/README.md) -[Purge Controller Service - a service to commission file deletions](services/pcs/README.md) +[Purge Controller Service](services/pcs/README.md) ## Development: diff --git a/lock/requirements-dev.txt b/lock/requirements-dev.txt index 687cc2be..ebc36a24 100644 --- a/lock/requirements-dev.txt +++ b/lock/requirements-dev.txt @@ -1,31 +1,37 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --refresh --generate-hashes --output-file lock/requirements-dev.txt /tmp/tmpvrxms8ag/pyproject.toml lock/requirements-dev.in -aiokafka==0.8.1 \ - --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ - --hash=sha256:1f43d2afd7d3e4407ada8d754895fad7c344ca00648a8a38418d76564eaaf6cd \ - --hash=sha256:1f6044ed270b946d31f265903b5eb101940ed0ff3a902eaf8178103c943bbcc9 \ - --hash=sha256:24373bb2d519abac036d5b04ebc43452ef4ad1916953b6678b9801a9c93ba237 \ - --hash=sha256:2fa54b8b068d9d8735cb6757a0f48168f8cf9be68860b0bae6b3ed1684cef49b \ - --hash=sha256:3816bcfc3c57dfa4ed77fe1dc3a9a464e17b6400061348155115f282c8150c47 \ - --hash=sha256:45cd28af6590d6a999bb706803166570121ba8a5a0d06c51ebd8a59fab53593c \ - --hash=sha256:4693fbe3c10f125bf3e2df8a8ccbca3eff2bdaaa6589d28c7532c10e7d84598b \ - --hash=sha256:4fccd599ab6b3fda4f4187d854b343f153b40d05d6774be9acf238618da50031 \ - --hash=sha256:6421ee81084532f915501074a132acb2afc8cb88bf5ddb11e584230a30f6f006 \ - --hash=sha256:673c163dee62dfe45146d5250af0e395da5cc92b63f8878c592abc7dc1862899 \ - --hash=sha256:7d327d66b41c4e3bafff7f9efb71936a08f940aa665680717e20862e4272a068 \ - --hash=sha256:7f09784322c0d2c4fcc222add4337a5ac394aa30a248eb4e0e4587a125573c75 \ - --hash=sha256:90960356513f3979754261b132b12a96b0d9e3c6eb44420e3a90a7c31156a81a 
\ - --hash=sha256:935da8c4da9a00a1e16020d88e578206097b4bb72ebc2a25fbd2cb817907ef28 \ - --hash=sha256:9f19d90b7360bc2239fcd8b147508ae39c3e5b1acfc8e6a2a9b0f306070f7ffe \ - --hash=sha256:a8a641a8102c51422afe111d4bc70c51f335f38fc5906e4c839bd17afeaf3cb2 \ - --hash=sha256:af6df9a41e08b61d7e62c0a416feeabd81bad76fa5c70d499b083d6af9ce72c3 \ - --hash=sha256:b2bf97548fa77ad31062ca580368d346b16ba9fdca5856c435f256f3699ab12b \ - --hash=sha256:bbffc431d9285328c0bc108949132ae11cec863f1dd5a43a1fc3d45a69ffb8a9 \ - --hash=sha256:bf7473c55dc7959d4b7f9d750fa6017b325813d6cb761e488c2d9ea44e922954 \ - --hash=sha256:c4332d37cb9d52181cfda4236566b4028c7c188549277f87bcc3027577d72b1b \ - --hash=sha256:d300188e358cd29989c817f6ee2a2965a039e5a71de8ade6f80f02ebb9bd07b8 \ - --hash=sha256:fd8f9e17bc9cd2ea664a7f5133aede39a8fffebffe0c450252d475dbdedb4a35 \ - --hash=sha256:ff318d29ecbeea8c58d69c91c24d48d7ed4a8d3e829b607e670d118a9a35d5ba +# uv pip compile --refresh --generate-hashes --output-file lock/requirements-dev.txt /tmp/tmpb93jgb01/pyproject.toml lock/requirements-dev.in +aiokafka==0.10.0 \ + --hash=sha256:007f1c51f440cc07155d2491f4deea6536492324153296aa73736a74cd833d3e \ + --hash=sha256:05c4a7ced5d6f3dbc289767574d6a5d9b31e1c243e992dcecd34dbc40fcbbf9b \ + --hash=sha256:06060708a4bcf062be496c8641fca382c88782d3c381a34ccb5ac8677bdac695 \ + --hash=sha256:12d703317812262feac6577ff488f2ccddc4408da0ff608a5454062782b5a80d \ + --hash=sha256:1509c1b29cd1d4d920a649f257d72109bbc3d61431135505b8e0d8d488796ff2 \ + --hash=sha256:1fe0194ea72524df37369a8cf0837263b55194ac20616e612f0ab7bfb568b76b \ + --hash=sha256:22299f8d5269dcb00b1b53fdee44dbe729091d4038e1bb63d0bb2f5cdf9af47a \ + --hash=sha256:5efb63686562809f0f9bf0fa6d1e52f222af2d8f8441f8c412b156f15c98da43 \ + --hash=sha256:6e10fdee4189fe7eed36d602df822e9ff4f19535c0a514cf015f78308d206c1a \ + --hash=sha256:7068f0beb8478cde09618dcc9a833cc18ff37bd14864fa8b60ad4e4c3dad6489 \ + --hash=sha256:74229a57c95e2efccec95d9b42554dc168c97a263f013e3e983202bd33ca189d \ + 
--hash=sha256:781ab300214681e40667185a402abf6b31b4c4b8f1cdabbdc3549d8cf383b34d \ + --hash=sha256:7ce35563f955490b43190e3389b5f3d92d50e22b32d1a40772fd14fb1d50c5db \ + --hash=sha256:82a75ea13d7e6e11c7ee2fb9419e9ea3541744648c69ab27b56fb6bca5b319c1 \ + --hash=sha256:8b74aeacfb8ced9764002c63b58e4c78c94809131d89000cb936c25c298ffb1e \ + --hash=sha256:9728c523f10ac4bb46719cc64f3c1d47625898872bc3901b22b9d48b6e401d1c \ + --hash=sha256:99127ab680f9b08b0213d00b7d1e0480c6d08601f52ad42e829350f9599db301 \ + --hash=sha256:b91109dc25f79be4d27454cc766239a5368d18b26682d4b5c6b913ca92691220 \ + --hash=sha256:c23ec22fbf26e2f84678f0589076bea1ff26ae6dfd3c601e6de10ad00d605261 \ + --hash=sha256:cf4a47659517000a8fe88e0fb353898b718ee214e21f62a2a949be9bf801cd9e \ + --hash=sha256:cf9e241766b7f4c305807763330dacf8c220ad9e8fc7f2b22730a2db66fad61d \ + --hash=sha256:d52c25f3d0db7dd340a5d08108da302db1ba64c2190970dbdb768b79629d6add \ + --hash=sha256:de56c503b3d64e24a5b6705e55bc524a8357b0495402f859f921a71d65274cb1 \ + --hash=sha256:e16d8a23f0e173e5ca86c2d1c270e25a529a0eed973c77d7e8a0dfc868699aa4 \ + --hash=sha256:e833e4ef7fc5f3f637ba5fb4210acc7e5ea916bb7107e4b619b1b1a3e361bc62 \ + --hash=sha256:ebe5be9f578e89e6db961121070f7c35662924abee00ba4ccf64557e2cdd7edf \ + --hash=sha256:f069bda1f31e466d815b631a07bc6fad5190b29dfff5f117bcbf1948cd7a38aa \ + --hash=sha256:f2f19dee69c69389f5911e6b23c361c5285366d237f782eaae118d12acc42d7f \ + --hash=sha256:f4b22a31f40493cea50dddb4dfc92750dfb273635ccb094a16fde9678eb38958 \ + --hash=sha256:fafc95bdaed9e1810fcd80b02ac117e51c72681ffe50353e5d61e2170609e1fc \ + --hash=sha256:ffc30e4c6bfcb00356a002f623c93a51d8336ca67687ea069dd11822da07379c # via hexkit annotated-types==0.6.0 \ --hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \ @@ -391,9 +397,9 @@ h11==0.14.0 \ # via # httpcore # uvicorn -hexkit==2.2.0 \ - --hash=sha256:985455cb10e56224afa66f835f185fe87db4d8d0db27e1a9ef9afd105d6f5565 \ - 
--hash=sha256:e3ab3cf8878614f6c50a905d0b183c665d54fe1dec5a74d55117bd9320bb4f7d +hexkit==3.0.2 \ + --hash=sha256:172c55b756ea0a5ff5e83ba97860947408a132bb37800404e6e7b1cabab9d3e7 \ + --hash=sha256:d8cb77d64c6021725f6e40e87d62792e22a8928810de983f96a6ecfe9245f1f8 # via ghga-service-commons httpcore==1.0.5 \ --hash=sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61 \ @@ -476,10 +482,6 @@ jsonschema-specifications==2023.12.1 \ jsonschema2md==1.1.0 \ --hash=sha256:2386fc4d119330686db3989ea497ab96a4defb6388386fc0ceff756b5c1a66a7 \ --hash=sha256:e89edf2de1bc7fc3e842915c7c29b7b70888555a87002eccc06350c0412a1458 -kafka-python==2.0.2 \ - --hash=sha256:04dfe7fea2b63726cd6f3e79a2d86e709d608d74406638c5da33a01d45a9d7e3 \ - --hash=sha256:2d92418c7cb1c298fa6c7f0fb3519b520d0d7526ac6cb7ae2a4fc65a51a94b6e - # via aiokafka logot==1.3.0 \ --hash=sha256:bb2e8cf8ca949015e1e096e45023095ebd5df06ea4627f5df47d53dcdf62b74e \ --hash=sha256:de392d182308828a0a9a442120e25e4ad2258fef52c4ed275e012aaffb0514a5 diff --git a/lock/requirements.txt b/lock/requirements.txt index 3e3089ca..acb67700 100644 --- a/lock/requirements.txt +++ b/lock/requirements.txt @@ -1,31 +1,37 @@ # This file was autogenerated by uv via the following command: -# uv pip compile --refresh --generate-hashes --output-file lock/requirements.txt /tmp/tmpvrxms8ag/pyproject.toml -c lock/requirements-dev.txt -aiokafka==0.8.1 \ - --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ - --hash=sha256:1f43d2afd7d3e4407ada8d754895fad7c344ca00648a8a38418d76564eaaf6cd \ - --hash=sha256:1f6044ed270b946d31f265903b5eb101940ed0ff3a902eaf8178103c943bbcc9 \ - --hash=sha256:24373bb2d519abac036d5b04ebc43452ef4ad1916953b6678b9801a9c93ba237 \ - --hash=sha256:2fa54b8b068d9d8735cb6757a0f48168f8cf9be68860b0bae6b3ed1684cef49b \ - --hash=sha256:3816bcfc3c57dfa4ed77fe1dc3a9a464e17b6400061348155115f282c8150c47 \ - --hash=sha256:45cd28af6590d6a999bb706803166570121ba8a5a0d06c51ebd8a59fab53593c \ - 
--hash=sha256:4693fbe3c10f125bf3e2df8a8ccbca3eff2bdaaa6589d28c7532c10e7d84598b \ - --hash=sha256:4fccd599ab6b3fda4f4187d854b343f153b40d05d6774be9acf238618da50031 \ - --hash=sha256:6421ee81084532f915501074a132acb2afc8cb88bf5ddb11e584230a30f6f006 \ - --hash=sha256:673c163dee62dfe45146d5250af0e395da5cc92b63f8878c592abc7dc1862899 \ - --hash=sha256:7d327d66b41c4e3bafff7f9efb71936a08f940aa665680717e20862e4272a068 \ - --hash=sha256:7f09784322c0d2c4fcc222add4337a5ac394aa30a248eb4e0e4587a125573c75 \ - --hash=sha256:90960356513f3979754261b132b12a96b0d9e3c6eb44420e3a90a7c31156a81a \ - --hash=sha256:935da8c4da9a00a1e16020d88e578206097b4bb72ebc2a25fbd2cb817907ef28 \ - --hash=sha256:9f19d90b7360bc2239fcd8b147508ae39c3e5b1acfc8e6a2a9b0f306070f7ffe \ - --hash=sha256:a8a641a8102c51422afe111d4bc70c51f335f38fc5906e4c839bd17afeaf3cb2 \ - --hash=sha256:af6df9a41e08b61d7e62c0a416feeabd81bad76fa5c70d499b083d6af9ce72c3 \ - --hash=sha256:b2bf97548fa77ad31062ca580368d346b16ba9fdca5856c435f256f3699ab12b \ - --hash=sha256:bbffc431d9285328c0bc108949132ae11cec863f1dd5a43a1fc3d45a69ffb8a9 \ - --hash=sha256:bf7473c55dc7959d4b7f9d750fa6017b325813d6cb761e488c2d9ea44e922954 \ - --hash=sha256:c4332d37cb9d52181cfda4236566b4028c7c188549277f87bcc3027577d72b1b \ - --hash=sha256:d300188e358cd29989c817f6ee2a2965a039e5a71de8ade6f80f02ebb9bd07b8 \ - --hash=sha256:fd8f9e17bc9cd2ea664a7f5133aede39a8fffebffe0c450252d475dbdedb4a35 \ - --hash=sha256:ff318d29ecbeea8c58d69c91c24d48d7ed4a8d3e829b607e670d118a9a35d5ba +# uv pip compile --refresh --generate-hashes --output-file lock/requirements.txt /tmp/tmpb93jgb01/pyproject.toml -c lock/requirements-dev.txt +aiokafka==0.10.0 \ + --hash=sha256:007f1c51f440cc07155d2491f4deea6536492324153296aa73736a74cd833d3e \ + --hash=sha256:05c4a7ced5d6f3dbc289767574d6a5d9b31e1c243e992dcecd34dbc40fcbbf9b \ + --hash=sha256:06060708a4bcf062be496c8641fca382c88782d3c381a34ccb5ac8677bdac695 \ + --hash=sha256:12d703317812262feac6577ff488f2ccddc4408da0ff608a5454062782b5a80d \ + 
--hash=sha256:1509c1b29cd1d4d920a649f257d72109bbc3d61431135505b8e0d8d488796ff2 \ + --hash=sha256:1fe0194ea72524df37369a8cf0837263b55194ac20616e612f0ab7bfb568b76b \ + --hash=sha256:22299f8d5269dcb00b1b53fdee44dbe729091d4038e1bb63d0bb2f5cdf9af47a \ + --hash=sha256:5efb63686562809f0f9bf0fa6d1e52f222af2d8f8441f8c412b156f15c98da43 \ + --hash=sha256:6e10fdee4189fe7eed36d602df822e9ff4f19535c0a514cf015f78308d206c1a \ + --hash=sha256:7068f0beb8478cde09618dcc9a833cc18ff37bd14864fa8b60ad4e4c3dad6489 \ + --hash=sha256:74229a57c95e2efccec95d9b42554dc168c97a263f013e3e983202bd33ca189d \ + --hash=sha256:781ab300214681e40667185a402abf6b31b4c4b8f1cdabbdc3549d8cf383b34d \ + --hash=sha256:7ce35563f955490b43190e3389b5f3d92d50e22b32d1a40772fd14fb1d50c5db \ + --hash=sha256:82a75ea13d7e6e11c7ee2fb9419e9ea3541744648c69ab27b56fb6bca5b319c1 \ + --hash=sha256:8b74aeacfb8ced9764002c63b58e4c78c94809131d89000cb936c25c298ffb1e \ + --hash=sha256:9728c523f10ac4bb46719cc64f3c1d47625898872bc3901b22b9d48b6e401d1c \ + --hash=sha256:99127ab680f9b08b0213d00b7d1e0480c6d08601f52ad42e829350f9599db301 \ + --hash=sha256:b91109dc25f79be4d27454cc766239a5368d18b26682d4b5c6b913ca92691220 \ + --hash=sha256:c23ec22fbf26e2f84678f0589076bea1ff26ae6dfd3c601e6de10ad00d605261 \ + --hash=sha256:cf4a47659517000a8fe88e0fb353898b718ee214e21f62a2a949be9bf801cd9e \ + --hash=sha256:cf9e241766b7f4c305807763330dacf8c220ad9e8fc7f2b22730a2db66fad61d \ + --hash=sha256:d52c25f3d0db7dd340a5d08108da302db1ba64c2190970dbdb768b79629d6add \ + --hash=sha256:de56c503b3d64e24a5b6705e55bc524a8357b0495402f859f921a71d65274cb1 \ + --hash=sha256:e16d8a23f0e173e5ca86c2d1c270e25a529a0eed973c77d7e8a0dfc868699aa4 \ + --hash=sha256:e833e4ef7fc5f3f637ba5fb4210acc7e5ea916bb7107e4b619b1b1a3e361bc62 \ + --hash=sha256:ebe5be9f578e89e6db961121070f7c35662924abee00ba4ccf64557e2cdd7edf \ + --hash=sha256:f069bda1f31e466d815b631a07bc6fad5190b29dfff5f117bcbf1948cd7a38aa \ + --hash=sha256:f2f19dee69c69389f5911e6b23c361c5285366d237f782eaae118d12acc42d7f \ + 
--hash=sha256:f4b22a31f40493cea50dddb4dfc92750dfb273635ccb094a16fde9678eb38958 \ + --hash=sha256:fafc95bdaed9e1810fcd80b02ac117e51c72681ffe50353e5d61e2170609e1fc \ + --hash=sha256:ffc30e4c6bfcb00356a002f623c93a51d8336ca67687ea069dd11822da07379c # via hexkit annotated-types==0.6.0 \ --hash=sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43 \ @@ -217,9 +223,9 @@ h11==0.14.0 \ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 # via uvicorn -hexkit==2.2.0 \ - --hash=sha256:985455cb10e56224afa66f835f185fe87db4d8d0db27e1a9ef9afd105d6f5565 \ - --hash=sha256:e3ab3cf8878614f6c50a905d0b183c665d54fe1dec5a74d55117bd9320bb4f7d +hexkit==3.0.2 \ + --hash=sha256:172c55b756ea0a5ff5e83ba97860947408a132bb37800404e6e7b1cabab9d3e7 \ + --hash=sha256:d8cb77d64c6021725f6e40e87d62792e22a8928810de983f96a6ecfe9245f1f8 # via ghga-service-commons httptools==0.6.1 \ --hash=sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563 \ @@ -281,10 +287,6 @@ jsonschema-specifications==2023.12.1 \ --hash=sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc \ --hash=sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c # via jsonschema -kafka-python==2.0.2 \ - --hash=sha256:04dfe7fea2b63726cd6f3e79a2d86e709d608d74406638c5da33a01d45a9d7e3 \ - --hash=sha256:2d92418c7cb1c298fa6c7f0fb3519b520d0d7526ac6cb7ae2a4fc65a51a94b6e - # via aiokafka markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb diff --git a/pyproject.toml b/pyproject.toml index 7868dbdd..9ea6d95f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,6 +2,13 @@ name = "fsb" version = "0.1.0" description = "File Services Backend - monorepo housing file services" +dependencies = [ + "typer >= 0.12", + "ghga-service-commons[api] >= 3.1", 
+ "ghga-event-schemas >= 3.1", + "hexkit[akafka,s3,mongodb] >= 3.0", + "crypt4gh>=1.6", +] readme = "README.md" authors = [ { name = "German Human Genome Phenome Archive (GHGA)", email = "contact@ghga.de" }, @@ -18,13 +25,6 @@ classifiers = [ "Topic :: Software Development :: Libraries", "Intended Audience :: Developers", ] -dependencies = [ - "typer >= 0.9.0", - "ghga-service-commons[api] >= 3.0.0, <4", - "ghga-event-schemas >= 3.0.0, <4", - "hexkit[akafka,s3,mongodb] >= 2, <3", - "crypt4gh>=1.6", -] [project.urls] Repository = "https://github.com/ghga-de/file-services-backend" @@ -43,7 +43,7 @@ exclude = [ line-length = 88 src = [ "src", - "tests_*", + "tests", "examples", "scripts", ] @@ -96,7 +96,7 @@ max-complexity = 10 "SIM", "D", ] -"services/*/tests_*/*" = [ +"services/*/tests/*" = [ "S", "SIM", "PLR", diff --git a/scripts/update_all.py b/scripts/update_all.py deleted file mode 100755 index f2c2b6f7..00000000 --- a/scripts/update_all.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln -# for the German Human Genome-Phenome Archive (GHGA) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -"""Run all update scripts that are present in the repository in the correct order""" - -try: - from update_template_files import main as update_template -except ImportError: - print("update_template_files script not found") -else: - print("Pulling in updates from template repository") - update_template() - -try: - from update_pyproject import process_pyproject as update_pyproject -except ImportError: - print("update_pyproject script not found") -else: - print("Updating pyproject.toml file") - update_pyproject() - -try: - from update_lock import main as update_lock -except ImportError: - print("update_lock script not found") -else: - print("Upgrading the lock file") - update_lock(upgrade=True) - -try: - from update_hook_revs import main as update_hook_revs -except ImportError: - print("update_hook_revs script not found") -else: - print("Updating config docs") - update_hook_revs() - -try: - from update_config_docs import main as update_config -except ImportError: - print("update_config_docs script not found") -else: - print("Updating config docs") - update_config() - -try: - from update_openapi_docs import main as update_openapi -except ImportError: - print("update_openapi_docs script not found") -else: - print("Updating OpenAPI docs") - update_openapi() - -try: - from scripts.update_readme_services import main as update_readme -except ImportError: - print("update_readme script not found") -else: - print("Updating README") - update_readme() diff --git a/scripts/update_config_docs.py b/scripts/update_config_docs.py index 13b89786..2a802d6d 100755 --- a/scripts/update_config_docs.py +++ b/scripts/update_config_docs.py @@ -35,7 +35,6 @@ HERE = Path(__file__).parent.resolve() REPO_ROOT_DIR = HERE.parent SERVICES_DIR = REPO_ROOT_DIR / "services" -DEV_FOLDER_NAME = ".dev" GET_PACKAGE_NAME_SCRIPT = HERE / "get_package_name.py" dev_config_yaml = REPO_ROOT_DIR / ".devcontainer" / ".dev_config.yaml" @@ -59,7 +58,7 @@ def set_service_specific_vars(service: str): exit(1) # 
set the vars - dev_config_yaml = service_dir / DEV_FOLDER_NAME / ".dev_config.yaml" + dev_config_yaml = service_dir / "dev_config.yaml" config_schema_json = service_dir / "config_schema.json" example_config_yaml = service_dir / "example_config.yaml" diff --git a/scripts/update_openapi_docs.py b/scripts/update_openapi_docs.py index c0d3658d..b0d70ed7 100755 --- a/scripts/update_openapi_docs.py +++ b/scripts/update_openapi_docs.py @@ -55,7 +55,7 @@ def set_service_specific_vars(service: str): # set the vars openapi_yaml = service_dir / "openapi.yaml" - app_openapi_script = service_dir / ".dev" / "app_openapi.py" + app_openapi_script = service_dir / "scripts" / "app_openapi.py" yield diff --git a/scripts/update_pyproject.py b/scripts/update_pyproject.py index 35025863..eda6808a 100755 --- a/scripts/update_pyproject.py +++ b/scripts/update_pyproject.py @@ -21,7 +21,6 @@ from __future__ import annotations import sys -from contextlib import contextmanager from pathlib import Path import tomli @@ -30,38 +29,13 @@ from script_utils import cli REPO_ROOT_DIR = Path(__file__).parent.parent.resolve() -SERVICES_DIR = REPO_ROOT_DIR / "services" PYPROJECT_GENERATION_DIR = REPO_ROOT_DIR / ".pyproject_generation" -SERVICE_TEMPLATE_PATH = PYPROJECT_GENERATION_DIR / "service_template.toml" PYPROJECT_TEMPLATE_PATH = PYPROJECT_GENERATION_DIR / "pyproject_template.toml" pyproject_custom_path = PYPROJECT_GENERATION_DIR / "pyproject_custom.toml" pyproject_toml = REPO_ROOT_DIR / "pyproject.toml" -@contextmanager -def set_service_specific_vars(service: str): - """Adjust global vars for service.""" - global pyproject_custom_path, pyproject_toml - - # verify that the folder exists - service_dir = SERVICES_DIR / service - if not service_dir.exists(): - cli.echo_failure(f"{service_dir} does not exist") - exit(1) - - # set the vars - service_dev_dir = service_dir / ".dev" - pyproject_custom_path = service_dev_dir / "pyproject_custom.toml" - pyproject_toml = service_dir / "pyproject.toml" - - 
yield - - # reset the vars - pyproject_custom_path = PYPROJECT_GENERATION_DIR / "pyproject_custom.toml" - pyproject_toml = REPO_ROOT_DIR / "pyproject.toml" - - def read_template_pyproject() -> dict[str, object]: """Read the pyproject_template.toml.""" with open(PYPROJECT_TEMPLATE_PATH, "rb") as file: @@ -74,12 +48,6 @@ def read_custom_pyproject() -> dict[str, object]: return tomli.load(file) -def read_supplemental_pyproject() -> dict[str, object]: - """Read the service_template.toml.""" - with open(SERVICE_TEMPLATE_PATH, "rb") as file: - return tomli.load(file) - - def read_current_pyproject() -> dict[str, object]: """Read the current pyproject.toml.""" with open(pyproject_toml, "rb") as file: @@ -127,7 +95,7 @@ def merge_pyprojects(inputs: list[dict[str, object]]) -> dict[str, object]: return pyproject -def process_pyproject(*, root: bool, check: bool) -> bool: +def process_pyproject(*, check: bool) -> bool: """Update the pyproject.toml or checks for updates if the check flag is specified. Returns True if updates were made, False otherwise. 
@@ -136,9 +104,6 @@ def process_pyproject(*, root: bool, check: bool) -> bool: template_pyproject = read_template_pyproject() custom_pyproject = read_custom_pyproject() sources = [custom_pyproject, template_pyproject] - if not root: - sources.append(read_supplemental_pyproject()) - template_pyproject.pop("tool", "") merged_pyproject = merge_pyprojects(sources) current_pyproject = read_current_pyproject() @@ -151,29 +116,19 @@ def process_pyproject(*, root: bool, check: bool) -> bool: return False -def main(*, service: str, check: bool = False, root_only: bool = False): +def main(*, check: bool = False): """Update the pyproject.toml or checks for updates if the check flag is specified.""" - root_updated = process_pyproject(root=True, check=check) - if root_updated: - cli.echo_success("Root pyproject.toml updated.") - - if root_only: - if not root_updated: - cli.echo_success("Root pyproject.toml is up to date.") - exit(0) - - with set_service_specific_vars(service=service): - updated = process_pyproject(root=False, check=check) - if check: - cli.echo_success(f"{service}: Pyproject.toml is already up to date.") - else: - success_msg = ( - f"Successfully updated pyproject.toml for {service}." - if updated - else f"{service}: Pyproject.toml is already up to date." - ) - - cli.echo_success(success_msg) + updated = process_pyproject(check=check) + if check: + cli.echo_success("Pyproject.toml is already up to date.") + else: + success_msg = ( + "Successfully updated pyproject.toml." + if updated + else "Pyproject.toml is already up to date." 
+    ) + +    cli.echo_success(success_msg) if __name__ == "__main__": diff --git a/scripts/update_readme_monorepo.py b/scripts/update_readme_monorepo.py index cea452e0..aa1890f9 100755 --- a/scripts/update_readme_monorepo.py +++ b/scripts/update_readme_monorepo.py @@ -33,7 +33,6 @@ PYPROJECT_TOML_PATH = ROOT_DIR / "pyproject.toml" README_GENERATION_DIR = ROOT_DIR / ".readme_generation" DESCRIPTION_PATH = README_GENERATION_DIR / "description.md" -DESIGN_PATH = README_GENERATION_DIR / "design.md" README_TEMPLATE_PATH = README_GENERATION_DIR / "readme_template_monorepo.md" README_PATH = ROOT_DIR / "README.md" SERVICE_ROOT = ROOT_DIR / "services" @@ -68,13 +67,6 @@ class PackageDetails(PackageHeader, PackageName): description: str = Field( ..., description="A markdown-formatted description of the package." ) - design_description: str = Field( - ..., - description=( - "A markdown-formatted description of overall architecture and design of" - + " the package." - ), - ) service_readmes: str = Field(..., description="") @@ -133,20 +125,16 @@ def read_package_description() -> str: return DESCRIPTION_PATH.read_text() -def read_design_description() -> str: - """Read the design description.""" - - return DESIGN_PATH.read_text() - - def get_service_readmes() -> str: - """TODO""" + """Get links to all service readmes.""" service_readme_links = [] for service_dir in sorted(list_service_dirs()): service_description = read_service_description(service_dir) readme_link = service_dir.relative_to(ROOT_DIR) / "README.md" + if " - " in service_description: + service_description = service_description.split(" - ")[0].strip() service_readme_links.append(f"[{service_description}]({readme_link})") return " \n".join(service_readme_links) @@ -162,7 +150,6 @@ def get_package_details() -> PackageDetails: **header.model_dump(), **name.model_dump(), description=description, - design_description=read_design_description(), service_readmes=get_service_readmes(), ) diff --git a/scripts/update_readme_services.py 
 b/scripts/update_readme_services.py index 24655904..a2456240 100755 --- a/scripts/update_readme_services.py +++ b/scripts/update_readme_services.py @@ -51,7 +51,6 @@ class PackageHeader(BaseModel): class PackageName(BaseModel): """The name of a package and it's different representations.""" - repo_name: str = Field(..., description="The name of the repo") name: str = Field(..., description="The full name of the package in spinal case.") title: str = Field(..., description="The name of the package formatted as title.") @@ -85,7 +84,7 @@ class PackageDetails(PackageHeader, PackageName): class ServiceDetails: - """TODO""" + """Container class for service specific paths and functionality relying on them.""" def __init__(self, service_dir: Path): self.service_dir = service_dir @@ -101,8 +100,11 @@ def get_package_details(self) -> PackageDetails: """Get details required to build documentation for the package.""" header = self.read_toml_package_header() - name = PackageName(repo_name="TODO", name="TODO", title="TODO") + # name/title come from the summary prefix (before " - "), not the description description = self.read_package_description() + title = header.summary.split(" - ")[0].strip() + service_name = title.lower().replace(" ", "-") + name = PackageName(name=service_name, title=title) config_description = self.generate_config_docs() return PackageDetails( **header.model_dump(), diff --git a/scripts/update_service_files.py b/scripts/update_service_files.py index 6938cbf4..7d8aac07 100755 --- a/scripts/update_service_files.py +++ b/scripts/update_service_files.py @@ -25,7 +25,7 @@ from update_config_docs import main as update_config from update_hook_revs import main as update_hooks from update_openapi_docs import main as update_openapi -from update_pyproject import main as update_pyproject +from 
update_readme_services import main as update_readmes app = typer.Typer(no_args_is_help=True, add_completion=False) @@ -58,17 +58,6 @@ def wrapper(service: str = ServiceArg, *args, **kwargs): return wrapper -@app.command(name="pyproject") -@run_for_service_or_all -def pyproject( - service: str = ServiceArg, - check: bool = CheckFlag, - root_only: bool = typer.Option(False, "--root-only"), -): - """Run scripts/update_pyproject.py for one or all services.""" - update_pyproject(service=service, check=check, root_only=root_only) - - @app.command(name="config") @run_for_service_or_all def config(service: str = ServiceArg, check: bool = CheckFlag): @@ -91,7 +80,6 @@ def update_one_service(service: str = ServiceArg, check: bool = CheckFlag): This will update the config, pyproject, and openapi. """ print(f"Updating all for {service}") - update_pyproject(service=service, check=check) update_config(service=service, check=check) update_openapi(service=service, check=check) @@ -107,6 +95,7 @@ def update_everything(check: bool = CheckFlag): """Run all update scripts. 
Service-specific scripts are run for all services.""" hooks(check=check) update_one_service(service="", check=check) + update_readmes() if __name__ == "__main__": diff --git a/services/ifrs/README.md b/services/ifrs/README.md index 34cc83bf..3510eff9 100644 --- a/services/ifrs/README.md +++ b/services/ifrs/README.md @@ -1,9 +1,6 @@ -[![tests](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml) -[![Coverage Status](https://coveralls.io/repos/github/ghga-de/TODO/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/TODO?branch=main) +# Internal File Registry Service -# TODO - -Internal-File-Registry-Service - This service acts as a registry for the internal location and representation of files. +Internal File Registry Service - This service acts as a registry for the internal location and representation of files. 
## Description @@ -37,15 +34,15 @@ This event is published after a file was successfully staged to the outbox. We recommend using the provided Docker container. -A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/TODO): +A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/this-service-provides-functionality-to-administer-files-stored-in-an--s3-compatible-object-storage---all-file-related-metadata-is-stored-in-an-internal-mongodb-database,-owned-and-controlled-by-this-service---it-exposes-no--r-e-s-t--a-p-i-enpoints-and-communicates-with-other-services-via-events---###--events-consumed:--####-files-to-register--this-event-signals-that-there-is-a-file-to-register-in-the-database---the-file-related-metadata-from-this-event-gets-saved-in-the-database-and-the-file-is-moved-from-the-incoming-staging-bucket-to-the-permanent-storage---####-files-to-stage--this-event-signals-that-there-is-a-file-that-needs-to-be-staged-for-download---the-file-is-then-copied-from-the-permanent-storage-to-the-outbox-for-the-actual-download--###--events-published:--####-file-internally-registered--this-event-is-published-after-a-file-was-registered-in-the-database---it-contains-all-the-file-related-metadata-that-was-provided-by-the-files-to-register-event---####-file-staged-for-download--this-event-is-published-after-a-file-was-successfully-staged-to-the-outbox--): ```bash -docker pull ghga/TODO:1.3.0 +docker pull 
ghga/ifrs:1.3.0 ``` Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): ```bash # Execute in the repo's root dir: -docker build -t ghga/TODO:1.3.0 .
+docker build -t ghga/ifrs:1.3.0 .
``` For production-ready deployment, we recommend using Kubernetes, however, @@ -53,7 +50,7 @@ for simple use cases, you could execute the service using docker on a single server: ```bash # The entrypoint is preconfigured: -docker run -p 8080:8080 ghga/TODO:1.3.0 --help +docker run -p 8080:8080 ghga/ifrs:1.3.0 --help ``` If you prefer not to use containers, you may install the service from source: @@ -251,7 +248,7 @@ The service requires the following configuration parameters: - **`kafka_ssl_keyfile`** *(string)*: Optional filename containing the client private key. Default: `""`. -- **`kafka_ssl_password`** *(string)*: Optional password to be used for the client private key. Default: `""`. +- **`kafka_ssl_password`** *(string, format: password)*: Optional password to be used for the client private key. Default: `""`.
- **`generate_correlation_id`** *(boolean)*: A flag, which, if False, will result in an error when trying to publish an event without a valid correlation ID set for the context. If True, the a newly correlation ID will be generated and used in the event header. Default: `true`. diff --git a/services/ifrs/config_schema.json b/services/ifrs/config_schema.json index 744421ee..2e53ebe7 100644 --- a/services/ifrs/config_schema.json +++ b/services/ifrs/config_schema.json @@ -288,8 +288,10 @@ "kafka_ssl_password": { "default": "", "description": "Optional password to be used for the client private key.", + "format": "password", "title": "Kafka Ssl Password", - "type": "string" + "type": "string", + "writeOnly": true }, "generate_correlation_id": { "default": true, diff --git a/services/ifrs/pyproject.toml b/services/ifrs/pyproject.toml index 8e7953a3..550d9d50 100644 --- a/services/ifrs/pyproject.toml +++ b/services/ifrs/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "ifrs" version = "1.3.0" -description = "Internal-File-Registry-Service - This service acts as a registry for the internal location and representation of files." +description = "Internal File Registry Service - This service acts as a registry for the internal location and representation of files." 
readme = "README.md" authors = [ { name = "German Human Genome Phenome Archive (GHGA)", email = "contact@ghga.de" }, diff --git a/services/irs/README.md b/services/irs/README.md index a595ded6..1436c61a 100644 --- a/services/irs/README.md +++ b/services/irs/README.md @@ -1,7 +1,4 @@ -[![tests](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml) -[![Coverage Status](https://coveralls.io/repos/github/ghga-de/TODO/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/TODO?branch=main) - -# TODO +# Interrogation Room Service Interrogation Room Service @@ -15,15 +12,15 @@ The IRS splits off the file envelope, computes part checksums over the encrypted We recommend using the provided Docker container.
-A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/TODO): +A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/irs): ```bash -docker pull ghga/TODO:2.1.0 +docker pull ghga/irs:2.1.0 ``` Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): ```bash # Execute in the repo's root dir: -docker build -t ghga/TODO:2.1.0 . +docker build -t ghga/irs:2.1.0 .
``` For production-ready deployment, we recommend using Kubernetes, however, @@ -31,7 +28,7 @@ for simple use cases, you could execute the service using docker on a single server: ```bash # The entrypoint is preconfigured: -docker run -p 8080:8080 ghga/TODO:2.1.0 --help +docker run -p 8080:8080 ghga/irs:2.1.0 --help ``` If you prefer not to use containers, you may install the service from source: @@ -201,7 +198,7 @@ The service requires the following configuration parameters: - **`kafka_ssl_keyfile`** *(string)*: Optional filename containing the client private key. Default: `""`. -- **`kafka_ssl_password`** *(string)*: Optional password to be used for the client private key. Default: `""`. +- **`kafka_ssl_password`** *(string, format: password)*: Optional password to be used for the client private key. Default: `""`. - **`generate_correlation_id`** *(boolean)*: A flag, which, if False, will result in an error when trying to publish an event without a valid correlation ID set for the context. If True, the a newly correlation ID will be generated and used in the event header. Default: `true`.
diff --git a/services/irs/config_schema.json b/services/irs/config_schema.json index 85f694ee..17475e65 100644 --- a/services/irs/config_schema.json +++ b/services/irs/config_schema.json @@ -271,8 +271,10 @@ "kafka_ssl_password": { "default": "", "description": "Optional password to be used for the client private key.", + "format": "password", "title": "Kafka Ssl Password", - "type": "string" + "type": "string", + "writeOnly": true }, "generate_correlation_id": { "default": true, diff --git a/services/pcs/.readme_generation/readme_template.md b/services/pcs/.readme_generation/readme_template.md deleted file mode 100644 index 98a00c09..00000000 --- a/services/pcs/.readme_generation/readme_template.md +++ /dev/null @@ -1,113 +0,0 @@ -[![tests](https://github.com/ghga-de/$repo_name/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/$repo_name/actions/workflows/tests.yaml) -[![Coverage Status](https://coveralls.io/repos/github/ghga-de/$repo_name/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/$repo_name?branch=main) - -# $title - -$summary - -## Description - -$description - -## Installation - -We recommend using the provided Docker container. - -A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/$name): -```bash -docker pull ghga/$name:$version -``` - -Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): -```bash -# Execute in the repo's root dir: -docker build -t ghga/$name:$version . -``` - -For production-ready deployment, we recommend using Kubernetes, however, -for simple use cases, you could execute the service using docker -on a single server: -```bash -# The entrypoint is preconfigured: -docker run -p 8080:8080 ghga/$name:$version --help -``` - -If you prefer not to use containers, you may install the service from source: -```bash -# Execute in the repo's root dir: -pip install . 
- -# To run the service: -$shortname --help -``` - -## Configuration - -### Parameters - -The service requires the following configuration parameters: -$config_description - -### Usage: - -A template YAML for configurating the service can be found at -[`./example-config.yaml`](./example-config.yaml). -Please adapt it, rename it to `.$shortname.yaml`, and place it into one of the following locations: -- in the current working directory were you are execute the service (on unix: `./.$shortname.yaml`) -- in your home directory (on unix: `~/.$shortname.yaml`) - -The config yaml will be automatically parsed by the service. - -**Important: If you are using containers, the locations refer to paths within the container.** - -All parameters mentioned in the [`./example-config.yaml`](./example-config.yaml) -could also be set using environment variables or file secrets. - -For naming the environment variables, just prefix the parameter name with `${shortname}_`, -e.g. for the `host` set an environment variable named `${shortname}_host` -(you may use both upper or lower cases, however, it is standard to define all env -variables in upper cases). - -To using file secrets please refer to the -[corresponding section](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support) -of the pydantic documentation. - -$openapi_doc - -## Architecture and Design: -$design_description - -## Development - -For setting up the development environment, we rely on the -[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of VS Code -in combination with Docker Compose. - -To use it, you have to have Docker Compose as well as VS Code with its "Remote - Containers" -extension (`ms-vscode-remote.remote-containers`) installed. -Then open this repository in VS Code and run the command -`Remote-Containers: Reopen in Container` from the VS Code "Command Palette". 
- -This will give you a full-fledged, pre-configured development environment including: -- infrastructural dependencies of the service (databases, etc.) -- all relevant VS Code extensions pre-installed -- pre-configured linting and auto-formatting -- a pre-configured debugger -- automatic license-header insertion - -Moreover, inside the devcontainer, a convenience commands `dev_install` is available. -It installs the service with all development dependencies, installs pre-commit. - -The installation is performed automatically when you build the devcontainer. However, -if you update dependencies in the [`./pyproject.toml`](./pyproject.toml) or the -[`./requirements-dev.txt`](./requirements-dev.txt), please run it again. - -## License - -This repository is free to use and modify according to the -[Apache 2.0 License](./LICENSE). - -## README Generation - -This README file is auto-generated, please see [`readme_generation.md`](./readme_generation.md) -for details. diff --git a/services/pcs/README.md b/services/pcs/README.md index 39a100cc..2b5db9d1 100644 --- a/services/pcs/README.md +++ b/services/pcs/README.md @@ -1,7 +1,4 @@ -[![tests](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml/badge.svg)](https://github.com/ghga-de/TODO/actions/workflows/tests.yaml) -[![Coverage Status](https://coveralls.io/repos/github/ghga-de/TODO/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/TODO?branch=main) - -# TODO +# Purge Controller Service Purge Controller Service - a service to commission file deletions @@ -27,15 +24,15 @@ It
contains the file_id of the file that should be deleted. We recommend using the provided Docker container. -A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/TODO): +A pre-build version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/pcs): ```bash -docker pull ghga/TODO:1.2.0 +docker pull ghga/pcs:1.2.0 ``` Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): ```bash # Execute in the repo's root dir: -docker build -t ghga/TODO:1.2.0 . +docker build -t ghga/pcs:1.2.0 .
``` For production-ready deployment, we recommend using Kubernetes, however, @@ -43,7 +40,7 @@ for simple use cases, you could execute the service using docker on a single server: ```bash # The entrypoint is preconfigured: -docker run -p 8080:8080 ghga/TODO:1.2.0 --help +docker run -p 8080:8080 ghga/pcs:1.2.0 --help ``` If you prefer not to use containers, you may install the service from source: @@ -141,7 +138,7 @@ The service requires the following configuration parameters: - **`kafka_ssl_keyfile`** *(string)*: Optional filename containing the client private key. Default: `""`. -- **`kafka_ssl_password`** *(string)*: Optional password to be used for the client private key. Default: `""`. +- **`kafka_ssl_password`** *(string, format: password)*: Optional password to be used for the client private key. Default: `""`. - **`generate_correlation_id`** *(boolean)*: A flag, which, if False, will result in an error when inbound requests don't possess a correlation ID. If True, requests without a correlation ID will be assigned a newly generated ID in the correlation ID middleware function. Default: `true`.
diff --git a/services/pcs/config_schema.json b/services/pcs/config_schema.json index eb81ccd2..808d7dda 100644 --- a/services/pcs/config_schema.json +++ b/services/pcs/config_schema.json @@ -114,8 +114,10 @@ "kafka_ssl_password": { "default": "", "description": "Optional password to be used for the client private key.", + "format": "password", "title": "Kafka Ssl Password", - "type": "string" + "type": "string", + "writeOnly": true }, "generate_correlation_id": { "default": true,