
Commit

Merge pull request #900 from populationgenomics/dev
Release: AnalysisUpdateModel.active bug fix and formatting
jmarshall authored Aug 16, 2024
2 parents 21160e7 + 5a0e49f commit a7cc187
Showing 69 changed files with 646 additions and 281 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 7.3.2
+current_version = 7.3.3
 commit = True
 tag = False
 parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>[A-z0-9-]+)
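The `parse` pattern above is how bump2version splits a version string into its components. A quick illustrative check in plain Python (not part of the commit, shown only to make the pattern concrete):

# Illustrative only: exercising the bumpversion `parse` regex from .bumpversion.cfg.
import re

parse = re.compile(r'(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>[A-z0-9-]+)')
match = parse.match('7.3.3')
assert match is not None
assert match.groupdict() == {'major': '7', 'minor': '3', 'patch': '3'}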
26 changes: 3 additions & 23 deletions .pre-commit-config.yaml
@@ -31,36 +31,16 @@ repos:
           metamist/audit/README\.md
         )$
-  - repo: https://github.com/pycqa/isort
-    rev: 5.13.2
-    hooks:
-      - id: isort
-        name: isort (python)
-
-  - repo: https://github.com/ambv/black
-    rev: 23.12.1
-    hooks:
-      - id: black
-        args: [.]
-        pass_filenames: false
-        always_run: true
-        exclude: ^metamist/
-
-  - repo: https://github.com/PyCQA/flake8
-    rev: "6.1.0"
-    hooks:
-      - id: flake8
-        additional_dependencies: [flake8-bugbear, flake8-quotes]
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
     rev: v0.5.2
     hooks:
       # Run the linter.
-      - id: ruff
-        args: [ --fix ]
+      # - id: ruff
+      # args: [ --fix ]
       # Run the formatter.
-      # - id: ruff-format
+      - id: ruff-format
 
   # Using system installation of pylint to support checking python module imports
   - repo: local
1 change: 1 addition & 0 deletions api/graphql/schema.py
@@ -5,6 +5,7 @@
 Note, we silence a lot of linting here because GraphQL looks at type annotations
 and defaults to decide the GraphQL schema, so it might not necessarily look correct.
 """
+
 import datetime
 from inspect import isclass
 
6 changes: 5 additions & 1 deletion api/routes/analysis.py
@@ -124,7 +124,11 @@ async def update_analysis(
     """Update status of analysis"""
     atable = AnalysisLayer(connection)
     await atable.update_analysis(
-        analysis_id, status=analysis.status, output=analysis.output, meta=analysis.meta
+        analysis_id,
+        status=analysis.status,
+        output=analysis.output,
+        meta=analysis.meta,
+        active=analysis.active,
     )
     return True
 
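This hunk is the AnalysisUpdateModel.active bug fix named in the release title: the update route accepted `active` on the request model but never passed it through to the analysis layer, so the field was silently ignored. A minimal, self-contained sketch of that bug class follows — the model, layer function, and route here are hypothetical stand-ins, not metamist's actual code:

# Illustrative sketch only: hypothetical names, not metamist's real models or routes.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class AnalysisUpdate(BaseModel):
    """Hypothetical stand-in for an update model such as AnalysisUpdateModel."""

    status: str | None = None
    active: bool | None = None


async def layer_update(analysis_id: int, **fields) -> None:
    """Hypothetical stand-in for the persistence layer's update method."""
    print(f'would update analysis {analysis_id} with {fields}')


@app.put('/analysis/{analysis_id}')
async def update_analysis(analysis_id: int, analysis: AnalysisUpdate) -> bool:
    # The bug class fixed above: a field the model accepts but the handler
    # never forwards, so client updates to it are silently dropped.
    await layer_update(
        analysis_id,
        status=analysis.status,
        active=analysis.active,
    )
    return True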
7 changes: 6 additions & 1 deletion api/routes/project_insights.py
@@ -7,7 +7,12 @@
     ProjectInsightsSummary,
 )
 
-router = APIRouter(prefix='/project-insights', tags=['project-insights',])
+router = APIRouter(
+    prefix='/project-insights',
+    tags=[
+        'project-insights',
+    ],
+)
 
 
 @router.post(
2 changes: 1 addition & 1 deletion api/server.py
@@ -25,7 +25,7 @@
 from db.python.utils import get_logger
 
 # This tag is automatically updated by bump2version
-_VERSION = '7.3.2'
+_VERSION = '7.3.3'
 
 
 logger = get_logger()
4 changes: 4 additions & 0 deletions codecov.yml
@@ -5,7 +5,11 @@ coverage:
         # basic
         target: auto
         threshold: 0%
         informational: true
+        paths:
+          - "db"
+          - "scripts"
+          - "metamist/parser"
     patch:
       default:
         informational: true
4 changes: 2 additions & 2 deletions db/backup/backup.py
@@ -1,7 +1,7 @@
 #!/usr/bin/python3
 # pylint: disable=broad-exception-caught,broad-exception-raised
-""" Daily back up function for databases within a local
-MariaDB instance """
+"""Daily back up function for databases within a local
+MariaDB instance"""
 
 import json
 import os
2 changes: 1 addition & 1 deletion db/backup/recovery_test.py
@@ -1,4 +1,4 @@
""" A script to test that tests the validity of a database backup
"""A script to test that tests the validity of a database backup
in the event of recovery.
NOTE: DO NOT RUN THIS SCRIPT ON A PRODUCTION SERVER.
It will drop the local mysql database after each run.
4 changes: 2 additions & 2 deletions db/backup/restore.py
@@ -1,5 +1,5 @@
""" A script to restore the database instance to the latest
backup """
"""A script to restore the database instance to the latest
backup"""

import os
import subprocess
35 changes: 29 additions & 6 deletions db/deploy/main.py
@@ -22,10 +22,14 @@
 changelog_file = 'project.xml'
 
 
-def read_db_credentials(env: Literal['prod', 'dev']) -> Dict[Literal['dbname', 'username', 'password', 'host'], str]:
+def read_db_credentials(
+    env: Literal['prod', 'dev'],
+) -> Dict[Literal['dbname', 'username', 'password', 'host'], str]:
     """Get database credentials from Secret Manager."""
     try:
-        secret_path = SECRET_CLIENT.secret_version_path(SECRET_PROJECT, SECRET_NAME, 'latest')
+        secret_path = SECRET_CLIENT.secret_version_path(
+            SECRET_PROJECT, SECRET_NAME, 'latest'
+        )
         response = SECRET_CLIENT.access_secret_version(request={'name': secret_path})
         return json.loads(response.payload.data.decode('UTF-8'))[env]
     except Exception as e: # Broad exception for example; refine as needed
@@ -35,7 +39,10 @@ def read_db_credentials(env: Literal['prod', 'dev']) -> Dict[Literal['dbname', '
 
 
 @app.post('/execute-liquibase')
-async def execute_liquibase(request: Request, environment: Literal['prod', 'dev'] = Query(default='dev', regex='^(prod|dev)$')):
+async def execute_liquibase(
+    request: Request,
+    environment: Literal['prod', 'dev'] = Query(default='dev', regex='^(prod|dev)$'),
+):
     """Endpoint to remotely trigger Liquibase commands on a GCP VM using XML content."""
     xml_content = await request.body()
 
@@ -67,10 +74,25 @@ async def execute_liquibase(request: Request, environment: Literal['prod', 'dev'
 
     try:
         # Execute the gcloud command
-        result = subprocess.run(liquibase_command, check=True, capture_output=True, text=True, env={'LIQUIBASE_COMMAND_PASSWORD': db_password, 'LIQUIBASE_COMMAND_USERNAME': db_username, **os.environ},)
-        logger.log_text(f'Liquibase update successful: {result.stdout}', severity='INFO')
+        result = subprocess.run(
+            liquibase_command,
+            check=True,
+            capture_output=True,
+            text=True,
+            env={
+                'LIQUIBASE_COMMAND_PASSWORD': db_password,
+                'LIQUIBASE_COMMAND_USERNAME': db_username,
+                **os.environ,
+            },
+        )
+        logger.log_text(
+            f'Liquibase update successful: {result.stdout}', severity='INFO'
+        )
         os.remove(temp_file_path)
-        return {'message': 'Liquibase update executed successfully', 'output': result.stdout}
+        return {
+            'message': 'Liquibase update executed successfully',
+            'output': result.stdout,
+        }
     except subprocess.CalledProcessError as e:
         text = f'Failed to execute Liquibase update: {e.stderr}'
         logger.log_text(text, severity='ERROR')
@@ -79,4 +101,5 @@ async def execute_liquibase(request: Request, environment: Literal['prod', 'dev'
 
 if __name__ == '__main__':
     import uvicorn
+
     uvicorn.run(app, host='0.0.0.0', port=int(os.environ.get('PORT', 8080)))
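A design note on the subprocess call reformatted above: the Liquibase credentials are handed to the child process through the LIQUIBASE_COMMAND_USERNAME / LIQUIBASE_COMMAND_PASSWORD environment variables (merged with os.environ) rather than as command-line arguments, so they do not appear in process listings. A stripped-down sketch of the same pattern — the function name and parameters here are illustrative, not from the repo:

# Illustrative only: run a command with secrets exposed solely via its environment.
import os
import subprocess


def run_with_secret_env(command: list[str], secrets: dict[str, str]) -> str:
    """Run `command`; `secrets` are visible to the child process but never on its argv."""
    result = subprocess.run(
        command,
        check=True,           # raise CalledProcessError on a non-zero exit code
        capture_output=True,  # keep stdout/stderr so the caller can log them
        text=True,
        env={**os.environ, **secrets},  # note: dict order decides which side wins on key clashes
    )
    return result.stdout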
8 changes: 4 additions & 4 deletions db/python/connect.py
@@ -3,6 +3,7 @@
 """
 Code for connecting to Postgres database
 """
+
 import abc
 import asyncio
 import json
@@ -229,7 +230,6 @@ async def audit_log_id(self):
 
         async with self._audit_log_lock:
             if not self._audit_log_id:
-
                 # make this import here, otherwise we'd have a circular import
                 from db.python.tables.audit_log import ( # pylint: disable=import-outside-toplevel,R0401
                     AuditLogTable,
@@ -328,9 +328,9 @@ def get_connection_string(self):
         if self.port:
             _host += f':{self.port}'
 
-        options: dict[str, str | int] = (
-            {}
-        ) # {'min_size': self.min_pool_size, 'max_size': self.max_pool_size}
+        options: dict[
+            str, str | int
+        ] = {} # {'min_size': self.min_pool_size, 'max_size': self.max_pool_size}
         _options = '&'.join(f'{k}={v}' for k, v in options.items())
 
         url = f'mysql://{u_p}@{_host}/{self.dbname}?{_options}'
2 changes: 2 additions & 0 deletions db/python/layers/analysis.py
@@ -572,6 +572,7 @@ async def update_analysis(
         status: AnalysisStatus,
         meta: dict[str, Any] = None,
         output: str | None = None,
+        active: bool | None = None,
     ):
         """
         Update the status of an analysis, set timestamp_completed if relevant
@@ -586,4 +587,5 @@ async def update_analysis(
             status=status,
             meta=meta,
             output=output,
+            active=active,
         )
12 changes: 7 additions & 5 deletions db/python/layers/family.py
@@ -275,11 +275,13 @@ async def import_pedigree(
         missing_external_family_ids = [
             f for f in external_family_ids if f not in external_family_id_map
         ]
-        external_participant_ids_map = await participant_table.get_id_map_by_external_ids(
-            list(external_participant_ids),
-            project=self.connection.project_id,
-            # Allow missing participants if we're creating them
-            allow_missing=create_missing_participants,
+        external_participant_ids_map = (
+            await participant_table.get_id_map_by_external_ids(
+                list(external_participant_ids),
+                project=self.connection.project_id,
+                # Allow missing participants if we're creating them
+                allow_missing=create_missing_participants,
+            )
         )
 
         async with self.connection.connection.transaction():