From 3a19579812ffb2bc72f674d9a593edd31df7df18 Mon Sep 17 00:00:00 2001 From: Wambere Date: Tue, 30 Apr 2024 18:32:52 +0300 Subject: [PATCH] Add support for importing inventory and products (#179) * Add functionality to import products as Group resources * Update read me * Create images folder if it doesn't already exist * Mock image binary * Add support for importing inventories * Update product-group template * Update inventory-group * Update inventory csv and code * Use GROUP_INDEX_MAPPING in group_extras * black formatting * move deletion to outside the condition --- importer/README.md | 15 +- importer/csv/import/inventory.csv | 2 + importer/csv/import/product.csv | 3 + .../inventory_group_payload.json | 128 ++++ .../json_payloads/product_group_payload.json | 145 ++++ importer/main.py | 631 ++++++++++++++---- importer/test_main.py | 62 ++ 7 files changed, 854 insertions(+), 132 deletions(-) create mode 100644 importer/csv/import/inventory.csv create mode 100644 importer/csv/import/product.csv create mode 100644 importer/json_payloads/inventory_group_payload.json create mode 100644 importer/json_payloads/product_group_payload.json diff --git a/importer/README.md b/importer/README.md index 63d532a5..e58b7f1f 100644 --- a/importer/README.md +++ b/importer/README.md @@ -133,4 +133,17 @@ The coverage report `coverage.html` will be at the working directory - The `value` is where you pass the actual parameter value to filter the resources. The set default value is "gt2023-01-01", other examples include, "Good Health Clinic 1" - The `limit` is the number of resources exported at a time. 
The set default value is '1000' - Specify the `resource_type` you want to export, different resource_types are exported to different csv_files -- The csv_file containing the exported resources is labelled using the current time, to know when the resources were exported for example, csv/exports/2024-02-21-12-21-export_Location.csv \ No newline at end of file +- The csv_file containing the exported resources is labelled using the current time, to know when the resources were exported for example, csv/exports/2024-02-21-12-21-export_Location.csv + +### 10. Import products from openSRP 1 +- Run `python3 main.py --csv_file csv/import/product.csv --setup products --log_level info` +- See example csv [here](/importer/csv/import/product.csv) +- This creates a Group resource for each product imported +- The first two columns __name__ and __active__ are the minimum required +- The last column __imageSourceUrl__ contains a URL to the product image. If this source requires authentication, then you need to provide the `product_access_token` in the config file. The image is added as a binary resource and referenced in the product's Group resource + +### 11. 
Import inventories from openSRP 1 +- Run `python3 main.py --csv_file csv/import/inventory.csv --setup inventories --log_level info` +- See example csv [here](/importer/csv/import/inventory.csv) +- This creates a Group resource for each inventory imported +- The first two columns __name__ and __active__ are the minimum required diff --git a/importer/csv/import/inventory.csv b/importer/csv/import/inventory.csv new file mode 100644 index 00000000..7e5c08af --- /dev/null +++ b/importer/csv/import/inventory.csv @@ -0,0 +1,2 @@ +name,active,method,id,poNumber,serialNumber,usualId,actual,productId,deliveryDate,accountabilityDate,quantity,unicefSection,donor +Bishop Magua - Bed nets,true,create,8adfcfe0-41d0-4f0a-9a89-909c72fbf330,123523,989682,a065c211-cf3e-4b5b-972f-fdac0e45fef7,false,1d86d0e2-bac8-4424-90ae-e2298900ac3c,2024-02-01T00:00:00.00Z,2025-02-01T00:00:00.00Z,34,Health,GAVI \ No newline at end of file diff --git a/importer/csv/import/product.csv b/importer/csv/import/product.csv new file mode 100644 index 00000000..7f49f22f --- /dev/null +++ b/importer/csv/import/product.csv @@ -0,0 +1,3 @@ +name,active,method,id,previousId,isAttractiveItem,availability,condition,appropriateUsage,accountabilityPeriod,imageSourceUrl +thermometer,true,create,1d86d0e2-bac8-4424-90ae-e2298900ac3c,10,true,yes,good,ok,12,https://ona.io/home/wp-content//uploads/2022/06/spotlight-fhir.png +sterilizer,true,create,,53209452,true,no,,,, \ No newline at end of file diff --git a/importer/json_payloads/inventory_group_payload.json b/importer/json_payloads/inventory_group_payload.json new file mode 100644 index 00000000..c436fc9b --- /dev/null +++ b/importer/json_payloads/inventory_group_payload.json @@ -0,0 +1,128 @@ +{ + "request": { + "method": "PUT", + "url": "Group/$unique_uuid", + "ifMatch": "$version" + }, + "resource": { + "resourceType": "Group", + "id": "$unique_uuid", + "identifier": [ + { + "type": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "SERNUM", 
+ "display": "Serial Number" + } + ], + "text": "Serial Number" + }, + "use": "official", + "value": "$serial_number" + }, + { + "type": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "PONUM", + "display": "PO Number" + } + ], + "text": "PO Number" + }, + "use": "secondary", + "value": "$po_number" + }, + { + "use": "usual", + "value": "$usual_id" + } + ], + "active": "$active", + "type": "substance", + "actual": "$actual", + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "78991122", + "display": "Supply Inventory" + } + ] + }, + "name": "$name", + "member": [ + { + "entity": { + "reference": "Group/$product_id" + }, + "period": { + "start": "$delivery_date", + "end": "$accountability_date" + }, + "inactive": false + } + ], + "characteristic": [ + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "33467722", + "display": "Quantity" + } + ] + }, + "valueQuantity": { + "value": "$quantity" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "98734231", + "display": "Unicef Section" + } + ] + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "98734231-1", + "display": "Value entered on the unicef section" + } + ], + "text": "$unicef_section" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "45981276", + "display": "Donor" + } + ] + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "45981276-1", + "display": "Value entered on the donor" + } + ], + "text": "$donor" + } + } + ] + } +} \ No newline at end of file diff --git a/importer/json_payloads/product_group_payload.json b/importer/json_payloads/product_group_payload.json new file mode 100644 index 00000000..a41d494e --- /dev/null +++ b/importer/json_payloads/product_group_payload.json @@ -0,0 +1,145 @@ +{ + 
"request": { + "method": "PUT", + "url": "Group/$unique_uuid", + "ifMatch" : "$version" + }, + "resource": { + "resourceType": "Group", + "id": "$unique_uuid", + "identifier": [ + { + "type":{ + "coding": { + "system" : "http://smartregister.org/codes", + "code" : "MATNUM" , + "display": "Material Number" + } + }, + "use": "official", + "value": "$unique_uuid" + }, + { + "use": "secondary", + "value": "$previous_id" + } + ], + "active": "$active", + "type": "substance", + "code": { + "coding": [ + { + "system": "http://snomed.info/sct", + "code": "386452003", + "display": "Supply management" + } + ] + }, + "name": "$name", + "characteristic": [ + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "23435363", + "display": "Attractive Item code" + } + ] + }, + "valueBoolean": "$isAttractiveItem" + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "34536373", + "display": "Is it there code" + } + ] + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "34536373-1", + "display": "Value entered on the It is there code" + } + ], + "text": "$availability" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "45647484", + "display": "Is it in good condition? (optional)" + } + ] + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "45647484-1", + "display": "Value entered on the Is it in good condition? (optional)" + } + ], + "text": "$condition" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "56758595", + "display": "Is it being used appropriately?" + } + ] + }, + "valueCodeableConcept": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "56758595-1", + "display": "Value entered on the Is it being used appropriately?" 
+ } + ], + "text": "$appropriateUsage" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "67869606", + "display": "Accountability period (in months)" + } + ] + }, + "valueQuantity": { + "value": "$accountabilityPeriod" + } + }, + { + "code": { + "coding": [ + { + "system": "http://smartregister.org/codes", + "code": "12314156", + "display": "Product Image code" + } + ] + }, + "valueReference": { + "reference": "Binary/$image-binary" + } + } + ] + } +} \ No newline at end of file diff --git a/importer/main.py b/importer/main.py index 5a421198..fe013daa 100644 --- a/importer/main.py +++ b/importer/main.py @@ -21,6 +21,7 @@ global_access_token = "" + # This function takes in a csv file # reads it and returns a list of strings/lines # It ignores the first line (assumes headers) @@ -32,7 +33,9 @@ def read_csv(csv_file): next(records) all_records = [] - with click.progressbar(records, label='Progress::Reading csv ') as read_csv_progress: + with click.progressbar( + records, label="Progress::Reading csv " + ) as read_csv_progress: for record in read_csv_progress: all_records.append(record) @@ -117,13 +120,25 @@ def handle_request(request_type, payload, url): def get_keycloak_url(): return config.keycloak_url + # This function builds the user payload and posts it to # the keycloak api to create a new user # it also adds the user to the provided keycloak group # and sets the user password def create_user(user): - (firstName, lastName, username, email, userId, userType, _, keycloakGroupId, - keycloakGroupName, appId, password) = user + ( + firstName, + lastName, + username, + email, + userId, + userType, + _, + keycloakGroupId, + keycloakGroupName, + appId, + password, + ) = user with open("json_payloads/keycloak_user_payload.json") as json_file: payload_string = json_file.read() @@ -145,7 +160,9 @@ def create_user(user): user_id = (new_user_location.split("/"))[-1] # add user to group - payload = '{"id": "' + keycloakGroupId 
+ '", "name": "' + keycloakGroupName + '"}' + payload = ( + '{"id": "' + keycloakGroupId + '", "name": "' + keycloakGroupName + '"}' + ) group_endpoint = user_id + "/groups/" + keycloakGroupId url = keycloak_url + "/users/" + group_endpoint logging.info("Adding user to Keycloak group: " + keycloakGroupName) @@ -167,8 +184,19 @@ def create_user(user): # new user and posts them to the FHIR api for creation def create_user_resources(user_id, user): logging.info("Creating user resources") - (firstName, lastName, username, email, id, userType, - enableUser, keycloakGroupId, keycloakGroupName, _, password) = user + ( + firstName, + lastName, + username, + email, + id, + userType, + enableUser, + keycloakGroupId, + keycloakGroupName, + _, + password, + ) = user # generate uuids if len(str(id).strip()) == 0: @@ -280,8 +308,19 @@ def check_parent_admin_level(locationParentId): # custom extras for locations def location_extras(resource, payload_string): try: - (locationName, *_, locationParentName, locationParentId, locationType, locationTypeCode, - locationAdminLevel, locationPhysicalType, locationPhysicalTypeCode, longitude, latitude) = resource + ( + locationName, + *_, + locationParentName, + locationParentId, + locationType, + locationTypeCode, + locationAdminLevel, + locationPhysicalType, + locationPhysicalTypeCode, + longitude, + latitude, + ) = resource except ValueError: locationParentName = "parentName" locationParentId = "ParentId" @@ -294,9 +333,9 @@ def location_extras(resource, payload_string): try: if locationParentName and locationParentName != "parentName": - payload_string = payload_string.replace("$parentName", locationParentName).replace( - "$parentID", locationParentId - ) + payload_string = payload_string.replace( + "$parentName", locationParentName + ).replace("$parentID", locationParentId) else: obj = json.loads(payload_string) del obj["resource"]["partOf"] @@ -330,12 +369,16 @@ def location_extras(resource, payload_string): try: if 
len(locationAdminLevel.strip()) > 0 and locationAdminLevel != "adminLevel": - payload_string = payload_string.replace("$adminLevelCode", locationAdminLevel) + payload_string = payload_string.replace( + "$adminLevelCode", locationAdminLevel + ) else: if locationAdminLevel in resource: admin_level = check_parent_admin_level(locationParentId) if admin_level: - payload_string = payload_string.replace("$adminLevelCode", admin_level) + payload_string = payload_string.replace( + "$adminLevelCode", admin_level + ) else: obj = json.loads(payload_string) obj_type = obj["resource"]["type"] @@ -371,10 +414,18 @@ def location_extras(resource, payload_string): payload_string = json.dumps(obj, indent=4) try: - if len(locationPhysicalType.strip()) > 0 and locationPhysicalType != "physicalType": + if ( + len(locationPhysicalType.strip()) > 0 + and locationPhysicalType != "physicalType" + ): payload_string = payload_string.replace("$pt_display", locationPhysicalType) - if len(locationPhysicalTypeCode.strip()) > 0 and locationPhysicalTypeCode != "physicalTypeCode": - payload_string = payload_string.replace("$pt_code", locationPhysicalTypeCode) + if ( + len(locationPhysicalTypeCode.strip()) > 0 + and locationPhysicalTypeCode != "physicalTypeCode" + ): + payload_string = payload_string.replace( + "$pt_code", locationPhysicalTypeCode + ) else: obj = json.loads(payload_string) del obj["resource"]["physicalType"] @@ -387,7 +438,8 @@ def location_extras(resource, payload_string): try: if longitude and longitude != "longitude": payload_string = payload_string.replace('"$longitude"', longitude).replace( - '"$latitude"', latitude) + '"$latitude"', latitude + ) else: obj = json.loads(payload_string) del obj["resource"]["position"] @@ -401,9 +453,7 @@ def location_extras(resource, payload_string): # custom extras for careTeams -def care_team_extras( - resource, payload_string, ftype -): +def care_team_extras(resource, payload_string, ftype): orgs_list = [] participant_list = [] elements = [] @@ 
-475,9 +525,225 @@ def care_team_extras( return payload_string +# custom extras for product import +def group_extras(resource, payload_string, group_type): + payload_obj = json.loads(payload_string) + item_name = resource[0] + del_indexes = [] + + GROUP_INDEX_MAPPING = { + "product_secondary_id_index": 1, + "product_is_attractive_index": 0, + "product_is_available_index": 1, + "product_condition_index": 2, + "product_appropriate_usage_index": 3, + "product_accountability_period_index": 4, + "product_image_index": 5, + "inventory_official_id_index": 0, + "inventory_secondary_id_index": 1, + "inventory_usual_id_index": 2, + "inventory_member_index": 0, + "inventory_quantity_index": 0, + "inventory_unicef_section_index": 1, + "inventory_donor_index": 2, + } + + if group_type == "product": + ( + _, + active, + *_, + previous_id, + is_attractive_item, + availability, + condition, + appropriate_usage, + accountability_period, + image_source_url, + ) = resource + + if active: + payload_obj["resource"]["active"] = active + else: + del payload_obj["resource"]["active"] + + if previous_id: + payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["product_secondary_id_index"] + ]["value"] = previous_id + else: + del payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["product_secondary_id_index"] + ] + + if is_attractive_item: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_is_attractive_index"] + ]["valueBoolean"] = is_attractive_item + else: + del_indexes.append(GROUP_INDEX_MAPPING["product_is_attractive_index"]) + + if availability: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_is_available_index"] + ]["valueCodeableConcept"]["text"] = availability + else: + del_indexes.append(GROUP_INDEX_MAPPING["product_is_available_index"]) + + if condition: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_condition_index"] + ]["valueCodeableConcept"]["text"] = condition + else: + 
del_indexes.append(GROUP_INDEX_MAPPING["product_condition_index"]) + + if appropriate_usage: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_appropriate_usage_index"] + ]["valueCodeableConcept"]["text"] = appropriate_usage + else: + del_indexes.append(GROUP_INDEX_MAPPING["product_appropriate_usage_index"]) + + if accountability_period: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_accountability_period_index"] + ]["valueQuantity"]["value"] = accountability_period + else: + del_indexes.append( + GROUP_INDEX_MAPPING["product_accountability_period_index"] + ) + + if image_source_url: + image_binary = save_image(image_source_url) + if image_binary != 0: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["product_image_index"] + ]["valueReference"]["reference"] = ("Binary/" + image_binary) + else: + logging.error( + "Unable to link the image Binary resource for product " + item_name + ) + del_indexes.append(GROUP_INDEX_MAPPING["product_image_index"]) + else: + del_indexes.append(GROUP_INDEX_MAPPING["product_image_index"]) + + elif group_type == "inventory": + ( + _, + active, + *_, + po_number, + serial_number, + usual_id, + actual, + product_id, + delivery_date, + accountability_date, + quantity, + unicef_section, + donor, + ) = resource + + if active: + payload_obj["resource"]["active"] = bool(active) + else: + del payload_obj["resource"]["active"] + + if serial_number: + payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_official_id_index"] + ]["value"] = serial_number + else: + del payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_official_id_index"] + ] + + if po_number: + payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_secondary_id_index"] + ]["value"] = po_number + else: + del payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_secondary_id_index"] + ] + + if usual_id: + 
payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_usual_id_index"] + ]["value"] = usual_id + else: + del payload_obj["resource"]["identifier"][ + GROUP_INDEX_MAPPING["inventory_usual_id_index"] + ] + + if actual: + payload_obj["resource"]["actual"] = bool(actual) + else: + del payload_obj["resource"]["actual"] + + if product_id: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["entity"]["reference"] = ("Group/" + product_id) + else: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["entity"]["reference"] = "Group/" + + if delivery_date: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["period"]["start"] = delivery_date + else: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["period"]["start"] = "" + + if accountability_date: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["period"]["end"] = accountability_date + else: + payload_obj["resource"]["member"][ + GROUP_INDEX_MAPPING["inventory_member_index"] + ]["period"]["end"] = "" + + if quantity: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["inventory_quantity_index"] + ]["valueQuantity"]["value"] = int(quantity) + else: + del_indexes.append(GROUP_INDEX_MAPPING["inventory_quantity_index"]) + + if unicef_section: + payload_obj["resource"]["characteristic"][ + GROUP_INDEX_MAPPING["inventory_unicef_section_index"] + ]["valueCodeableConcept"]["text"] = unicef_section + else: + del_indexes.append(GROUP_INDEX_MAPPING["inventory_unicef_section_index"]) + + if donor: + payload_obj["resource"]["characteristic"][2]["valueCodeableConcept"][ + "text" + ] = donor + else: + del_indexes.append(GROUP_INDEX_MAPPING["inventory_donor_index"]) + + else: + logging.info("Group type not defined") + + for x in reversed(del_indexes): + del payload_obj["resource"]["characteristic"][x] + + payload_string 
= json.dumps(payload_obj, indent=4) + return payload_string + + def extract_matches(resource_list): teamMap = {} - with click.progressbar(resource_list, label='Progress::Extract matches ') as extract_progress: + with click.progressbar( + resource_list, label="Progress::Extract matches " + ) as extract_progress: for resource in extract_progress: group_name, group_id, item_name, item_id = resource if group_id.strip() and item_id.strip(): @@ -498,8 +764,13 @@ def build_assign_payload(rows, resource_type): # check if already exists base_url = get_base_url() - check_url = (base_url + "/" + resource_type + "/_search?_count=1&practitioner=Practitioner/" - + practitioner_id) + check_url = ( + base_url + + "/" + + resource_type + + "/_search?_count=1&practitioner=Practitioner/" + + practitioner_id + ) response = handle_request("GET", "", check_url) json_response = json.loads(response[0]) @@ -508,13 +779,15 @@ def build_assign_payload(rows, resource_type): resource = json_response["entry"][0]["resource"] try: - resource["organization"]["reference"] = "Organization/" + organization_id + resource["organization"]["reference"] = ( + "Organization/" + organization_id + ) resource["organization"]["display"] = organization_name except KeyError: org = { "organization": { "reference": "Organization/" + organization_id, - "display": organization_name + "display": organization_name, } } resource.update(org) @@ -527,9 +800,13 @@ def build_assign_payload(rows, resource_type): logging.info("Creating a new resource") # generate a new id - new_id = str(uuid.uuid5(uuid.NAMESPACE_DNS, practitioner_id + organization_id)) + new_id = str( + uuid.uuid5(uuid.NAMESPACE_DNS, practitioner_id + organization_id) + ) - with open("json_payloads/practitioner_organization_payload.json") as json_file: + with open( + "json_payloads/practitioner_organization_payload.json" + ) as json_file: payload_string = json_file.read() # replace the variables in payload @@ -545,15 +822,17 @@ def build_assign_payload(rows, 
resource_type): resource = json.loads(payload_string) else: - raise ValueError ("The number of practitioner references should only be 0 or 1") + raise ValueError( + "The number of practitioner references should only be 0 or 1" + ) payload = { "request": { "method": "PUT", "url": resource_type + "/" + practitioner_role_id, - "ifMatch": version + "ifMatch": version, }, - "resource": resource + "resource": resource, } full_string = json.dumps(payload, indent=4) final_string = final_string + full_string + "," @@ -576,7 +855,9 @@ def build_org_affiliation(resources, resource_list): with open("json_payloads/organization_affiliation_payload.json") as json_file: payload_string = json_file.read() - with click.progressbar(resources, label='Progress::Build payload ') as build_progress: + with click.progressbar( + resources, label="Progress::Build payload " + ) as build_progress: for key in build_progress: rp = "" unique_uuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, key)) @@ -618,12 +899,22 @@ def get_valid_resource_type(resource_type): # This function gets the current resource version from the API def get_resource(resource_id, resource_type): - resource_type = get_valid_resource_type(resource_type) + if resource_type != "Group": + resource_type = get_valid_resource_type(resource_type) resource_url = "/".join([config.fhir_base_url, resource_type, resource_id]) response = handle_request("GET", "", resource_url) return json.loads(response[0])["meta"]["versionId"] if response[1] == 200 else "0" +def check_for_nulls(resource: list) -> list: + for index, value in enumerate(resource): + if len(value.strip()) < 1: + resource[index] = None + else: + resource[index] = value.strip() + return resource + + # This function builds a json payload # which is posted to the api to create resources def build_payload(resource_type, resources, resource_payload_file): @@ -633,10 +924,14 @@ def build_payload(resource_type, resources, resource_payload_file): with open(resource_payload_file) as json_file: 
payload_string = json_file.read() - with click.progressbar(resources, label='Progress::Building payload ') as build_payload_progress: + with click.progressbar( + resources, label="Progress::Building payload " + ) as build_payload_progress: for resource in build_payload_progress: logging.info("\t") + resource = check_for_nulls(resource) + try: name, status, method, id, *_ = resource except ValueError: @@ -645,39 +940,27 @@ def build_payload(resource_type, resources, resource_payload_file): method = "create" id = str(uuid.uuid5(uuid.NAMESPACE_DNS, name)) - try: - if method == "create": - version = "1" - if len(id.strip()) > 0: - # use the provided id - unique_uuid = id.strip() - identifier_uuid = id.strip() - else: - # generate a new uuid - unique_uuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, name)) - identifier_uuid = unique_uuid - except IndexError: - # default if method is not provided - unique_uuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, name)) - identifier_uuid = unique_uuid + if method == "create": version = "1" + if id: + unique_uuid = identifier_uuid = id + else: + unique_uuid = identifier_uuid = str( + uuid.uuid5(uuid.NAMESPACE_DNS, name) + ) - try: - if method == "update": - if len(id.strip()) > 0: - version = get_resource(id, resource_type) - if version != "0": - # use the provided id - unique_uuid = id.strip() - identifier_uuid = id.strip() - else: - logging.info("Failed to get resource!") - raise ValueError("Trying to update a Non-existent resource") + if method == "update": + if id: + version = get_resource(id, resource_type) + + if version != "0": + unique_uuid = identifier_uuid = id else: - logging.info("The id is required!") - raise ValueError("The id is required to update a resource") - except IndexError: - raise ValueError("The id is required to update a resource") + logging.info("Failed to get resource!") + raise ValueError("Trying to update a Non-existent resource") + else: + logging.info("The id is required!") + raise ValueError("The id is required to 
update a resource") # ps = payload_string ps = ( @@ -698,6 +981,14 @@ def build_payload(resource_type, resources, resource_payload_file): ps = location_extras(resource, ps) elif resource_type == "careTeams": ps = care_team_extras(resource, ps, "orgs & users") + elif resource_type == "Group": + if "inventory" in resource_payload_file: + group_type = "inventory" + elif "product" in resource_payload_file: + group_type = "product" + else: + logging.error("Undefined group type") + ps = group_extras(resource, ps, group_type) final_string = final_string + ps + "," @@ -746,9 +1037,7 @@ def confirm_practitioner(user, user_id): base_url = get_base_url() if not practitioner_uuid: # If practitioner uuid not provided in csv, check if any practitioners exist linked to the keycloak user_id - r = handle_request( - "GET", "", base_url + "/Practitioner?identifier=" + user_id - ) + r = handle_request("GET", "", base_url + "/Practitioner?identifier=" + user_id) json_r = json.loads(r[0]) counter = json_r["total"] if counter > 0: @@ -759,9 +1048,7 @@ def confirm_practitioner(user, user_id): else: return False - r = handle_request( - "GET", "", base_url + "/Practitioner/" + practitioner_uuid - ) + r = handle_request("GET", "", base_url + "/Practitioner/" + practitioner_uuid) if r[1] == 404: logging.info("Practitioner does not exist, proceed to creation") @@ -994,16 +1281,18 @@ def clean_duplicates(users, cascade_delete): # Create a csv file and initialize the CSV writer def write_csv(data, resource_type, fieldnames): logging.info("Writing to csv file") - path = 'csv/exports' + path = "csv/exports" if not os.path.exists(path): os.makedirs(path) current_time = datetime.now().strftime("%Y-%m-%d-%H-%M") csv_file = f"{path}/{current_time}-export_{resource_type}.csv" - with open(csv_file, 'w', newline='') as file: + with open(csv_file, "w", newline="") as file: csv_writer = csv.writer(file) csv_writer.writerow(fieldnames) - with click.progressbar(data, label='Progress:: Writing csv') as 
write_csv_progress: + with click.progressbar( + data, label="Progress:: Writing csv" + ) as write_csv_progress: for row in write_csv_progress: csv_writer.writerow(row) return csv_file @@ -1019,7 +1308,7 @@ def export_resources_to_csv(resource_type, parameter, value, limit): resource_url = "/".join([str(base_url), resource_type]) if len(parameter) > 0: resource_url = ( - resource_url + "?" + parameter + "=" + value + "&_count=" + str(limit) + resource_url + "?" + parameter + "=" + value + "&_count=" + str(limit) ) response = handle_request("GET", "", resource_url) if response[1] == 200: @@ -1028,17 +1317,36 @@ def export_resources_to_csv(resource_type, parameter, value, limit): try: if resources["entry"]: if resource_type == "Location": - elements = ["name", "status", "method", "id", "identifier", "parentName", "parentID", "type", - "typeCode", - "physicalType", "physicalTypeCode"] + elements = [ + "name", + "status", + "method", + "id", + "identifier", + "parentName", + "parentID", + "type", + "typeCode", + "physicalType", + "physicalTypeCode", + ] elif resource_type == "Organization": elements = ["name", "active", "method", "id", "identifier"] elif resource_type == "CareTeam": - elements = ["name", "status", "method", "id", "identifier", "organizations", "participants"] + elements = [ + "name", + "status", + "method", + "id", + "identifier", + "organizations", + "participants", + ] else: elements = [] - with click.progressbar(resources["entry"], - label='Progress:: Extracting resource') as extract_resources_progress: + with click.progressbar( + resources["entry"], label="Progress:: Extracting resource" + ) as extract_resources_progress: for x in extract_resources_progress: rl = [] orgs_list = [] @@ -1052,21 +1360,33 @@ def export_resources_to_csv(resource_type, parameter, value, limit): elif element == "identifier": value = x["resource"]["identifier"][0]["value"] elif element == "organizations": - organizations = x["resource"]["managingOrganization"] + 
organizations = x["resource"][ + "managingOrganization" + ] for index, value in enumerate(organizations): - reference = x["resource"]["managingOrganization"][index]["reference"] + reference = x["resource"][ + "managingOrganization" + ][index]["reference"] new_reference = reference.split("/", 1)[1] - display = x["resource"]["managingOrganization"][index]["display"] - organization = ":".join([new_reference, display]) + display = x["resource"]["managingOrganization"][ + index + ]["display"] + organization = ":".join( + [new_reference, display] + ) orgs_list.append(organization) string = "|".join(map(str, orgs_list)) value = string elif element == "participants": participants = x["resource"]["participant"] for index, value in enumerate(participants): - reference = x["resource"]["participant"][index]["member"]["reference"] + reference = x["resource"]["participant"][index][ + "member" + ]["reference"] new_reference = reference.split("/", 1)[1] - display = x["resource"]["participant"][index]["member"]["display"] + display = x["resource"]["participant"][index][ + "member" + ]["display"] participant = ":".join([new_reference, display]) participants_list.append(participant) string = "|".join(map(str, participants_list)) @@ -1077,13 +1397,21 @@ def export_resources_to_csv(resource_type, parameter, value, limit): reference = x["resource"]["partOf"]["reference"] value = reference.split("/", 1)[1] elif element == "type": - value = x["resource"]["type"][0]["coding"][0]["display"] + value = x["resource"]["type"][0]["coding"][0][ + "display" + ] elif element == "typeCode": - value = x["resource"]["type"][0]["coding"][0]["code"] + value = x["resource"]["type"][0]["coding"][0][ + "code" + ] elif element == "physicalType": - value = x["resource"]["physicalType"]["coding"][0]["display"] + value = x["resource"]["physicalType"]["coding"][0][ + "display" + ] elif element == "physicalTypeCode": - value = x["resource"]["physicalType"]["coding"][0]["code"] + value = 
x["resource"]["physicalType"]["coding"][0][ + "code" + ] else: value = x["resource"][element] except KeyError: @@ -1097,11 +1425,13 @@ def export_resources_to_csv(resource_type, parameter, value, limit): except KeyError: logging.info("No Resources Found") else: - logging.error(f"Failed to retrieve resource. Status code: {response[1]} response: {response[0]}") + logging.error( + f"Failed to retrieve resource. Status code: {response[1]} response: {response[0]}" + ) def encode_image(image_file): - with open(image_file, 'rb') as image: + with open(image_file, "rb") as image: image_b64_data = base64.b64encode(image.read()) return image_b64_data @@ -1110,34 +1440,43 @@ def encode_image(image_file): # and saves it as a Binary resource. It returns the id of the Binary resource if # successful and 0 if failed def save_image(image_source_url): - headers = {"Authorization": "Bearer " + config.product_access_token} + try: + headers = {"Authorization": "Bearer " + config.product_access_token} + except AttributeError: + headers = {} + data = requests.get(url=image_source_url, headers=headers) + if not os.path.exists("images"): + os.makedirs("images") + if data.status_code == 200: - with open('images/image_file', 'wb') as image_file: + with open("images/image_file", "wb") as image_file: image_file.write(data.content) # get file type mime = magic.Magic(mime=True) - file_type = mime.from_file('images/image_file') + file_type = mime.from_file("images/image_file") - encoded_image = encode_image('images/image_file') + encoded_image = encode_image("images/image_file") resource_id = str(uuid.uuid5(uuid.NAMESPACE_DNS, image_source_url)) payload = { "resourceType": "Bundle", "type": "transaction", - "entry": [{ - "request": { - "method": "PUT", - "url": "Binary/" + resource_id, - "ifMatch": "1" - }, - "resource": { - "resourceType": "Binary", - "id": resource_id, - "contentType": file_type, - "data": str(encoded_image) + "entry": [ + { + "request": { + "method": "PUT", + "url": "Binary/" 
+ resource_id, + "ifMatch": "1", + }, + "resource": { + "resourceType": "Binary", + "id": resource_id, + "contentType": file_type, + "data": str(encoded_image), + }, } - }] + ], } payload_string = json.dumps(payload, indent=4) response = handle_request("POST", payload_string, get_base_url()) @@ -1168,23 +1507,17 @@ def filter(self, record): LOGGING = { - 'version': 1, - 'filters': { - 'custom-filter': { - '()': ResponseFilter, - 'param': 'final-response', + "version": 1, + "filters": { + "custom-filter": { + "()": ResponseFilter, + "param": "final-response", } }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'filters': ['custom-filter'] - } - }, - 'root': { - 'level': 'INFO', - 'handlers': ['console'] + "handlers": { + "console": {"class": "logging.StreamHandler", "filters": ["custom-filter"]} }, + "root": {"level": "INFO", "handlers": ["console"]}, } @@ -1198,21 +1531,41 @@ def filter(self, record): @click.option("--roles_max", required=False, default=500) @click.option("--cascade_delete", required=False, default=False) @click.option("--only_response", required=False) -@click.option("--log_level", type=click.Choice(["DEBUG", "INFO", "ERROR"], case_sensitive=False)) +@click.option( + "--log_level", type=click.Choice(["DEBUG", "INFO", "ERROR"], case_sensitive=False) +) @click.option("--export_resources", required=False) @click.option("--parameter", required=False, default="_lastUpdated") @click.option("--value", required=False, default="gt2023-01-01") @click.option("--limit", required=False, default=1000) def main( - csv_file, access_token, resource_type, assign, setup, group, roles_max, cascade_delete, only_response, log_level, - export_resources, parameter, value, limit + csv_file, + access_token, + resource_type, + assign, + setup, + group, + roles_max, + cascade_delete, + only_response, + log_level, + export_resources, + parameter, + value, + limit, ): if log_level == "DEBUG": - logging.basicConfig(filename='importer.log', 
encoding='utf-8', level=logging.DEBUG) + logging.basicConfig( + filename="importer.log", encoding="utf-8", level=logging.DEBUG + ) elif log_level == "INFO": - logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.INFO) + logging.basicConfig( + filename="importer.log", encoding="utf-8", level=logging.INFO + ) elif log_level == "ERROR": - logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.ERROR) + logging.basicConfig( + filename="importer.log", encoding="utf-8", level=logging.ERROR + ) logging.getLogger().addHandler(logging.StreamHandler()) if only_response: @@ -1239,7 +1592,9 @@ def main( if resource_list: if resource_type == "users": logging.info("Processing users") - with click.progressbar(resource_list, label="Progress:Processing users ") as process_user_progress: + with click.progressbar( + resource_list, label="Progress:Processing users " + ) as process_user_progress: for user in process_user_progress: user_id = create_user(user) if user_id == 0: @@ -1251,7 +1606,9 @@ def main( practitioner_exists = confirm_practitioner(user, user_id) if not practitioner_exists: payload = create_user_resources(user_id, user) - final_response = handle_request("POST", payload, config.fhir_base_url) + final_response = handle_request( + "POST", payload, config.fhir_base_url + ) logging.info("Processing complete!") elif resource_type == "locations": logging.info("Processing locations") @@ -1294,24 +1651,36 @@ def main( assign_group_roles(resource_list, group, roles_max) logging.info("Processing complete") elif setup == "clean_duplicates": - logging.info("=========================================") logging.info( "You are about to clean/delete Practitioner resources on the HAPI server" ) click.confirm("Do you want to continue?", abort=True) clean_duplicates(resource_list, cascade_delete) logging.info("Processing complete!") + elif setup == "products": + logging.info("Importing products as FHIR Group resources") + json_payload = 
build_payload( + "Group", resource_list, "json_payloads/product_group_payload.json" + ) + final_response = handle_request("POST", json_payload, config.fhir_base_url) + elif setup == "inventories": + logging.info("Importing inventories as FHIR Group resources") + json_payload = build_payload( + "Group", resource_list, "json_payloads/inventory_group_payload.json" + ) + final_response = handle_request("POST", json_payload, config.fhir_base_url) else: logging.error("Unsupported request!") else: logging.error("Empty csv file!") - logging.info("{ \"final-response\": " + final_response.text + "}") + logging.info('{ "final-response": ' + final_response.text + "}") end_time = datetime.now() logging.info("End time: " + end_time.strftime("%H:%M:%S")) total_time = end_time - start_time logging.info("Total time: " + str(total_time.total_seconds()) + " seconds") + if __name__ == "__main__": main() diff --git a/importer/test_main.py b/importer/test_main.py index ec3289b0..1bda7696 100644 --- a/importer/test_main.py +++ b/importer/test_main.py @@ -382,6 +382,68 @@ def test_build_payload_care_teams(self, mock_get_resource): } validate(payload_obj["entry"][0]["request"], request_schema) + @patch("main.save_image") + @patch("main.get_resource") + def test_build_payload_group(self, mock_get_resource, mock_save_image): + mock_get_resource.return_value = "1" + mock_save_image.return_value = "f374a23a-3c6a-4167-9970-b10c16a91bbd" + + csv_file = "csv/import/product.csv" + resource_list = read_csv(csv_file) + payload = build_payload( + "Group", resource_list, "json_payloads/product_group_payload.json") + payload_obj = json.loads(payload) + + self.assertIsInstance(payload_obj, dict) + self.assertEqual(payload_obj["resourceType"], "Bundle") + self.assertEqual(len(payload_obj["entry"]), 2) + + resource_schema_0 = { + "type": "object", + "properties": { + "resourceType": {"const": "Group"}, + "id": {"const": "1d86d0e2-bac8-4424-90ae-e2298900ac3c"}, + "identifier": {"type": "array", "items": 
{"type": "object"}}, + "active": {"const": "true"}, + "name": {"const": "thermometer"}, + "characteristic": { + "type": "array", + "minItems": 6, + "maxItems": 6 + } + }, + "required": ["resourceType", "id", "identifier", "active", "name"] + } + validate(payload_obj["entry"][0]["resource"], resource_schema_0) + + resource_schema_1 = { + "type": "object", + "properties": { + "resourceType": {"const": "Group"}, + "id": {"const": "334ec316-b44b-5678-b110-4d7ad6b1972f"}, + "identifier": {"type": "array", "items": {"type": "object"}}, + "active": {"const": "true"}, + "name": {"const": "sterilizer"}, + "characteristic": { + "type": "array", + "minItems": 2, + "maxItems": 2 + } + }, + "required": ["resourceType", "id", "identifier", "active", "name"] + } + validate(payload_obj["entry"][1]["resource"], resource_schema_1) + + request_schema = { + "type": "object", + "properties": { + "method": {"const": "PUT"}, + "url": {"const": "Group/1d86d0e2-bac8-4424-90ae-e2298900ac3c"}, + "ifMatch": {"const": "1"}, + }, + } + validate(payload_obj["entry"][0]["request"], request_schema) + def test_extract_matches(self): csv_file = "csv/organizations/organizations_locations.csv" resource_list = read_csv(csv_file)