Merge branch 'main' into 199-validate-sm
sharon2719 authored Oct 16, 2024
2 parents 6cf6cef + 654fd0f commit bd37d38
Showing 5 changed files with 154 additions and 65 deletions.
4 changes: 2 additions & 2 deletions importer/README.md
@@ -1,6 +1,6 @@
# Setup Keycloak Roles

This script is used to set up keycloak roles and groups. It takes in a CSV file with the following columns:
This script is used to set up keycloak roles and groups. It takes in a csv file with the following columns:

- **role**: The actual names of the roles you would like to create
- **composite**: A boolean value that tells if the role has composite roles or not
@@ -56,7 +56,7 @@ and then posts them to the API for creation
4. Run script - `python3 main.py --csv_file csv/locations.csv --resource_type locations`
5. You can turn on logging by passing a `--log_level` to the command line as `info`, `debug` or `error`. For example `python3 main.py --csv_file csv/locations.csv --resource_type locations --log_level info`
6. There is a progress bar that shows the read_csv and build_payload progress as it is going on
7. You can get only the response from the api after the import is done by passing `--only_response true`
7. You can get a nicely formatted report response from the api after the import is done by passing `--report_response true`.


See example csvs in the csv folder
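
For illustration (not part of this diff): a run with `--report_response true` over a 10-row CSV in which three rows fail might log a report shaped like the one below. The field names come from the importer's report builder; the counts and diagnostics are invented.

```json
{
    "status": "Failed",
    "totalRecords": 10,
    "processedRecords": 7,
    "failedRecords": 3,
    "errorDetails": [
        {
            "severity": "error",
            "diagnostics": "example diagnostics text from the server"
        }
    ]
}
```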
59 changes: 59 additions & 0 deletions importer/importer/builder.py
@@ -1,4 +1,5 @@
import base64
import csv
import json
import logging
import os
@@ -1065,3 +1066,61 @@ def link_to_location(resource_list):
return build_assign_payload(arr, "List", "subject=Location/")
else:
return ""


def count_records(csv_filepath):
with open(csv_filepath, newline="") as csvfile:
reader = csv.reader(csvfile)
return sum(1 for _ in reader) - 1


def process_response(response):
json_response = json.loads(response)
issues = json_response["issue"]
return issues


def build_report(csv_file, response, error_details, fail_count, fail_all):
# Get total number of records
total_records = count_records(csv_file)
issues = []

# Get status code
if hasattr(response, "status_code") and response.status_code > 201:
status = "Failed"
processed_records = 0

if response.text:
issues = process_response(response.text)
for issue in issues:
del issue["code"]
else:
if fail_count > 0:
status = "Failed"
if fail_all:
processed_records = total_records
else:
processed_records = total_records - fail_count
else:
status = "Completed"
processed_records = total_records

report = {
"status": status,
"totalRecords": total_records,
"processedRecords": processed_records,
}
if len(issues) > 0:
report["failedRecords"] = len(issues)

all_errors = issues + error_details

if len(all_errors) > 0:
report["errorDetails"] = all_errors

string_report = json.dumps(report, indent=4)
logging.info("============================================================")
logging.info("============================================================")
logging.info(string_report)
logging.info("============================================================")
logging.info("============================================================")
59 changes: 39 additions & 20 deletions importer/importer/users.py
@@ -69,10 +69,11 @@ def create_user(user):
logging.info("Setting user password")
r = handle_request("PUT", payload, url)

return user_id
return user_id, {}
else:
logging.info(r.text)
return 0
obj = {"task": "Create user", "row": str(user), "error": r.text}
return 0, obj


# This function build the FHIR resources related to a
@@ -178,21 +179,23 @@ def confirm_keycloak_user(user):
try:
response_username = json_response[0]["username"]
except IndexError:
logging.error("Skipping user: " + str(user))
logging.error("Username not found!")
return 0
message = "Username not found! Skipping user: " + str(user[2])
logging.error(message)
obj = {"task": "Confirm Keycloak user", "row": str(user), "error": message}
return 0, obj

if response_username != user_username:
logging.error("Skipping user: " + str(user))
logging.error("Username does not match")
return 0
message = "Username does not match! Skipping user: " + str(user[2])
logging.error(message)
obj = {"task": "Confirm Keycloak user", "row": str(user), "error": message}
return 0, obj

if len(response_email) > 0 and response_email != user_email:
logging.error("Email does not match for user: " + str(user))
logging.error("Email does not match for user: " + str(user[2]))

keycloak_id = json_response[0]["id"]
logging.info("User confirmed with id: " + keycloak_id)
return keycloak_id
return keycloak_id, {}


def confirm_practitioner(user, user_id):
@@ -204,18 +207,24 @@ def confirm_practitioner(user, user_id):
json_r = json.loads(r[0])
counter = json_r["total"]
if counter > 0:
logging.info(
str(counter) + " Practitioner(s) exist, linked to the provided user"
message = (
"User "
+ str(user[2])
+ " is linked to "
+ str(counter)
+ " Practitioner(s)"
)
return True
logging.info(message)
obj = {"task": "Confirm Practitioner", "row": str(user), "error": message}
return True, obj
else:
return False
return False, {}

r = handle_request("GET", "", base_url + "/Practitioner/" + practitioner_uuid)

if r[1] == 404:
logging.info("Practitioner does not exist, proceed to creation")
return False
return False, {}
else:
try:
json_r = json.loads(r[0])
@@ -229,16 +238,26 @@ def confirm_practitioner(user, user_id):
logging.info(
"The Keycloak user and Practitioner are linked as expected"
)
return True
return True, {}
else:
logging.error(
message = (
"The Keycloak user and Practitioner are not linked as expected"
)
return True
logging.error(message)
obj = {
"task": "Confirm Practitioner",
"row": str(user),
"error": message,
}
return True, obj

except Exception as err:
logging.error("Error occurred trying to find Practitioner: " + str(err))
return True
message = (
"Error occurred trying to find Practitioner. The error is: " + str(err)
)
logging.error(message)
obj = {"task": "Confirm Practitioner", "row": str(user), "error": message}
return True, obj


def create_roles(role_list, roles_max):
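
Every helper changed above now returns a pair: its usual result plus a dict describing any problem it hit (or `{}` when there is none). A condensed sketch (not part of this commit) of the caller side, mirroring the main.py changes further down; `user` stands for one parsed row of the users CSV:

```python
from importer.users import confirm_keycloak_user, create_user


def process_user_row(user, issues, fail_count):
    """Handle one users-CSV row, collecting error dicts into `issues`."""
    user_id, create_issue = create_user(user)  # 0 plus an error dict on failure
    if user_id == 0:
        fail_count += 1
        if create_issue:
            issues.append(create_issue)
        # creation failed, so check whether the user already exists in Keycloak
        user_id, confirm_issue = confirm_keycloak_user(user)
        if confirm_issue:
            issues.append(confirm_issue)
    # `issues` is later handed to build_report as error_details
    return user_id, fail_count
```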
89 changes: 50 additions & 39 deletions importer/main.py
@@ -1,13 +1,14 @@
import json
import logging
import logging.config
import pathlib
from datetime import datetime

import click

from importer.builder import (build_assign_payload, build_group_list_resource,
build_org_affiliation, build_payload,
extract_matches, extract_resources, link_to_location)
build_report, extract_matches, extract_resources,
link_to_location)
from importer.config.settings import fhir_base_url
from importer.request import handle_request
from importer.users import (assign_default_groups_roles, assign_group_roles,
@@ -20,33 +21,6 @@
dir_path = str(pathlib.Path(__file__).parent.resolve())


class ResponseFilter(logging.Filter):
def __init__(self, param=None):
self.param = param

def filter(self, record):
if self.param is None:
allow = True
else:
allow = self.param in record.msg
return allow


LOGGING = {
"version": 1,
"filters": {
"custom-filter": {
"()": ResponseFilter,
"param": "final-response",
}
},
"handlers": {
"console": {"class": "logging.StreamHandler", "filters": ["custom-filter"]}
},
"root": {"level": "INFO", "handlers": ["console"]},
}


@click.command()
@click.option("--csv_file", required=False)
@click.option("--json_file", required=False)
@@ -57,7 +31,7 @@ def filter(self, record):
@click.option("--roles_max", required=False, default=500)
@click.option("--default_groups", required=False, default=True)
@click.option("--cascade_delete", required=False, default=False)
@click.option("--only_response", required=False)
@click.option("--report_response", required=False)
@click.option("--export_resources", required=False)
@click.option("--parameter", required=False, default="_lastUpdated")
@click.option("--value", required=False, default="gt2023-01-01")
@@ -90,7 +64,7 @@ def main(
roles_max,
default_groups,
cascade_delete,
only_response,
report_response,
log_level,
export_resources,
parameter,
@@ -117,11 +91,11 @@
)
logging.getLogger().addHandler(logging.StreamHandler())

if only_response:
logging.config.dictConfig(LOGGING)

start_time = datetime.now()
logging.info("Start time: " + start_time.strftime("%H:%M:%S"))
issues = []
fail_count = 0
fail_all = False

if export_resources == "True":
logging.info("Starting export...")
@@ -153,19 +127,30 @@
resource_list, label="Progress:Processing users "
) as process_user_progress:
for user in process_user_progress:
user_id = create_user(user)
user_id, create_issue = create_user(user)
if user_id == 0:
fail_count = fail_count + 1
if create_issue:
issues.append(create_issue)
# user was not created above, check if it already exists
user_id = confirm_keycloak_user(user)
user_id, confirm_issue = confirm_keycloak_user(user)
if confirm_issue:
issues.append(confirm_issue)
if user_id != 0:
# user_id has been retrieved
# check practitioner
practitioner_exists = confirm_practitioner(user, user_id)
practitioner_exists, practitioner_issue = confirm_practitioner(
user, user_id
)
if practitioner_issue:
issues.append(practitioner_issue)
if not practitioner_exists:
payload = create_user_resources(user_id, user)
final_response = handle_request(
"POST", payload, fhir_base_url
)
if final_response.status_code > 201:
issues.append(final_response.text)
logging.info("Processing complete!")
elif resource_type == "locations":
logging.info("Processing locations")
@@ -199,7 +184,6 @@ def main(
matches = extract_matches(resource_list)
json_payload = build_org_affiliation(matches, resource_list)
final_response = handle_request("POST", json_payload, fhir_base_url)
logging.info(final_response)
logging.info("Processing complete!")
elif assign == "users-organizations":
logging.info("Assigning practitioner to Organization")
@@ -244,7 +228,13 @@ def main(
final_response = handle_request("POST", "", fhir_base_url, list_payload)
logging.info("Processing complete!")
else:
logging.error(product_creation_response.text)
fail_count = fail_count + 1
fail_all = True
json_response = json.loads(product_creation_response.text)
for _ in json_response["issue"]:
del _["code"]
issues.append(_)
logging.error(json_response)
elif setup == "inventories":
logging.info("Importing inventories as FHIR Group resources")
json_payload = build_payload(
@@ -258,13 +248,28 @@ def main(
groups_created = extract_resources(
groups_created, inventory_creation_response.text
)
else:
fail_count = fail_count + 1
fail_all = True
json_response = json.loads(inventory_creation_response.text)
for _ in json_response["issue"]:
del _["code"]
issues.append(_)
logging.error(json_response)

lists_created = []
link_payload = link_to_location(resource_list)
if len(link_payload) > 0:
link_response = handle_request("POST", link_payload, fhir_base_url)
if link_response.status_code == 200:
lists_created = extract_resources(lists_created, link_response.text)
else:
fail_count = fail_count + 1
fail_all = True
json_response = json.loads(link_response.text)
for _ in json_response["issue"]:
del _["code"]
issues.append(_)
logging.info(link_response.text)

full_list_created_resources = groups_created + lists_created
@@ -278,6 +283,9 @@ def main(
final_response = handle_request("POST", "", fhir_base_url, list_payload)
logging.info("Processing complete!")
else:
message = "Unsupported request!"
fail_all = True
issues.append({"Error": message})
logging.error("Unsupported request!")
else:
logging.error("Empty csv file!")
@@ -290,6 +298,9 @@ def main(
total_time = end_time - start_time
logging.info("Total time: " + str(total_time.total_seconds()) + " seconds")

if report_response:
build_report(csv_file, final_response, issues, fail_count, fail_all)


if __name__ == "__main__":
main()
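
The `json_response["issue"]` loops above, like `process_response` in builder.py, assume the server answers a failed POST with a FHIR OperationOutcome. A sketch (not part of this commit) of that payload and of the trimming applied before the issues reach the report; the diagnostics text is invented:

```python
import json

# Illustrative OperationOutcome body returned by the FHIR server on failure
response_text = json.dumps(
    {
        "resourceType": "OperationOutcome",
        "issue": [
            {
                "severity": "error",
                "code": "invalid",
                "diagnostics": "example validation message from the server",
            }
        ],
    }
)

json_response = json.loads(response_text)
for issue in json_response["issue"]:
    del issue["code"]  # keep severity and diagnostics, drop the machine code
# json_response["issue"] is what ends up in the report's errorDetails
```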
