Print final response (#146)
* Print final response

* Update readme
Wambere authored Feb 27, 2024
1 parent 3f9c508 commit 174645a
Showing 2 changed files with 51 additions and 8 deletions.
1 change: 1 addition & 0 deletions importer/README.md
@@ -35,6 +35,7 @@ and then posts them to the API for creation
4. Run script - `python3 main.py --csv_file csv/locations.csv --resource_type locations`
5. You can turn on logging by passing a `--log_level` to the command line as `info`, `debug` or `error`. For example `python3 main.py --csv_file csv/locations.csv --resource_type locations --log_level info`
6. A progress bar shows the `read_csv` and `build_payload` progress while the import runs
7. You can print only the final response from the API after the import is done by passing `--only_response true`


See example csvs in the csv folder
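For illustration, the new flag composes with the existing options like so (using the sample `csv/locations.csv` from the steps above): `python3 main.py --csv_file csv/locations.csv --resource_type locations --only_response true`. When the flag is set, only the log line carrying the final API response is printed to the console.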
58 changes: 50 additions & 8 deletions importer/main.py
@@ -4,6 +4,7 @@
import click
import requests
import logging
import logging.config
import backoff
from datetime import datetime
from oauthlib.oauth2 import LegacyApplicationClient
@@ -864,6 +865,39 @@ def clean_duplicates(users, cascade_delete):
logging.info("No Practitioners found")


class ResponseFilter(logging.Filter):
def __init__(self, param=None):
self.param = param

def filter(self, record):
if self.param is None:
allow = True
else:
allow = self.param in record.msg
return allow


LOGGING = {
'version': 1,
'filters': {
'custom-filter': {
'()': ResponseFilter,
'param': 'final-response',
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'filters': ['custom-filter']
}
},
'root': {
'level': 'INFO',
'handlers': ['console']
},
}


@click.command()
@click.option("--csv_file", required=True)
@click.option("--access_token", required=False)
@@ -873,11 +907,12 @@ def clean_duplicates(users, cascade_delete):
@click.option("--group", required=False)
@click.option("--roles_max", required=False, default=500)
@click.option("--cascade_delete", required=False, default=False)
@click.option("--only_response", required=False)
@click.option(
"--log_level", type=click.Choice(["DEBUG", "INFO", "ERROR"], case_sensitive=False)
)
def main(
csv_file, access_token, resource_type, assign, setup, group, roles_max, cascade_delete, log_level
csv_file, access_token, resource_type, assign, setup, group, roles_max, cascade_delete, only_response, log_level
):
if log_level == "DEBUG":
logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.DEBUG)
@@ -887,6 +922,9 @@ def main(
logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.ERROR)
logging.getLogger().addHandler(logging.StreamHandler())

if only_response:
logging.config.dictConfig(LOGGING)

start_time = datetime.now()
logging.info("Start time: " + start_time.strftime("%H:%M:%S"))

@@ -895,6 +933,8 @@ def main(
global global_access_token
global_access_token = access_token

final_response = ""

logging.info("Starting csv import...")
resource_list = read_csv(csv_file)
if resource_list:
@@ -912,14 +952,14 @@ def main(
practitioner_exists = confirm_practitioner(user, user_id)
if not practitioner_exists:
payload = create_user_resources(user_id, user)
handle_request("POST", payload, config.fhir_base_url)
final_response = handle_request("POST", payload, config.fhir_base_url)
logging.info("Processing complete!")
elif resource_type == "locations":
logging.info("Processing locations")
json_payload = build_payload(
"locations", resource_list, "json_payloads/locations_payload.json"
)
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif resource_type == "organizations":
logging.info("Processing organizations")
@@ -928,32 +968,32 @@ def main(
resource_list,
"json_payloads/organizations_payload.json",
)
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif resource_type == "careTeams":
logging.info("Processing CareTeams")
json_payload = build_payload(
"careTeams", resource_list, "json_payloads/careteams_payload.json"
)
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif assign == "organization-Location":
logging.info("Assigning Organizations to Locations")
matches = extract_matches(resource_list)
json_payload = build_org_affiliation(matches, resource_list)
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif assign == "careTeam-Organization":
logging.info("Assigning CareTeam to Organization")
matches = extract_matches(resource_list)
json_payload = fetch_and_build(matches, "orgs")
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif assign == "user-careTeam":
logging.info("Assigning users to careTeam")
matches = extract_matches(resource_list)
json_payload = fetch_and_build(matches, "users")
handle_request("POST", json_payload, config.fhir_base_url)
final_response = handle_request("POST", json_payload, config.fhir_base_url)
logging.info("Processing complete!")
elif setup == "roles":
logging.info("Setting up keycloak roles")
@@ -974,6 +1014,8 @@ def main(
else:
logging.error("Empty csv file!")

logging.info("{ \"final-response\": " + final_response.text + "}")

end_time = datetime.now()
logging.info("End time: " + end_time.strftime("%H:%M:%S"))
total_time = end_time - start_time
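The closing `logging.info` embeds the `final-response` marker in its message, so when `--only_response` is set it is the one line that passes `ResponseFilter`; the console output would then look roughly like `{ "final-response": <response body> }`, where the body is whatever the last `handle_request` POST returned.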
