diff --git a/importer/README.md b/importer/README.md
index 782b5c46..b56989d5 100644
--- a/importer/README.md
+++ b/importer/README.md
@@ -35,6 +35,7 @@ and then posts them to the API for creation
 4. Run script - `python3 main.py --csv_file csv/locations.csv --resource_type locations`
 5. You can turn on logging by passing a `--log_level` to the command line as `info`, `debug` or `error`. For example `python3 main.py --csv_file csv/locations.csv --resource_type locations --log_level info`
 6. There is a progress bar that shows the read_csv and build_payload progress as it is going on
+7. You can get only the final API response after the import is done by passing `--only_response true`
 
 See example csvs in the csv folder
 
diff --git a/importer/main.py b/importer/main.py
index 0fc8108a..b414cff3 100644
--- a/importer/main.py
+++ b/importer/main.py
@@ -4,6 +4,7 @@
 import click
 import requests
 import logging
+import logging.config
 import backoff
 from datetime import datetime
 from oauthlib.oauth2 import LegacyApplicationClient
@@ -864,6 +865,39 @@ def clean_duplicates(users, cascade_delete):
         logging.info("No Practitioners found")
 
 
+class ResponseFilter(logging.Filter):
+    def __init__(self, param=None):
+        self.param = param
+
+    def filter(self, record):
+        if self.param is None:
+            allow = True
+        else:
+            allow = self.param in record.msg
+        return allow
+
+
+LOGGING = {
+    'version': 1,
+    'filters': {
+        'custom-filter': {
+            '()': ResponseFilter,
+            'param': 'final-response',
+        }
+    },
+    'handlers': {
+        'console': {
+            'class': 'logging.StreamHandler',
+            'filters': ['custom-filter']
+        }
+    },
+    'root': {
+        'level': 'INFO',
+        'handlers': ['console']
+    },
+}
+
+
 @click.command()
 @click.option("--csv_file", required=True)
 @click.option("--access_token", required=False)
@@ -873,11 +907,12 @@ def clean_duplicates(users, cascade_delete):
 @click.option("--group", required=False)
 @click.option("--roles_max", required=False, default=500)
 @click.option("--cascade_delete", required=False, default=False)
+@click.option("--only_response", required=False)
 @click.option(
     "--log_level", type=click.Choice(["DEBUG", "INFO", "ERROR"], case_sensitive=False)
 )
 def main(
-    csv_file, access_token, resource_type, assign, setup, group, roles_max, cascade_delete, log_level
+    csv_file, access_token, resource_type, assign, setup, group, roles_max, cascade_delete, only_response, log_level
 ):
     if log_level == "DEBUG":
         logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.DEBUG)
@@ -887,6 +922,9 @@
         logging.basicConfig(filename='importer.log', encoding='utf-8', level=logging.ERROR)
     logging.getLogger().addHandler(logging.StreamHandler())
 
+    if only_response:
+        logging.config.dictConfig(LOGGING)
+
     start_time = datetime.now()
     logging.info("Start time: " + start_time.strftime("%H:%M:%S"))
 
@@ -895,6 +933,8 @@
     global global_access_token
     global_access_token = access_token
 
+    final_response = ""
+
     logging.info("Starting csv import...")
     resource_list = read_csv(csv_file)
     if resource_list:
@@ -912,14 +952,14 @@
                 practitioner_exists = confirm_practitioner(user, user_id)
                 if not practitioner_exists:
                     payload = create_user_resources(user_id, user)
-                    handle_request("POST", payload, config.fhir_base_url)
+                    final_response = handle_request("POST", payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif resource_type == "locations":
             logging.info("Processing locations")
             json_payload = build_payload(
                 "locations", resource_list, "json_payloads/locations_payload.json"
             )
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif resource_type == "organizations":
             logging.info("Processing organizations")
@@ -928,32 +968,32 @@
                 resource_list,
                 "json_payloads/organizations_payload.json",
             )
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif resource_type == "careTeams":
             logging.info("Processing CareTeams")
             json_payload = build_payload(
                 "careTeams", resource_list, "json_payloads/careteams_payload.json"
             )
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif assign == "organization-Location":
             logging.info("Assigning Organizations to Locations")
             matches = extract_matches(resource_list)
             json_payload = build_org_affiliation(matches, resource_list)
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif assign == "careTeam-Organization":
             logging.info("Assigning CareTeam to Organization")
             matches = extract_matches(resource_list)
             json_payload = fetch_and_build(matches, "orgs")
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif assign == "user-careTeam":
             logging.info("Assigning users to careTeam")
             matches = extract_matches(resource_list)
             json_payload = fetch_and_build(matches, "users")
-            handle_request("POST", json_payload, config.fhir_base_url)
+            final_response = handle_request("POST", json_payload, config.fhir_base_url)
             logging.info("Processing complete!")
         elif setup == "roles":
             logging.info("Setting up keycloak roles")
@@ -974,6 +1014,8 @@
     else:
         logging.error("Empty csv file!")
 
+    if final_response:
+        logging.info("{ \"final-response\": " + final_response.text + "}")
     end_time = datetime.now()
     logging.info("End time: " + end_time.strftime("%H:%M:%S"))
     total_time = end_time - start_time
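For reference, here is the console-filtering mechanism this patch introduces, reduced to a minimal self-contained sketch. It mirrors the `ResponseFilter`/`LOGGING`/`dictConfig` code added above except where noted: the two demo log calls at the bottom are hypothetical, and `filter()` reads `record.getMessage()` rather than `record.msg` (equivalent here, since the importer logs pre-formatted strings, but also safe with %-style lazy arguments):

```python
import logging
import logging.config


class ResponseFilter(logging.Filter):
    """Pass only records whose message contains `param`; pass everything if param is None."""

    def __init__(self, param=None):
        super().__init__()
        self.param = param

    def filter(self, record):
        # getMessage() also covers %-style lazy formatting, unlike record.msg
        return self.param is None or self.param in record.getMessage()


LOGGING = {
    "version": 1,
    "filters": {
        "custom-filter": {
            "()": ResponseFilter,  # dictConfig calls this factory to build the filter
            "param": "final-response",
        }
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "filters": ["custom-filter"],  # filter applies to this handler only
        }
    },
    "root": {"level": "INFO", "handlers": ["console"]},
}

logging.config.dictConfig(LOGGING)

logging.info("Starting csv import...")  # suppressed by the filter
logging.info('{ "final-response": {"status": "201 Created"} }')  # the only line printed
```

With the patch applied, a run like `python3 main.py --csv_file csv/locations.csv --resource_type locations --only_response true` should therefore print only the `{ "final-response": ... }` line to the console, with the progress messages filtered out. Note that `--only_response` is declared without a type or default, so any non-empty value enables the filter.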