Commit
Merge branch 'develop' into feature/QPPSE-2005-Upload_New_Entity_IDs
sivaksb authored Oct 28, 2024
2 parents 9a5a483 + 2138943 commit 71c17ac
Showing 2 changed files with 27 additions and 19 deletions.
6 changes: 3 additions & 3 deletions tools/scripts/format-participation-file.py
@@ -7,12 +7,12 @@
def main(argv):
    # Open the workbook and select the second sheet
    wb = load_workbook(filename = argv[0])
-    sh = wb['2023_Providers']
+    sh = wb['2024_Providers']
    data_list = []
    for row in sh.iter_rows(sh.min_row+1, sh.max_row):
        data = OrderedDict()
-        data['npi'] = row[12].value
-        data['tin'] = row[14].value
+        data['npi'] = row[10].value
+        data['tin'] = row[11].value
        data['apm_entity_id'] = row[0].value
        data_list.append(data)
    j = json.dumps(data_list)
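
Note: the new indices assume the 2024 provider sheet keeps the APM entity ID in column A and moves NPI and TIN to columns K and L (row[0], row[10], row[11] in openpyxl's zero-based row tuples). A minimal sketch for sanity-checking that layout before running the formatter; the workbook filename below is a placeholder, not part of the commit:

from openpyxl import load_workbook

# Sketch only: "participation_2024.xlsx" is a hypothetical filename.
wb = load_workbook(filename="participation_2024.xlsx")
sh = wb["2024_Providers"]

# Print the header cells the script will read, to confirm the 2024 layout
# (index 0 -> column A, 10 -> column K, 11 -> column L).
header = [cell.value for cell in next(sh.iter_rows(min_row=1, max_row=1))]
print(header[0], header[10], header[11])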
40 changes: 24 additions & 16 deletions tools/scripts/retrieve-fms-file.py
@@ -1,15 +1,17 @@
#!/usr/bin/env python3

+import os
import sys
import boto3
import argparse
import requests
+import urllib.request
import simplejson as json
from io import BytesIO
from dotenv import dotenv_values
from openpyxl import load_workbook

-config = dotenv_values("local.env")
+config = dotenv_values("../../local.env")
s3_client = boto3.client('s3')
pcf_filename = "pcf_apm_entity_ids.json"

@@ -29,7 +31,7 @@ def get_user_inputs():
    return args


-def download_from_fms(auth_url, fms_url, fms_token, fms_path):
+def download_from_fms(auth_url, fms_url, fms_token, fms_path, filename):
    d = {'client_assertion': fms_token,
         'client_assertion_type': 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer',
         'grant_type': 'client_credentials',
@@ -45,35 +47,33 @@ def download_from_fms(auth_url, fms_url, fms_token, fms_path):
        }
    )
    s2s_token = get_s2s_token.json()["data"]["token"]
-    # print('starting download from fms for file - ' + fms_path)
+    # print('starting download from fms for file - ' + filename)
    get_download_url = requests.post(
        url=fms_url + '/get-file',
-        json={"path": fms_path},
+        json={"path": fms_path + filename},
        verify=False,
        headers={
            'Accept': 'application/vnd.qpp.cms.gov.v2+json',
            'Authorization': 'Bearer ' + s2s_token
        }
    )
    download_url = get_download_url.json()['presigned_url']
-    download_result = requests.get(url=download_url)
-    return download_result
+    urllib.request.urlretrieve(download_url, filename)


-def process_file(download_result):
+def process_file(filename):
    print('processing file')
-    file_object = BytesIO(download_result.content)
-    wb = load_workbook(file_object)
-    sh = wb['2023_Practices']
+    wb = load_workbook(filename)
+    sh = wb['2024_Practices']
    data_list = []
    for row in sh.iter_rows(sh.min_row + 1, sh.max_row):
        data_list.append(row[0].value)
    json_data = json.dumps(data_list)
    return str(json_data).replace(" ", "")

def update_local_repo(data):
-    # print('writing ' + pcf_filename + ' to local repository')
-    with open('./converter/src/main/resources/' + pcf_filename, 'w') as f:
+    # print('writing ' + pcf_filename + ' to local repository')
+    with open('../../converter/src/main/resources/' + pcf_filename, 'w') as f:
        f.write(data)


@@ -88,16 +88,24 @@ def upload_to_s3(data):
    )
    print(upload_status)

+def delete_file(filename):
+    if os.path.exists("./" + filename):
+        os.remove("./" + filename)
+        print("File " + filename + " has been processed and removed successfully!")
+    else:
+        print("Cannot process or delete file " + filename + ", as it does not exist")
+
def main():
    try:
        # args = get_user_inputs()
        # s3_url = download_from_fms(args.auth_url, args.fms_url, args.fms_token, args.fms_path)
-        download_result = download_from_fms(config.get('auth_url'), config.get('fms_url'), config.get('fms_token'),
-                                            config.get('fms_path'))
-        processed_data = process_file(download_result)
+        filename = config.get('filename')
+        download_from_fms(config.get('auth_url'), config.get('fms_url'), config.get('fms_token'),
+                          config.get('fms_path'), filename)
+        processed_data = process_file(filename)
        update_local_repo(processed_data)
-        upload_to_s3(processed_data)
+        # upload_to_s3(processed_data)
+        delete_file(filename)
    except Exception as err:
        print(f"Unexpected Error. {err = }, {type(err) = }")
        sys.exit(1)
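Note: the reworked script resolves all of its inputs from ../../local.env via dotenv_values, and the keys it reads with config.get() are auth_url, fms_url, fms_token, fms_path, and filename. A minimal preflight sketch, assuming python-dotenv is installed; it checks only the keys the committed code actually uses:

from dotenv import dotenv_values

# Sketch only: verify that ../../local.env defines every key the script reads.
required = ("auth_url", "fms_url", "fms_token", "fms_path", "filename")
config = dotenv_values("../../local.env")
missing = [key for key in required if not config.get(key)]
if missing:
    raise SystemExit("local.env is missing: " + ", ".join(missing))
print("local.env looks complete")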
