Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SFR-2306: Integrating with Airtable to get backlist records #449

Merged
merged 6 commits into from
Nov 22, 2024
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 29 additions & 12 deletions processes/ingest/publisher_backlist.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import os
from ..util import airtable_integration
from services import PublisherBacklistService

from ..core import CoreProcess
from logger import create_log
Expand All @@ -10,24 +10,41 @@ class PublisherBacklistProcess(CoreProcess):
def __init__(self, *args):
    """Set up the Publisher Backlist ingest process.

    Positional args beyond the four consumed by CoreProcess:
        args[4]: optional record limit for the run (only honored up to 100,
                 Airtable's maximum page size).
        args[5]: optional record offset for the run.
    """
    super(PublisherBacklistProcess, self).__init__(*args[:4])

    # 'complete' process type triggers a full (non-incremental) import
    self.full_import = self.process == 'complete'

    # BUG FIX: the original expression called `args(4)` — a tuple is not
    # callable — and, even with that fixed, the `and`-chain would evaluate
    # to the boolean comparison result instead of the limit value itself.
    self.limit = args[4] if len(args) >= 5 and args[4] and int(args[4]) <= 100 else None
    self.offset = (len(args) >= 6 and args[5]) or None

    self.s3_bucket = os.environ['FILE_BUCKET']
    self.createS3Client()

    self.publisher_backlist_service = PublisherBacklistService()

def runProcess(self):
    """Ingest Publisher Backlist records according to the process type.

    'daily' ingests with the configured offset/limit, 'complete' performs a
    full import, and 'custom' ingests records modified since ingestPeriod.
    Raises whatever the underlying service/persistence layer raises, after
    logging; always closes the DB connection.
    """
    try:
        self.generateEngine()
        self.createSession()

        if self.process == 'daily':
            records = self.publisher_backlist_service.get_records(offset=self.offset, limit=self.limit)
        elif self.process == 'complete':
            records = self.publisher_backlist_service.get_records(full_import=True)
        elif self.process == 'custom':
            # ingestPeriod is presumably populated by CoreProcess from the
            # CLI arguments — TODO confirm against CoreProcess.__init__
            records = self.publisher_backlist_service.get_records(start_timestamp=self.ingestPeriod, offset=self.offset, limit=self.limit)
        else:
            logger.warning(f'Unknown Publisher Backlist ingestion process type {self.process}')
            return

        # Guard against the service returning None instead of an empty list
        for record in records or []:
            self.addDCDWToUpdateList(record)

        self.saveRecords()
        self.commitChanges()

        logger.info(f'Ingested {len(self.records)} Publisher Backlist records')

    except Exception as e:
        logger.exception('Failed to run Publisher Backlist process')
        raise e
    finally:
        self.close_connection()


mitri-slory marked this conversation as resolved.
Show resolved Hide resolved
mitri-slory marked this conversation as resolved.
Show resolved Hide resolved
17 changes: 0 additions & 17 deletions processes/util/airtable_integration.py

This file was deleted.

1 change: 1 addition & 0 deletions services/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
from .sources.nypl_bib_service import NYPLBibService
from .sources.publisher_backlist_service import PublisherBacklistService
92 changes: 92 additions & 0 deletions services/sources/publisher_backlist_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
from datetime import datetime, timedelta, timezone
import os
import requests
import json
import urllib.parse
from typing import Optional

from logger import create_log
from mappings.UofM import UofMMapping
from .source_service import SourceService


logger = create_log(__name__)

BASE_URL = "https://api.airtable.com/v0/appBoLf4lMofecGPU/Publisher%20Backlists%20%26%20Collections%20%F0%9F%93%96?view=UofMichigan%20Press"
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Based on our conversation on Wednesday - do we want to update this to look at the all lists view?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm fine with changing the view to All lists view since we clarified which view we will be using for the process in the future.

class PublisherBacklistService(SourceService):
    """Fetch Publisher Backlist records from the project's Airtable base.

    Pages of records are pulled from the Airtable REST API (following its
    `offset` pagination cursor) and mapped into UofMMapping record objects.
    """

    def __init__(self):
        # Airtable personal access token; if the env var is missing the
        # requests are sent with "Bearer None" and will fail server-side.
        self.airtable_auth_token = os.environ.get('AIRTABLE_KEY', None)

    def get_records(
        self,
        full_import: bool=False,
        start_timestamp: datetime=None,
        offset: Optional[int]=None,
        limit: Optional[int]=None
    ) -> list[UofMMapping]:
        """Return mapped Publisher Backlist records.

        :param full_import: when True, ignore timestamps and fetch everything.
        :param start_timestamp: fetch records modified at/after this time;
            when falsy (and not a full import) defaults to the last 24 hours.
        :param offset: accepted for interface parity; not used by the
            Airtable cursor pagination below.
        :param limit: Airtable page size (defaults to 100, the API maximum).

        BUG FIX: the original loop unconditionally raised Exception on the
        first record and never returned anything despite the annotation.
        """
        pages = self.get_records_json(full_import, start_timestamp, offset, limit)

        mapped_records = []
        for page in pages:
            for record_metadata in page.get('records', []):
                try:
                    # NOTE(review): assumes UofMMapping follows the project's
                    # mapping convention (construct, then applyMapping), as
                    # the sibling NYPL bib mapping does — TODO confirm.
                    record = UofMMapping(record_metadata)
                    record.applyMapping()
                    mapped_records.append(record)
                except Exception:
                    logger.exception('Failed to map Publisher Backlist record')

        return mapped_records

    def get_records_json(self, full_import=False, start_timestamp=None, offset=None, limit=None):
        """Return the raw Airtable JSON pages for the requested window."""
        if limit is None:
            # BUG FIX: the original checked `offset == None` here, so any
            # call supplying an offset left pageSize=None in the URL; the
            # intent was clearly to default the page size.
            limit = 100

        headers = {"Authorization": f"Bearer {self.airtable_auth_token}"}

        if full_import:
            url = f'{BASE_URL}&pageSize={limit}'
        else:
            if not start_timestamp:
                # Default incremental window: the last 24 hours, as a naive
                # UTC datetime (Airtable formula compares wall-clock strings)
                start_timestamp = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(hours=24)
            # BUG FIX: the explicit-timestamp branch referenced the undefined
            # name `start_date_time`, raising NameError at runtime.
            filter_by_formula = self._build_filter_by_formula(start_timestamp)
            url = f'{BASE_URL}&filterByFormula={filter_by_formula}&pageSize={limit}'

        return self._fetch_all_pages(url, headers)

    @staticmethod
    def _build_filter_by_formula(start_date_time):
        """Build the URL-encoded Airtable formula matching records whose
        'Last Modified' field is at or after start_date_time."""
        start_date_time_str = start_date_time.strftime("%Y-%m-%d %H:%M:%S.%f")
        start_date_time_encoded = urllib.parse.quote(start_date_time_str)
        return (
            f"OR(IS_SAME(%7BLast%20Modified%7D,%20%22{start_date_time_encoded}%22,%20%22second%22),"
            f"%20IS_AFTER(%7BLast%20Modified%7D,%20%22{start_date_time_encoded}%22))"
        )

    @staticmethod
    def _fetch_all_pages(url, headers):
        """Follow Airtable's `offset` pagination cursor until exhausted,
        returning every JSON page in order.

        Deduplicates the three copy-pasted pagination loops of the original.
        """
        response_json = requests.get(url, headers=headers).json()
        pages = [response_json]

        while 'offset' in response_json:
            page_offset = response_json['offset']
            response_json = requests.get(f'{url}&offset={page_offset}', headers=headers).json()
            pages.append(response_json)

        return pages

Loading