From 76cced2e78fdf955a11b3e8ab1ea9d42f36b2cb5 Mon Sep 17 00:00:00 2001 From: Kyle Villegas <86266231+kylevillegas93@users.noreply.github.com> Date: Mon, 25 Nov 2024 11:07:52 -0500 Subject: [PATCH] SFR-2327: Removing S3Manager as an Ancestor of CoreProcess (#451) --- processes/core.py | 4 ++-- processes/file/covers.py | 7 ++++--- processes/file/fulfill_url_manifest.py | 8 +++++--- processes/ingest/chicago_isac.py | 7 ++++--- processes/ingest/doab.py | 7 ++++--- processes/ingest/loc.py | 6 ++++-- processes/ingest/met.py | 7 ++++--- processes/ingest/muse.py | 7 ++++--- processes/ingest/publisher_backlist.py | 3 ++- processes/ingest/u_of_m.py | 11 ++++++----- processes/ingest/u_of_sc.py | 7 ++++--- tests/unit/processes/file/test_cover_process.py | 4 ++-- .../file/test_fulfill_manifest_process.py | 9 +++------ .../processes/ingest/test_chicago_isac_process.py | 5 ++--- tests/unit/processes/ingest/test_doab_process.py | 15 +++------------ tests/unit/processes/ingest/test_loc_process.py | 12 ++---------- tests/unit/processes/ingest/test_met_process.py | 11 ++--------- tests/unit/processes/ingest/test_muse_process.py | 9 +++------ .../unit/processes/ingest/test_u_of_m_process.py | 9 +-------- 19 files changed, 61 insertions(+), 87 deletions(-) diff --git a/processes/core.py b/processes/core.py index 21723f84ab..6428415c76 100644 --- a/processes/core.py +++ b/processes/core.py @@ -1,4 +1,4 @@ -from managers import DBManager, S3Manager +from managers import DBManager from model import Record from logger import create_log @@ -7,7 +7,7 @@ logger = create_log(__name__) -class CoreProcess(DBManager, S3Manager): +class CoreProcess(DBManager): def __init__(self, process, customFile, ingestPeriod, singleRecord, batchSize=500): super(CoreProcess, self).__init__() self.process = process diff --git a/processes/file/covers.py b/processes/file/covers.py index a882051af1..6cf92b6a51 100644 --- a/processes/file/covers.py +++ b/processes/file/covers.py @@ -2,7 +2,7 @@ import os from ..core 
import CoreProcess -from managers import CoverManager, RedisManager +from managers import CoverManager, RedisManager, S3Manager from model import Edition, Link from model.postgres.edition import EDITION_LINKS from logger import create_log @@ -20,7 +20,8 @@ def __init__(self, *args): self.redis_manager = RedisManager() self.redis_manager.createRedisClient() - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() self.fileBucket = os.environ['FILE_BUCKET'] self.ingestLimit = None @@ -87,7 +88,7 @@ def storeFoundCover(self, manager, edition): manager.coverFormat.lower() ) - self.putObjectInBucket(manager.coverContent, coverPath, self.fileBucket) + self.s3_manager.putObjectInBucket(manager.coverContent, coverPath, self.fileBucket) coverLink = Link( url='https://{}.s3.amazonaws.com/{}'.format(self.fileBucket, coverPath), diff --git a/processes/file/fulfill_url_manifest.py b/processes/file/fulfill_url_manifest.py index 500680bf5f..69397f40f0 100644 --- a/processes/file/fulfill_url_manifest.py +++ b/processes/file/fulfill_url_manifest.py @@ -5,6 +5,7 @@ from ..core import CoreProcess from datetime import datetime, timedelta, timezone +from managers import S3Manager from model import Link from logger import create_log @@ -24,7 +25,8 @@ def __init__(self, *args): self.s3Bucket = os.environ['FILE_BUCKET'] self.host = os.environ['DRB_API_HOST'] self.prefix = 'manifests/UofM/' - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() def runProcess(self): if self.process == 'daily': @@ -40,7 +42,7 @@ def runProcess(self): def fetch_and_update_manifests(self, start_timestamp=None): - batches = self.load_batches(self.prefix, self.s3Bucket) + batches = self.s3_manager.load_batches(self.prefix, self.s3Bucket) if start_timestamp: #Using JMESPath to extract keys from the JSON batches filtered_batch_keys = batches.search(f"Contents[?to_string(LastModified) > '\"{start_timestamp}\"'].Key") @@ -88,7 +90,7 @@ def 
replace_manifest_object(self, metadata_json, metadata_json_copy, bucket_name if metadata_json != metadata_json_copy: try: fulfill_manifest = json.dumps(metadata_json, ensure_ascii = False) - return self.s3Client.put_object( + return self.s3_manager.s3Client.put_object( Bucket=bucket_name, Key=curr_key, Body=fulfill_manifest, diff --git a/processes/ingest/chicago_isac.py b/processes/ingest/chicago_isac.py index 671ebb0d22..e44b5b68d1 100644 --- a/processes/ingest/chicago_isac.py +++ b/processes/ingest/chicago_isac.py @@ -3,7 +3,7 @@ from ..core import CoreProcess from mappings.chicagoISAC import ChicagoISACMapping -from managers import WebpubManifest +from managers import S3Manager, WebpubManifest from logger import create_log logger = create_log(__name__) @@ -17,7 +17,8 @@ def __init__(self, *args): self.createSession() self.s3Bucket = os.environ['FILE_BUCKET'] - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() def runProcess(self): with open('ingestJSONFiles/chicagoISAC_metadata.json') as f: @@ -56,7 +57,7 @@ def store_pdf_manifest(self, record): manifest_json = self.generate_manifest(record, uri, manifest_url) - self.createManifestInS3(manifest_path, manifest_json) + self.s3_manager.createManifestInS3(manifest_path, manifest_json) link_string = '|'.join([ item_no, diff --git a/processes/ingest/doab.py b/processes/ingest/doab.py index 4622c27e13..7c6f27a6ba 100644 --- a/processes/ingest/doab.py +++ b/processes/ingest/doab.py @@ -9,7 +9,7 @@ from logger import create_log from mappings.doab import DOABMapping from mappings.base_mapping import MappingError -from managers import DOABLinkManager, RabbitMQManager +from managers import DOABLinkManager, RabbitMQManager, S3Manager from model import get_file_message @@ -35,7 +35,8 @@ def __init__(self, *args): self.generateEngine() self.createSession() - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() self.s3Bucket = os.environ['FILE_BUCKET'] 
self.fileQueue = os.environ['FILE_QUEUE'] @@ -75,7 +76,7 @@ def parseDOABRecord(self, oaiRec): for manifest in linkManager.manifests: manifestPath, manifestJSON = manifest - self.createManifestInS3(manifestPath, manifestJSON) + self.s3_manager.createManifestInS3(manifestPath, manifestJSON) for epubLink in linkManager.ePubLinks: ePubPath, ePubURI = epubLink diff --git a/processes/ingest/loc.py b/processes/ingest/loc.py index ad57944531..67649f8e23 100644 --- a/processes/ingest/loc.py +++ b/processes/ingest/loc.py @@ -5,7 +5,7 @@ from ..core import CoreProcess from mappings.base_mapping import MappingError from mappings.loc import LOCMapping -from managers import RabbitMQManager, WebpubManifest +from managers import RabbitMQManager, S3Manager, WebpubManifest from model import get_file_message from logger import create_log from datetime import datetime, timedelta, timezone @@ -28,6 +28,8 @@ def __init__(self, *args): self.generateEngine() self.createSession() - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() + self.s3Bucket = os.environ['FILE_BUCKET'] @@ -226,7 +228,7 @@ def storePDFManifest(self, record): manifestJSON = self.generateManifest(record, uri, manifestURI) - self.createManifestInS3(manifestPath, manifestJSON) + self.s3_manager.createManifestInS3(manifestPath, manifestJSON) linkString = '|'.join([ itemNo, diff --git a/processes/ingest/met.py b/processes/ingest/met.py index 83c2ea81ed..c7d255f716 100644 --- a/processes/ingest/met.py +++ b/processes/ingest/met.py @@ -7,7 +7,7 @@ from ..core import CoreProcess from mappings.base_mapping import MappingError from mappings.met import METMapping -from managers import RabbitMQManager, WebpubManifest +from managers import RabbitMQManager, S3Manager, WebpubManifest from model import get_file_message from logger import create_log @@ -42,7 +42,8 @@ def __init__(self, *args): self.rabbitmq_manager.createOrConnectQueue(self.fileQueue, self.fileRoute) self.s3Bucket = os.environ['FILE_BUCKET'] - 
self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() def runProcess(self): self.setStartTime() @@ -173,7 +174,7 @@ def storePDFManifest(self, record): manifestJSON = self.generateManifest(record, uri, manifestURI) - self.createManifestInS3(manifestPath, manifestJSON) + self.s3_manager.createManifestInS3(manifestPath, manifestJSON) linkString = '|'.join([ itemNo, diff --git a/processes/ingest/muse.py b/processes/ingest/muse.py index ff1653f007..731d45b6cf 100644 --- a/processes/ingest/muse.py +++ b/processes/ingest/muse.py @@ -8,7 +8,7 @@ from ..core import CoreProcess from mappings.muse import MUSEMapping -from managers import MUSEError, MUSEManager, RabbitMQManager +from managers import MUSEError, MUSEManager, RabbitMQManager, S3Manager from model import get_file_message from logger import create_log @@ -27,7 +27,8 @@ def __init__(self, *args): self.generateEngine() self.createSession() - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() self.fileQueue = os.environ['FILE_QUEUE'] self.fileRoute = os.environ['FILE_ROUTING_KEY'] @@ -70,7 +71,7 @@ def parseMuseRecord(self, marcRec): museManager.addReadableLinks() if museManager.pdfWebpubManifest: - self.putObjectInBucket( + self.s3_manager.putObjectInBucket( museManager.pdfWebpubManifest.toJson().encode('utf-8'), museManager.s3PDFReadPath, museManager.s3Bucket diff --git a/processes/ingest/publisher_backlist.py b/processes/ingest/publisher_backlist.py index 713dcb7850..a2ff9656b1 100644 --- a/processes/ingest/publisher_backlist.py +++ b/processes/ingest/publisher_backlist.py @@ -3,6 +3,7 @@ from ..core import CoreProcess from logger import create_log +from managers import S3Manager logger = create_log(__name__) @@ -14,7 +15,7 @@ def __init__(self, *args): self.offset = (len(args) >= 6 and args[5]) or None self.s3_bucket = os.environ['FILE_BUCKET'] - self.createS3Client() + self.s3_manager = S3Manager() self.publisher_backlist_service = 
PublisherBacklistService() diff --git a/processes/ingest/u_of_m.py b/processes/ingest/u_of_m.py index 0cc0f3796d..968f596588 100644 --- a/processes/ingest/u_of_m.py +++ b/processes/ingest/u_of_m.py @@ -7,7 +7,7 @@ from urllib.error import HTTPError from mappings.base_mapping import MappingError from mappings.UofM import UofMMapping -from managers import WebpubManifest +from managers import S3Manager, WebpubManifest from logger import create_log logger = create_log(__name__) @@ -25,7 +25,8 @@ def __init__(self, *args): self.createSession() self.s3Bucket = os.environ['FILE_BUCKET'] - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() def runProcess(self): with open('ingestJSONFiles/UofM_Updated_CSV.json') as f: @@ -57,7 +58,7 @@ def addHasPartMapping(self, resultsRecord, record): try: #The get_object method is to make sure the object with a specific bucket and key exists in S3 - self.s3Client.get_object(Bucket=bucket, + self.s3_manager.s3Client.get_object(Bucket=bucket, Key=f'{resultsRecord["File ID 1"]}_060pct.pdf') key = f'{resultsRecord["File ID 1"]}_060pct.pdf' urlPDFObject = f'https://{bucket}.s3.amazonaws.com/{key}' @@ -91,7 +92,7 @@ def addHasPartMapping(self, resultsRecord, record): if not record.has_part: try: #The get_object method is to make sure the object with a specific bucket and key exists in S3 - self.s3Client.get_object(Bucket= 'ump-pdf-repository', + self.s3_manager.s3Client.get_object(Bucket= 'ump-pdf-repository', Key= f'{resultsRecord["File ID 1"]}_100pct.pdf') key = f'{resultsRecord["File ID 1"]}_100pct.pdf' urlPDFObject = f'https://{bucket}.s3.amazonaws.com/{key}' @@ -138,7 +139,7 @@ def storePDFManifest(self, record): manifestJSON = self.generateManifest(record, uri, manifestURI) - self.createManifestInS3(manifestPath, manifestJSON) + self.s3_manager.createManifestInS3(manifestPath, manifestJSON) if 'in_copyright' in record.rights: linkString = '|'.join([ diff --git a/processes/ingest/u_of_sc.py 
b/processes/ingest/u_of_sc.py index abb8122f50..c9bcaf0aa1 100644 --- a/processes/ingest/u_of_sc.py +++ b/processes/ingest/u_of_sc.py @@ -5,7 +5,7 @@ from ..core import CoreProcess from mappings.base_mapping import MappingError from mappings.UofSC import UofSCMapping -from managers import WebpubManifest +from managers import S3Manager, WebpubManifest from logger import create_log logger = create_log(__name__) @@ -23,7 +23,8 @@ def __init__(self, *args): self.createSession() self.s3Bucket = os.environ['FILE_BUCKET'] - self.createS3Client() + self.s3_manager = S3Manager() + self.s3_manager.createS3Client() def runProcess(self): with open('UofSC_metadata.json') as f: @@ -61,7 +62,7 @@ def storePDFManifest(self, record): manifestJSON = self.generateManifest(record, uri, manifestURI) - self.createManifestInS3(manifestPath, manifestJSON) + self.s3_manager.createManifestInS3(manifestPath, manifestJSON) linkString = '|'.join([ itemNo, diff --git a/tests/unit/processes/file/test_cover_process.py b/tests/unit/processes/file/test_cover_process.py index 2540208fe8..0a9f3b3622 100644 --- a/tests/unit/processes/file/test_cover_process.py +++ b/tests/unit/processes/file/test_cover_process.py @@ -15,6 +15,7 @@ def __init__(self, *args): self.batchSize = 3 self.runTime = datetime(1900, 1, 1) self.redis_manager = mocker.MagicMock() + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) return TestCoverProcess() @@ -182,7 +183,6 @@ def test_getEditionIdentifiers(self, testProcess, mocker): ]) def test_storeFoundCover(self, testProcess, mocker): - mockPut = mocker.patch.object(CoverProcess, 'putObjectInBucket') mockSave = mocker.patch.object(CoverProcess, 'bulkSaveObjects') mockFetcher = mocker.MagicMock(SOURCE='test', coverID=1) @@ -197,4 +197,4 @@ def test_storeFoundCover(self, testProcess, mocker): assert mockEdition.links[0].media_type == 'image/tst' assert mockEdition.links[0].flags == {'cover': True} assert mockSave.call_args[0][0] == set(['ed1', 'ed2', mockEdition]) 
- mockPut.assert_called_once_with('testBytes', 'covers/test/1.tst', 'test_aws_bucket') + testProcess.s3_manager.putObjectInBucket.assert_called_once_with('testBytes', 'covers/test/1.tst', 'test_aws_bucket') diff --git a/tests/unit/processes/file/test_fulfill_manifest_process.py b/tests/unit/processes/file/test_fulfill_manifest_process.py index 1f056fdeb4..c9f8965820 100644 --- a/tests/unit/processes/file/test_fulfill_manifest_process.py +++ b/tests/unit/processes/file/test_fulfill_manifest_process.py @@ -17,7 +17,7 @@ def test_process(self, mocker): class TestFulfill(FulfillURLManifestProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' - self.s3Client = mocker.MagicMock(s3Client='testS3Client') + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.session = mocker.MagicMock(session='testSession') self.records = mocker.MagicMock(record='testRecord') self.batchSize = mocker.MagicMock(batchSize='testBatchSize') @@ -41,14 +41,11 @@ def test_runProcess(self, test_process, mocker): def test_fetch_and_update_manifests(self, test_process, mocker): - process_mocks = mocker.patch.multiple(FulfillURLManifestProcess, - load_batches=mocker.DEFAULT, - update_metadata_object=mocker.DEFAULT - ) + mocker.patch.multiple(FulfillURLManifestProcess, update_metadata_object=mocker.DEFAULT) mock_timestamp = mocker.MagicMock(time_stamp='test_timestamp') test_process.fetch_and_update_manifests(mock_timestamp) - process_mocks['load_batches'].assert_called_once_with('testPrefix','test_aws_bucket') + test_process.s3_manager.load_batches.assert_called_once_with('testPrefix','test_aws_bucket') \ No newline at end of file diff --git a/tests/unit/processes/ingest/test_chicago_isac_process.py b/tests/unit/processes/ingest/test_chicago_isac_process.py index 2694e6149d..5cd5d05446 100644 --- a/tests/unit/processes/ingest/test_chicago_isac_process.py +++ b/tests/unit/processes/ingest/test_chicago_isac_process.py @@ -18,7 +18,7 @@ def test_process(self, mocker): class 
TestISAC(ChicagoISACProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' - self.s3_client = mocker.MagicMock(s3_client='test_s3_client') + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.session = mocker.MagicMock(session='test_session') self.records = mocker.MagicMock(record='test_record') self.batch_size = mocker.MagicMock(batch_size='test_batch_size') @@ -71,7 +71,6 @@ def test_store_pdf_manifest(self, test_process, mocker): mock_generate_man = mocker.patch.object(ChicagoISACProcess, 'generate_manifest') mock_generate_man.return_value = 'test_json' - mock_create_man = mocker.patch.object(ChicagoISACProcess, 'createManifestInS3') test_process.store_pdf_manifest(mock_record) @@ -79,7 +78,7 @@ def test_store_pdf_manifest(self, test_process, mocker): assert mock_record.has_part[0] == '1|{}|isac|application/webpub+json|{{}}'.format(test_manifest_url) mock_generate_man.assert_called_once_with(mock_record, 'test_url', test_manifest_url) - mock_create_man.assert_called_once_with('manifests/isac/1.json', 'test_json') + test_process.s3_manager.createManifestInS3.assert_called_once_with('manifests/isac/1.json', 'test_json') def test_generate_manifest(self, mocker): mock_manifest = mocker.MagicMock(links=[]) diff --git a/tests/unit/processes/ingest/test_doab_process.py b/tests/unit/processes/ingest/test_doab_process.py index 683e9134bd..a2a30e0283 100644 --- a/tests/unit/processes/ingest/test_doab_process.py +++ b/tests/unit/processes/ingest/test_doab_process.py @@ -25,6 +25,7 @@ def testProcess(self, mocker): class TestDOAB(DOABProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.fileQueue = 'test_file_queue' self.fileRoute = 'test_file_key' self.constants = {} @@ -281,17 +282,14 @@ def test_parseDOABRecord_success(self, testProcess, mocker): mockLinkManager = mocker.patch('processes.ingest.doab.DOABLinkManager') mockLinkManager.return_value = mockManager - 
processMocks = mocker.patch.multiple(DOABProcess, - createManifestInS3=mocker.DEFAULT, - addDCDWToUpdateList=mocker.DEFAULT - ) + processMocks = mocker.patch.multiple(DOABProcess, addDCDWToUpdateList=mocker.DEFAULT) testProcess.parseDOABRecord('testMARC') mockMapper.assert_called_once_with('testMARC', testProcess.OAI_NAMESPACES, {}) mockMapping.applyMapping.assert_called_once() mockManager.parseLinks.assert_called_once() - processMocks['createManifestInS3'].assert_called_once_with('pdfPath', 'pdfJSON') + testProcess.s3_manager.createManifestInS3.assert_called_once_with('pdfPath', 'pdfJSON') testProcess.rabbitmq_manager.sendMessageToQueue.assert_called_once_with( testProcess.fileQueue, testProcess.fileRoute, @@ -307,10 +305,3 @@ def test_parseDOABRecord_error(self, testProcess, mocker): with pytest.raises(DOABError): testProcess.parseDOABRecord('testMARC') - - def test_createManifestInS3(self, testProcess, mocker): - mockPut = mocker.patch.object(DOABProcess, 'putObjectInBucket') - - testProcess.createManifestInS3('testPath', 'testManifest') - - mockPut.assert_called_once_with(b'testManifest', 'testPath', 'test_aws_bucket') diff --git a/tests/unit/processes/ingest/test_loc_process.py b/tests/unit/processes/ingest/test_loc_process.py index 3d7b714ca0..c48e891dac 100644 --- a/tests/unit/processes/ingest/test_loc_process.py +++ b/tests/unit/processes/ingest/test_loc_process.py @@ -19,7 +19,7 @@ def testProcess(self, mocker): class TestLOC(LOCProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' - self.s3Client = mocker.MagicMock(s3Client='testS3Client') + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.session = mocker.MagicMock(session='testSession') self.records = mocker.MagicMock(record='testRecord') self.batchSize = mocker.MagicMock(batchSize='testBatchSize') @@ -92,7 +92,6 @@ def test_storePDFManifest(self, testProcess, mocker): mockGenerateMan = mocker.patch.object(LOCProcess, 'generateManifest') mockGenerateMan.return_value = 
'testJSON' - mockCreateMan = mocker.patch.object(LOCProcess, 'createManifestInS3') testProcess.storePDFManifest(mockRecord) @@ -100,7 +99,7 @@ def test_storePDFManifest(self, testProcess, mocker): assert mockRecord.has_part[0] == '1|{}|loc|application/webpub+json|{{"catalog": false, "download": false, "reader": true, "embed": false}}'.format(testManifestURI) mockGenerateMan.assert_called_once_with(mockRecord, 'testURI', testManifestURI) - mockCreateMan.assert_called_once_with('manifests/loc/1.json', 'testJSON') + testProcess.s3_manager.createManifestInS3.assert_called_once_with('manifests/loc/1.json', 'testJSON') def test_storeEpubsInS3(self, testProcess, mocker): mockRecord = mocker.MagicMock(identifiers=['1|loc']) @@ -122,13 +121,6 @@ def test_storeEpubsInS3(self, testProcess, mocker): mocker.call(mockRecord, '1', 'loc', '{"reader": false, "catalog": false, "download": true}', 'application/epub+zip', 'epubs/loc/1.epub'), ]) - def test_createManifestInS3(self, testProcess, mocker): - mockPut = mocker.patch.object(LOCProcess, 'putObjectInBucket') - - testProcess.createManifestInS3('testPath', '{"data": "testJSON"}') - - mockPut.assert_called_once_with(b'{"data": "testJSON"}', 'testPath', 'test_aws_bucket') - def test_addEPUBManifest(self, testProcess, mocker): mockRecord = mocker.MagicMock(has_part=[]) diff --git a/tests/unit/processes/ingest/test_met_process.py b/tests/unit/processes/ingest/test_met_process.py index 96a37b78b2..79724ed344 100644 --- a/tests/unit/processes/ingest/test_met_process.py +++ b/tests/unit/processes/ingest/test_met_process.py @@ -20,6 +20,7 @@ def teardown_class(cls): def testProcess(self, mocker): class TestMET(METProcess): def __init__(self): + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.s3Bucket = 'test_aws_bucket' self.fileQueue = 'test_file_queue' self.fileRoute = 'test_file_key' @@ -184,7 +185,6 @@ def test_storePDFManifest(self, testProcess, mocker): mockGenerateMan = mocker.patch.object(METProcess, 
'generateManifest') mockGenerateMan.return_value = 'testJSON' - mockCreateMan = mocker.patch.object(METProcess, 'createManifestInS3') testProcess.storePDFManifest(mockRecord) @@ -192,14 +192,7 @@ def test_storePDFManifest(self, testProcess, mocker): assert mockRecord.has_part[0] == '1|{}|met|application/webpub+json|{{}}'.format(testManifestURI) mockGenerateMan.assert_called_once_with(mockRecord, 'testURI', testManifestURI) - mockCreateMan.assert_called_once_with('manifests/met/1.json', 'testJSON') - - def test_createManifestInS3(self, testProcess, mocker): - mockPut = mocker.patch.object(METProcess, 'putObjectInBucket') - - testProcess.createManifestInS3('testPath', '{"data": "testJSON"}') - - mockPut.assert_called_once_with(b'{"data": "testJSON"}', 'testPath', 'test_aws_bucket') + testProcess.s3_manager.createManifestInS3.assert_called_once_with('manifests/met/1.json', 'testJSON') def test_generateManifest(self, mocker): mockManifest = mocker.MagicMock(links=[]) diff --git a/tests/unit/processes/ingest/test_muse_process.py b/tests/unit/processes/ingest/test_muse_process.py index 0d1ddda31c..0ed03495ec 100644 --- a/tests/unit/processes/ingest/test_muse_process.py +++ b/tests/unit/processes/ingest/test_muse_process.py @@ -23,6 +23,7 @@ def testProcess(self, mocker): class TestMUSE(MUSEProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.records = [] self.ingest_limit = None self.fileQueue = 'fileQueue' @@ -258,11 +259,7 @@ def test_parseMuseRecord(self, testProcess, mocker): mockManagerInit = mocker.patch('processes.ingest.muse.MUSEManager') mockManagerInit.return_value = mockManager - processMocks = mocker.patch.multiple( - MUSEProcess, - putObjectInBucket=mocker.DEFAULT, - addDCDWToUpdateList=mocker.DEFAULT - ) + processMocks = mocker.patch.multiple(MUSEProcess, addDCDWToUpdateList=mocker.DEFAULT) testProcess.parseMuseRecord('testMARC') @@ -273,7 +270,7 @@ def 
test_parseMuseRecord(self, testProcess, mocker): mockManager.identifyReadableVersions.assert_called_once() mockManager.addReadableLinks.assert_called_once() - processMocks['putObjectInBucket'].assert_called_once_with( + testProcess.s3_manager.putObjectInBucket.assert_called_once_with( b'testManifest', 'testPDFPath', 'testBucket' ) testProcess.rabbitmq_manager.sendMessageToQueue.assert_called_once_with( diff --git a/tests/unit/processes/ingest/test_u_of_m_process.py b/tests/unit/processes/ingest/test_u_of_m_process.py index 2fdfdf7982..f0dba8978a 100644 --- a/tests/unit/processes/ingest/test_u_of_m_process.py +++ b/tests/unit/processes/ingest/test_u_of_m_process.py @@ -18,7 +18,7 @@ def testProcess(self, mocker): class TestUofM(UofMProcess): def __init__(self): self.s3Bucket = 'test_aws_bucket' - self.s3Client = mocker.MagicMock(s3Client='testS3Client') + self.s3_manager = mocker.MagicMock(s3Client=mocker.MagicMock()) self.session = mocker.MagicMock(session='testSession') self.records = mocker.MagicMock(record='testRecord') self.batchSize = mocker.MagicMock(batchSize='testBatchSize') @@ -83,13 +83,6 @@ def test_processUofMRecord_error(self, mocker): # mockGenerateMan.assert_called_once_with(mockRecord, 'testURI', testManifestURI) # mockCreateMan.assert_called_once_with('manifests/UofM/1.json', 'testJSON') - def test_createManifestInS3(self, testProcess, mocker): - mockPut = mocker.patch.object(UofMProcess, 'putObjectInBucket') - - testProcess.createManifestInS3('testPath', '{"data": "testJSON"}') - - mockPut.assert_called_once_with(b'{"data": "testJSON"}', 'testPath', 'test_aws_bucket') - def test_generateManifest(self, mocker): mockManifest = mocker.MagicMock(links=[]) mockManifest.toJson.return_value = 'testJSON'