From b6e37326ee408de160027300f153d772bdf75090 Mon Sep 17 00:00:00 2001
From: Koen Vossen
Date: Tue, 24 Sep 2024 23:07:43 +0200
Subject: [PATCH] Allow chaining of 'add_file' calls

---
 .../models/resources/dataset_resource.py      |   3 +
 ingestify/tests/test_engine.py                | 102 +++++++++---------
 2 files changed, 54 insertions(+), 51 deletions(-)

diff --git a/ingestify/domain/models/resources/dataset_resource.py b/ingestify/domain/models/resources/dataset_resource.py
index 67e8d4d..0c658b8 100644
--- a/ingestify/domain/models/resources/dataset_resource.py
+++ b/ingestify/domain/models/resources/dataset_resource.py
@@ -94,3 +94,6 @@ def add_file(
         )
 
         self.files[file_id] = file_resource
+
+        # Allow chaining
+        return self
diff --git a/ingestify/tests/test_engine.py b/ingestify/tests/test_engine.py
index 65af306..d6a1754 100644
--- a/ingestify/tests/test_engine.py
+++ b/ingestify/tests/test_engine.py
@@ -66,35 +66,36 @@ def find_datasets(
         season_id,
         **kwargs
     ):
-        dataset_resource = DatasetResource(
-            dict(
-                competition_id=competition_id,
-                season_id=season_id,
-            ),
-            provider="fake",
-            dataset_type="match",
-            name="Test Dataset",
-        )
-
         last_modified = datetime.now(pytz.utc)
-        dataset_resource.add_file(
-            last_modified=last_modified,
-            data_feed_key="file1",
-            data_spec_version="v1",
-            file_loader=file_loader,
-        )
-        dataset_resource.add_file(
-            last_modified=last_modified,
-            data_feed_key="file2",
-            data_spec_version="v1",
-            file_loader=file_loader,
-        )
-        dataset_resource.add_file(
-            last_modified=last_modified,
-            data_feed_key="file3",
-            data_spec_version="v1",
-            json_content={"test": "some-content"},
+        yield (
+            DatasetResource(
+                dict(
+                    competition_id=competition_id,
+                    season_id=season_id,
+                ),
+                provider="fake",
+                dataset_type="match",
+                name="Test Dataset",
+            )
+            .add_file(
+                last_modified=last_modified,
+                data_feed_key="file1",
+                data_spec_version="v1",
+                file_loader=file_loader,
+            )
+            .add_file(
+                last_modified=last_modified,
+                data_feed_key="file2",
+                data_spec_version="v1",
+                file_loader=file_loader,
+            )
+            .add_file(
+                last_modified=last_modified,
+                data_feed_key="file3",
+                data_spec_version="v1",
+                json_content={"test": "some-content"},
+            )
         )
 
         # dataset_resource.add_file(
         #     last_modified=last_modified,
@@ -104,8 +105,6 @@ def find_datasets(
         #     data_serialization_format="json"
         # )
-
-        yield dataset_resource
 
 
 class BatchSource(Source):
     def __init__(self, name, callback):
@@ -128,30 +127,31 @@ def find_datasets(
         for i in range(10):
             match_id = self.idx
             self.idx += 1
-            dataset_resource = DatasetResource(
-                dict(
-                    competition_id=competition_id,
-                    season_id=season_id,
-                    match_id=match_id,
-                ),
-                name="Test dataset",
-                provider="fake",
-                dataset_type="match",
-            )
             last_modified = datetime.now(pytz.utc)
-
-            dataset_resource.add_file(
-                last_modified=last_modified,
-                data_feed_key="file1",
-                data_spec_version="v1",
-                file_loader=file_loader,
-            )
-            dataset_resource.add_file(
-                last_modified=last_modified,
-                data_feed_key="file2",
-                data_spec_version="v1",
-                file_loader=file_loader,
+            dataset_resource = (
+                DatasetResource(
+                    dict(
+                        competition_id=competition_id,
+                        season_id=season_id,
+                        match_id=match_id,
+                    ),
+                    name="Test dataset",
+                    provider="fake",
+                    dataset_type="match",
+                )
+                .add_file(
+                    last_modified=last_modified,
+                    data_feed_key="file1",
+                    data_spec_version="v1",
+                    file_loader=file_loader,
+                )
+                .add_file(
+                    last_modified=last_modified,
+                    data_feed_key="file2",
+                    data_spec_version="v1",
+                    file_loader=file_loader,
+                )
             )
 
             items.append(dataset_resource)
 