From cade791a825379156df71b5d8beccb99fb181d1c Mon Sep 17 00:00:00 2001
From: Nick Macholl
Date: Tue, 16 Jan 2024 11:39:12 -0800
Subject: [PATCH] MOD: Upgrade databento-dbn to v0.15.0

---
 CHANGELOG.md                   |  1 +
 README.md                      |  2 +-
 databento/common/dbnstore.py   |  2 +-
 pyproject.toml                 |  2 +-
 tests/mock_live_server.py      |  2 +-
 tests/test_common_symbology.py |  6 +++---
 tests/test_historical_bento.py | 28 ++++++++--------------------
 7 files changed, 16 insertions(+), 27 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d10684..39e5adf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,7 @@ This release adds support for transcoding DBN data into Apache parquet.
 
 #### Enhancements
 - Added `DBNStore.to_parquet` for transcoding DBN data into Apache parquet using `pyarrow`
+- Upgraded `databento-dbn` to 0.15.0
 
 ## 0.25.0 - 2024-01-09
 
diff --git a/README.md b/README.md
index 57605f4..f2c453f 100644
--- a/README.md
+++ b/README.md
@@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.8 and
 The minimum dependencies as found in the `pyproject.toml` are also listed below:
 - python = "^3.8"
 - aiohttp = "^3.8.3"
-- databento-dbn = "0.14.2"
+- databento-dbn = "0.15.0"
 - numpy= ">=1.23.5"
 - pandas = ">=1.5.3"
 - pyarrow = ">=13.0.0"
diff --git a/databento/common/dbnstore.py b/databento/common/dbnstore.py
index 882f3ed..dcdcaaf 100644
--- a/databento/common/dbnstore.py
+++ b/databento/common/dbnstore.py
@@ -1192,7 +1192,7 @@ def _transcode(
             pretty_ts=pretty_ts,
             has_metadata=True,
             map_symbols=map_symbols,
-            symbol_interval_map=symbol_map,  # type: ignore [arg-type]
+            symbol_interval_map=symbol_map,
             schema=schema,
         )
 
diff --git a/pyproject.toml b/pyproject.toml
index 537c1d0..cec8a63 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ aiohttp = [
     {version = "^3.8.3", python = "<3.12"},
     {version = "^3.9.0", python = "^3.12"}
 ]
-databento-dbn = "0.14.2"
+databento-dbn = "0.15.0"
 numpy = [
     {version = ">=1.23.5", python = "<3.12"},
     {version = "^1.26.0", python = "^3.12"}
diff --git a/tests/mock_live_server.py b/tests/mock_live_server.py
index d4aaff1..f2ae54f 100644
--- a/tests/mock_live_server.py
+++ b/tests/mock_live_server.py
@@ -434,7 +434,7 @@ def _(self, message: SessionStart) -> None:
             self.__transport.close()
 
         elif self.mode is MockLiveMode.REPEAT:
-            metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], [])  # type: ignore [call-arg]
+            metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], [])
             self.__transport.write(bytes(metadata))
 
             loop = asyncio.get_event_loop()
diff --git a/tests/test_common_symbology.py b/tests/test_common_symbology.py
index bac23b9..bb92c7a 100644
--- a/tests/test_common_symbology.py
+++ b/tests/test_common_symbology.py
@@ -148,7 +148,7 @@ def create_symbol_mapping_message(
     SymbolMappingMsg
 
     """
-    return SymbolMappingMsg(  # type: ignore [call-arg]
+    return SymbolMappingMsg(
         publisher_id=publisher_id,
         instrument_id=instrument_id,
         ts_event=ts_event,
@@ -172,7 +172,7 @@ def create_metadata(
     limit: int | None = None,
     ts_out: bool = False,
 ) -> Metadata:
-    return Metadata(  # type: ignore [call-arg]
+    return Metadata(
         dataset=dataset,
         start=start,
         stype_out=stype_out,
@@ -357,7 +357,7 @@ def test_instrument_map_insert_symbol_mapping_message_v1(
         start_ts=start_date,
         end_ts=end_date,
     )
-    sym_msg_v1 = SymbolMappingMsgV1(  # type: ignore [call-arg]
+    sym_msg_v1 = SymbolMappingMsgV1(
         publisher_id=sym_msg.publisher_id,
         instrument_id=sym_msg.instrument_id,
         ts_event=sym_msg.ts_event,
diff --git a/tests/test_historical_bento.py b/tests/test_historical_bento.py
index 36bbffe..8f7022d 100644
--- a/tests/test_historical_bento.py
+++ b/tests/test_historical_bento.py
@@ -782,8 +782,8 @@ def test_dbnstore_iterable(
 
     dbnstore = DBNStore.from_bytes(data=stub_data)
     record_list: list[DBNRecord] = list(dbnstore)
-    first: MBOMsg = record_list[0]  # type: ignore
-    second: MBOMsg = record_list[1]  # type: ignore
+    first: MBOMsg = record_list[0]
+    second: MBOMsg = record_list[1]
 
     # Assert
     assert first.hd.length == 14
@@ -882,9 +882,7 @@ def test_dbnstore_buffer_short(
     """
     # Arrange
     dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
     )
 
     # Act
@@ -917,9 +915,7 @@ def test_dbnstore_buffer_long(
     """
     # Arrange
     dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
     )
 
     # Act
@@ -952,9 +948,7 @@ def test_dbnstore_buffer_rewind(
     """
     # Arrange
    dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
     )
 
     # Act
@@ -989,9 +983,7 @@ def test_dbnstore_to_ndarray_with_count(
     """
     # Arrange
     dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, schema))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
     )
 
     # Act
@@ -1074,9 +1066,7 @@ def test_dbnstore_to_ndarray_with_schema(
     """
     # Arrange
     dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, schema))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
     )
 
     # Act
@@ -1210,9 +1200,7 @@ def test_dbnstore_to_df_with_count(
     """
     # Arrange
     dbn_stub_data = (
-        zstandard.ZstdDecompressor()
-        .stream_reader(test_data(Dataset.GLBX_MDP3, schema))
-        .read()
+        zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
     )
 
     # Act
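
Not part of the patch itself: a minimal usage sketch of the transcoding path these changes touch. `DBNStore.from_bytes` and `DBNStore.to_parquet` appear in the diffs above; the input file name, the output path argument, and the assumption that `to_parquet` accepts the `pretty_ts`/`map_symbols` options passed to `_transcode` above are illustrative guesses, not taken from this patch.

# Sketch only: load DBN bytes and transcode them to an Apache Parquet file.
# The file names are hypothetical; pretty_ts/map_symbols mirror the _transcode
# parameters in the dbnstore.py hunk and are assumed to be accepted here.
from databento import DBNStore

with open("glbx-mdp3-mbo.dbn", "rb") as dbn_file:  # hypothetical input file
    store = DBNStore.from_bytes(data=dbn_file.read())

store.to_parquet(
    "glbx-mdp3-mbo.parquet",  # hypothetical output path
    pretty_ts=True,
    map_symbols=True,
)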