Skip to content

Commit

Permalink
MOD: Upgrade databento-dbn to v0.15.0
Browse files Browse the repository at this point in the history
  • Loading branch information
nmacholl committed Jan 16, 2024
1 parent ab5d54d commit cade791
Show file tree
Hide file tree
Showing 7 changed files with 16 additions and 27 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ This release adds support for transcoding DBN data into Apache Parquet.

#### Enhancements
- Added `DBNStore.to_parquet` for transcoding DBN data into Apache Parquet using `pyarrow`
- Upgraded `databento-dbn` to 0.15.0

## 0.25.0 - 2024-01-09

Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.8 and
The minimum dependencies as found in the `pyproject.toml` are also listed below:
- python = "^3.8"
- aiohttp = "^3.8.3"
- databento-dbn = "0.14.2"
- databento-dbn = "0.15.0"
- numpy = ">=1.23.5"
- pandas = ">=1.5.3"
- pyarrow = ">=13.0.0"
Expand Down
2 changes: 1 addition & 1 deletion databento/common/dbnstore.py
Original file line number Diff line number Diff line change
Expand Up @@ -1192,7 +1192,7 @@ def _transcode(
pretty_ts=pretty_ts,
has_metadata=True,
map_symbols=map_symbols,
symbol_interval_map=symbol_map, # type: ignore [arg-type]
symbol_interval_map=symbol_map,
schema=schema,
)

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ aiohttp = [
{version = "^3.8.3", python = "<3.12"},
{version = "^3.9.0", python = "^3.12"}
]
databento-dbn = "0.14.2"
databento-dbn = "0.15.0"
numpy = [
{version = ">=1.23.5", python = "<3.12"},
{version = "^1.26.0", python = "^3.12"}
Expand Down
2 changes: 1 addition & 1 deletion tests/mock_live_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,7 @@ def _(self, message: SessionStart) -> None:
self.__transport.close()

elif self.mode is MockLiveMode.REPEAT:
metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], []) # type: ignore [call-arg]
metadata = Metadata("UNIT.TEST", 0, SType.RAW_SYMBOL, [], [], [], [])
self.__transport.write(bytes(metadata))

loop = asyncio.get_event_loop()
Expand Down
6 changes: 3 additions & 3 deletions tests/test_common_symbology.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def create_symbol_mapping_message(
SymbolMappingMsg
"""
return SymbolMappingMsg( # type: ignore [call-arg]
return SymbolMappingMsg(
publisher_id=publisher_id,
instrument_id=instrument_id,
ts_event=ts_event,
Expand All @@ -172,7 +172,7 @@ def create_metadata(
limit: int | None = None,
ts_out: bool = False,
) -> Metadata:
return Metadata( # type: ignore [call-arg]
return Metadata(
dataset=dataset,
start=start,
stype_out=stype_out,
Expand Down Expand Up @@ -357,7 +357,7 @@ def test_instrument_map_insert_symbol_mapping_message_v1(
start_ts=start_date,
end_ts=end_date,
)
sym_msg_v1 = SymbolMappingMsgV1( # type: ignore [call-arg]
sym_msg_v1 = SymbolMappingMsgV1(
publisher_id=sym_msg.publisher_id,
instrument_id=sym_msg.instrument_id,
ts_event=sym_msg.ts_event,
Expand Down
28 changes: 8 additions & 20 deletions tests/test_historical_bento.py
Original file line number Diff line number Diff line change
Expand Up @@ -782,8 +782,8 @@ def test_dbnstore_iterable(
dbnstore = DBNStore.from_bytes(data=stub_data)

record_list: list[DBNRecord] = list(dbnstore)
first: MBOMsg = record_list[0] # type: ignore
second: MBOMsg = record_list[1] # type: ignore
first: MBOMsg = record_list[0]
second: MBOMsg = record_list[1]

# Assert
assert first.hd.length == 14
Expand Down Expand Up @@ -882,9 +882,7 @@ def test_dbnstore_buffer_short(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
)

# Act
Expand Down Expand Up @@ -917,9 +915,7 @@ def test_dbnstore_buffer_long(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
)

# Act
Expand Down Expand Up @@ -952,9 +948,7 @@ def test_dbnstore_buffer_rewind(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, Schema.MBO)).read()
)

# Act
Expand Down Expand Up @@ -989,9 +983,7 @@ def test_dbnstore_to_ndarray_with_count(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
)

# Act
Expand Down Expand Up @@ -1074,9 +1066,7 @@ def test_dbnstore_to_ndarray_with_schema(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
)

# Act
Expand Down Expand Up @@ -1210,9 +1200,7 @@ def test_dbnstore_to_df_with_count(
"""
# Arrange
dbn_stub_data = (
zstandard.ZstdDecompressor()
.stream_reader(test_data(Dataset.GLBX_MDP3, schema))
.read()
zstandard.ZstdDecompressor().stream_reader(test_data(Dataset.GLBX_MDP3, schema)).read()
)

# Act
Expand Down

0 comments on commit cade791

Please sign in to comment.