VER: Release 0.27.0
nmacholl authored Jan 23, 2024
2 parents 18f45c2 + f5ddca8 commit 63ce2a3
Showing 15 changed files with 141 additions and 78 deletions.
14 changes: 13 additions & 1 deletion CHANGELOG.md
@@ -1,5 +1,17 @@
# Changelog

## 0.27.0 - 2024-01-23

#### Enhancements
- Added `Session.session_id` property which will contain the numerical session ID once a live session has been authenticated
- Upgraded `databento-dbn` to 0.15.1

#### Breaking changes
- Renamed `DatabentoLiveProtocol.started` to `DatabentoLiveProtocol.is_started` which now returns a bool instead of an `asyncio.Event`

#### Bug fixes
- Fixed an issue where an error message from the live gateway would not properly raise an exception if the connection closed before `Live.start` was called
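
Taken together, the 0.27.0 changes affect live-session code roughly as in this hedged sketch; the API key, dataset, and symbols are placeholders, and the exact access path to the new `Session.session_id` value is an assumption noted in the comments:

```python
import databento as db

# A minimal sketch, assuming a valid API key and a live entitlement for
# the dataset; all identifiers here are placeholders.
live = db.Live(key="YOUR_API_KEY")
live.subscribe(
    dataset="GLBX.MDP3",
    schema="trades",
    symbols=["ES.FUT"],
    stype_in="parent",
)
live.start()

# New in 0.27.0: Session.session_id holds the numerical session ID once
# the live session has been authenticated (None before then).

# Breaking in 0.27.0: DatabentoLiveProtocol.started (an asyncio.Event)
# became DatabentoLiveProtocol.is_started (a plain bool):
#     before: if protocol.started.is_set(): ...
#     after:  if protocol.is_started: ...
```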

## 0.26.0 - 2024-01-16

This release adds support for transcoding DBN data into Apache Parquet.
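
For example, an existing DBN file can be transcoded along these lines (a hedged sketch; the file names are hypothetical, while the keyword arguments match `DBNStore.to_parquet` as shown later in this diff):

```python
import databento as db

# Load a DBN file from disk and transcode it to Apache Parquet.
store = db.DBNStore.from_file("glbx-mdp3-trades.dbn.zst")
store.to_parquet(
    "trades.parquet",
    price_type="float",  # fixed-precision integer prices become floats
    pretty_ts=True,      # timestamps decoded rather than left as integers
    map_symbols=True,    # append a symbol column from the metadata mappings
)
```
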
@@ -246,7 +258,7 @@ This release includes updates to the fields in text encodings (CSV and JSON), yo
## 0.14.0 - 2023-06-14

#### Enhancements
- Added `DatatbentoLiveProtocol` class
- Added `DatabentoLiveProtocol` class
- Added `metadata` property to `Live`
- Added support for reusing a `Live` client to reconnect
- Added support for emitting warnings in API response headers
2 changes: 1 addition & 1 deletion README.md
@@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.8 and
The minimum dependencies as found in the `pyproject.toml` are also listed below:
- python = "^3.8"
- aiohttp = "^3.8.3"
- databento-dbn = "0.15.0"
- databento-dbn = "0.15.1"
- numpy= ">=1.23.5"
- pandas = ">=1.5.3"
- pyarrow = ">=13.0.0"
24 changes: 12 additions & 12 deletions databento/common/dbnstore.py
@@ -132,7 +132,7 @@ class FileDataSource(DataSource):
The name of the file.
nbytes : int
The size of the data in bytes; equal to the file size.
path : PathLike or str
path : PathLike[str] or str
The path of the file.
reader : IO[bytes]
A `BufferedReader` for this file-backed data.
@@ -634,7 +634,7 @@ def from_file(cls, path: PathLike[str] | str) -> DBNStore:
Parameters
----------
path : Path or str
path : PathLike[str] or str
The path to read from.
Returns
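
A hedged usage sketch for `from_file` (the file name is hypothetical):

```python
import databento as db

# Any PathLike[str] or str is accepted for the path argument.
store = db.DBNStore.from_file("glbx-mdp3-trades.dbn.zst")
print(store.schema)  # the schema of the stored records
df = store.to_df()   # decode the records into a pandas DataFrame
```
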
@@ -695,7 +695,7 @@ def replay(self, callback: Callable[[Any], None]) -> None:
def request_full_definitions(
self,
client: Historical,
path: Path | str | None = None,
path: PathLike[str] | str | None = None,
) -> DBNStore:
"""
Request full instrument definitions based on the metadata properties.
@@ -706,7 +706,7 @@
----------
client : Historical
The historical client to use for the request (contains the API key).
path : Path or str, optional
path : PathLike[str] or str, optional
The path to stream the data to on disk (will then return a `DBNStore`).
Returns
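
A hedged sketch of the call (the API key and file names are placeholders):

```python
import databento as db

client = db.Historical(key="YOUR_API_KEY")
store = db.DBNStore.from_file("trades.dbn.zst")

# Request definition records for the instruments and date range covered
# by this store's metadata, streaming the result to disk.
definitions = store.request_full_definitions(
    client=client,
    path="definitions.dbn.zst",
)
```
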
@@ -768,7 +768,7 @@ def request_symbology(self, client: Historical) -> dict[str, Any]:

def to_csv(
self,
path: Path | str,
path: PathLike[str] | str,
pretty_px: bool = True,
pretty_ts: bool = True,
map_symbols: bool = True,
@@ -780,7 +780,7 @@
Parameters
----------
path : Path or str
path : PathLike[str] or str
The file path to write to.
pretty_px : bool, default True
If all price columns should be converted from `int` to `float` at
@@ -922,7 +922,7 @@ def to_df(

def to_parquet(
self,
path: Path | str,
path: PathLike[str] | str,
price_type: Literal["fixed", "float"] = "float",
pretty_ts: bool = True,
map_symbols: bool = True,
@@ -995,13 +995,13 @@ def to_parquet(
if writer is not None:
writer.close()

def to_file(self, path: Path | str) -> None:
def to_file(self, path: PathLike[str] | str) -> None:
"""
Write the data to a DBN file at the given path.
Parameters
----------
path : str
path : PathLike[str] or str
The file path to write to.
Raises
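
All of the writers touched in this file accept `PathLike[str] | str`, as in this hedged sketch (hypothetical file names):

```python
from pathlib import Path

import databento as db

store = db.DBNStore.from_file("trades.dbn.zst")
store.to_csv(Path("trades.csv"))  # pathlib.Path satisfies PathLike[str]
store.to_json("trades.json")      # a plain str is equally valid
store.to_file(Path("copy.dbn"))   # writes the raw DBN records back out
```
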
@@ -1021,7 +1021,7 @@ def to_file(self, path: Path | str) -> None:

def to_json(
self,
path: Path | str,
path: PathLike[str] | str,
pretty_px: bool = True,
pretty_ts: bool = True,
map_symbols: bool = True,
@@ -1033,7 +1033,7 @@
Parameters
----------
path : Path or str
path : PathLike[str] or str
The file path to write to.
pretty_px : bool, default True
If all price columns should be converted from `int` to `float` at
@@ -1319,7 +1319,7 @@ def __next__(self) -> np.ndarray[Any, Any]:
except ValueError:
raise BentoError(
"DBN file is truncated or contains an incomplete record",
)
) from None
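
The `from None` added here suppresses implicit exception chaining; a minimal standalone illustration using builtin exceptions:

```python
try:
    try:
        raise ValueError("incomplete record")  # the internal failure
    except ValueError:
        # "from None" hides the ValueError from the user-facing traceback.
        raise RuntimeError("DBN file is truncated") from None
except RuntimeError as exc:
    assert exc.__suppress_context__ is True  # chained context is hidden
    assert exc.__cause__ is None
```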


class DataFrameIterator:
48 changes: 25 additions & 23 deletions databento/common/symbology.py
@@ -43,8 +43,8 @@ class MappingInterval(NamedTuple):


def _validate_path_pair(
in_file: Path | PathLike[str] | str,
out_file: Path | PathLike[str] | str | None,
in_file: PathLike[str] | str,
out_file: PathLike[str] | str | None,
) -> tuple[Path, Path]:
in_file_valid = Path(in_file)

@@ -74,22 +74,22 @@


def map_symbols_csv(
symbology_file: Path | PathLike[str] | str,
csv_file: Path | PathLike[str] | str,
out_file: Path | PathLike[str] | str | None = None,
symbology_file: PathLike[str] | str,
csv_file: PathLike[str] | str,
out_file: PathLike[str] | str | None = None,
) -> Path:
"""
Use a `symbology.json` file to map a symbols column onto an existing CSV
file. The result is written to `out_file`.
Parameters
----------
symbology_file: Path | PathLike[str] | str
symbology_file: PathLike[str] | str
Path to a `symbology.json` file to use as a symbology source.
csv_file: Path | PathLike[str] | str
csv_file: PathLike[str] | str
Path to a CSV file that contains encoded DBN data; must contain
a `ts_recv` or `ts_event` and `instrument_id` column.
out_file: Path | PathLike[str] | str (optional)
out_file: PathLike[str] | str (optional)
Path to a file to write results to. If unspecified, `_mapped` will be
appended to the `csv_file` name.
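
A hedged usage sketch (hypothetical file names; the import path follows this module):

```python
from databento.common.symbology import map_symbols_csv

# Maps a symbols column onto a CSV encoded from DBN data; writes
# "trades_mapped.csv" because out_file is omitted.
mapped_path = map_symbols_csv(
    symbology_file="symbology.json",
    csv_file="trades.csv",
)
```
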
@@ -119,21 +119,21 @@


def map_symbols_json(
symbology_file: Path | PathLike[str] | str,
json_file: Path | PathLike[str] | str,
out_file: Path | PathLike[str] | str | None = None,
symbology_file: PathLike[str] | str,
json_file: PathLike[str] | str,
out_file: PathLike[str] | str | None = None,
) -> Path:
"""
Use a `symbology.json` file to insert a symbols key into records of an
existing JSON file. The result is written to `out_file`.
Parameters
----------
symbology_file: Path | PathLike[str] | str
symbology_file: PathLike[str] | str
Path to a `symbology.json` file to use as a symbology source.
json_file: Path | PathLike[str] | str
json_file: PathLike[str] | str
Path to a JSON file that contains encoded DBN data.
out_file: Path | PathLike[str] | str (optional)
out_file: PathLike[str] | str (optional)
Path to a file to write results to. If unspecified, `_mapped` will be
appended to the `json_file` name.
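
The JSON variant works the same way, under the same assumptions:

```python
from databento.common.symbology import map_symbols_json

# Inserts a symbols key into each record of a JSON file of encoded DBN
# data; writes "trades_mapped.json" because out_file is omitted.
mapped_path = map_symbols_json(
    symbology_file="symbology.json",
    json_file="trades.json",
)
```
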
@@ -243,7 +243,9 @@ def insert_metadata(self, metadata: Metadata) -> None:
return

stype_in = SType(metadata.stype_in) if metadata.stype_in is not None else None
stype_out = SType(metadata.stype_out) if metadata.stype_out is not None else None
stype_out = (
SType(metadata.stype_out) if metadata.stype_out is not None else None
)

for symbol_in, entries in metadata.mappings.items():
for entry in entries:
@@ -395,19 +397,19 @@ def insert_json(

def map_symbols_csv(
self,
csv_file: Path | PathLike[str] | str,
out_file: Path | PathLike[str] | str | None = None,
csv_file: PathLike[str] | str,
out_file: PathLike[str] | str | None = None,
) -> Path:
"""
Use the loaded symbology data to map a symbols column onto an existing
CSV file. The result is written to `out_file`.
Parameters
----------
csv_file: Path | PathLike[str] | str
csv_file: PathLike[str] | str
Path to a CSV file that contains encoded DBN data; must contain
a `ts_recv` or `ts_event` and `instrument_id` column.
out_file: Path | PathLike[str] | str (optional)
out_file: PathLike[str] | str (optional)
Path to a file to write results to. If unspecified, `_mapped` will be
appended to the `csv_file` name.
@@ -474,18 +476,18 @@ def map_symbols_csv(

def map_symbols_json(
self,
json_file: Path | PathLike[str] | str,
out_file: Path | PathLike[str] | str | None = None,
json_file: PathLike[str] | str,
out_file: PathLike[str] | str | None = None,
) -> Path:
"""
Use the loaded symbology data to insert a symbols key into records of
an existing JSON file. The result is written to `out_file`.
Parameters
----------
json_file: Path | PathLike[str] | str
json_file: PathLike[str] | str
Path to a JSON file that contains encoded DBN data.
out_file: Path | PathLike[str] | str (optional)
out_file: PathLike[str] | str (optional)
Path to a file to write results to. If unspecified, `_mapped` will be
appended to the `json_file` name.
4 changes: 2 additions & 2 deletions databento/common/validation.py
@@ -18,7 +18,7 @@ def validate_path(value: PathLike[str] | str, param: str) -> Path:
Parameters
----------
value: PathLike or str
value: PathLike[str] or str
The value to validate.
param : str
The name of the parameter being validated (for any error message).
@@ -49,7 +49,7 @@ def validate_file_write_path(value: PathLike[str] | str, param: str) -> Path:
Parameters
----------
value: PathLike or str
value: PathLike[str] or str
The value to validate.
param : str
The name of the parameter being validated (for any error message).
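
A hedged sketch of this internal helper (behavior beyond coercion to `Path` is not shown in this diff):

```python
from databento.common.validation import validate_path

# Coerces a PathLike[str] or str value to a pathlib.Path; the second
# argument names the parameter for any error message that is raised.
path = validate_path("data/trades.dbn", "path")
```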
21 changes: 14 additions & 7 deletions databento/historical/api/batch.py
@@ -20,6 +20,7 @@
from databento.common.enums import Delivery
from databento.common.enums import Packaging
from databento.common.enums import SplitDuration
from databento.common.error import BentoError
from databento.common.parsing import datetime_to_string
from databento.common.parsing import optional_datetime_to_string
from databento.common.parsing import optional_symbols_list_to_list
@@ -252,7 +253,7 @@ def download(
Parameters
----------
output_dir: PathLike or str
output_dir: PathLike[str] or str
The directory to download the file(s) to.
job_id : str
The batch job identifier.
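
A hedged usage sketch for `download` (the API key and job ID are placeholders for a previously submitted batch job):

```python
import databento as db

client = db.Historical(key="YOUR_API_KEY")

# Download the files of a completed batch job into a local directory.
downloaded = client.batch.download(
    output_dir="data",
    job_id="GLBX-20240123-ABCDEFGHIJ",
)
```
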
@@ -371,8 +372,11 @@ def _download_file(

logger.debug("Starting download of file %s", output_path.name)
with open(output_path, mode=mode) as f:
for chunk in response.iter_content(chunk_size=None):
f.write(chunk)
try:
for chunk in response.iter_content(chunk_size=None):
f.write(chunk)
except Exception as exc:
raise BentoError(f"Error downloading file: {exc}") from None
logger.debug("Download of %s completed", output_path.name)

async def download_async(
@@ -393,7 +397,7 @@
Parameters
----------
output_dir: PathLike or str
output_dir: PathLike[str] or str
The directory to download the file(s) to.
job_id : str
The batch job identifier.
@@ -512,9 +516,12 @@ async def _download_file_async(

logger.debug("Starting async download of file %s", output_path.name)
with open(output_path, mode=mode) as f:
async for chunk in response.content.iter_chunks():
data: bytes = chunk[0]
f.write(data)
try:
async for chunk in response.content.iter_chunks():
data: bytes = chunk[0]
f.write(data)
except Exception as exc:
raise BentoError(f"Error downloading file: {exc}") from None
logger.debug("Download of %s completed", output_path.name)

def _get_file_download_headers_and_mode(
4 changes: 2 additions & 2 deletions databento/historical/api/timeseries.py
@@ -80,7 +80,7 @@ def get_range(
The output symbology type to resolve to.
limit : int, optional
The maximum number of records to return. If `None` then no limit.
path : PathLike or str, optional
path : PathLike[str] or str, optional
The file path to stream the data to on disk (will then return a `DBNStore`).
Returns
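
A hedged sketch of `get_range` streaming to disk (the key, dataset, and dates are placeholders):

```python
import databento as db

client = db.Historical(key="YOUR_API_KEY")
data = client.timeseries.get_range(
    dataset="GLBX.MDP3",
    symbols=["ES.FUT"],
    stype_in="parent",
    schema="trades",
    start="2024-01-02",
    end="2024-01-03",
    path="trades.dbn.zst",  # streams to disk and still returns a DBNStore
)
print(data.to_df().head())
```
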
@@ -177,7 +177,7 @@ async def get_range_async(
The output symbology type to resolve to.
limit : int, optional
The maximum number of records to return. If `None` then no limit.
path : PathLike or str, optional
path : PathLike[str] or str, optional
The file path to stream the data to on disk (will then return a `DBNStore`).
Returns
15 changes: 11 additions & 4 deletions databento/historical/http.py
@@ -18,6 +18,7 @@
from databento.common.dbnstore import DBNStore
from databento.common.error import BentoClientError
from databento.common.error import BentoDeprecationWarning
from databento.common.error import BentoError
from databento.common.error import BentoServerError
from databento.common.error import BentoWarning
from databento.common.system import USER_AGENT
@@ -132,8 +133,11 @@ def _stream(
else:
writer = open(path, "x+b")

for chunk in response.iter_content(chunk_size=None):
writer.write(chunk)
try:
for chunk in response.iter_content(chunk_size=None):
writer.write(chunk)
except Exception as exc:
raise BentoError(f"Error streaming response: {exc}") from None

if path is None:
writer.seek(0)
@@ -169,8 +173,11 @@
else:
writer = open(path, "x+b")

async for chunk in response.content.iter_chunks():
writer.write(chunk[0])
try:
async for chunk in response.content.iter_chunks():
writer.write(chunk[0])
except Exception as exc:
raise BentoError(f"Error streaming response: {exc}") from None

if path is None:
writer.seek(0)
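
The same wrap-and-re-raise pattern appears in both the sync and async paths above; a generic standalone sketch of the idea, using `requests` (which the synchronous client is built on):

```python
import requests

from databento.common.error import BentoError


def stream_to_file(url: str, path: str) -> None:
    """Stream an HTTP response body to disk, normalizing failures."""
    response = requests.get(url, stream=True, timeout=30)
    with open(path, "xb") as writer:
        try:
            for chunk in response.iter_content(chunk_size=None):
                writer.write(chunk)
        except Exception as exc:
            # "from None" hides the transport-level traceback; callers see
            # a single BentoError describing the failure.
            raise BentoError(f"Error streaming response: {exc}") from None
```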