
VER: Release 0.41.0
See release notes.
nmacholl authored Sep 3, 2024
2 parents 899df69 + 701d745 commit 7f84f96
Showing 8 changed files with 125 additions and 20 deletions.
12 changes: 12 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,17 @@
# Changelog

## 0.41.0 - 2024-09-03

#### Enhancements
- Added `databento.read_dbn` alias
- Added `mode` parameter to `DBNStore.to_file` to control the file writing mode

#### Breaking changes
- Changed default write mode for `DBNStore.to_file` to overwrite ("w")

#### Deprecations
- Deprecated `databento.from_dbn`; it will be removed in a future release, use `databento.read_dbn` instead
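The sketch below is editorial and not part of the commit; it shows how a caller might adapt to these entries. The file names are placeholders:

import databento as db

# 0.41.0: prefer the new alias; `from_dbn` still works but emits a DeprecationWarning.
store = db.read_dbn("example.dbn")  # placeholder path to a DBN file

# The default write mode is now "w" (overwrite). Pass mode="x" to keep the
# pre-0.41.0 behavior of refusing to overwrite an existing file.
store.to_file("copy.dbn", mode="x")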

## 0.40.0 - 2024-08-27

#### Enhancements
4 changes: 3 additions & 1 deletion databento/__init__.py
@@ -44,6 +44,7 @@
from databento.common.publishers import Venue
from databento.common.symbology import InstrumentMap
from databento.common.types import DBNRecord
from databento.common.validation import deprecated
from databento.historical.client import Historical
from databento.live.client import Live
from databento.reference.client import Reference
@@ -109,6 +110,7 @@

# Convenience imports
enable_logging = bentologging.enable_logging
from_dbn = DBNStore.from_file
from_dbn = deprecated("databento.from_dbn")(DBNStore.from_file)
read_dbn = DBNStore.from_file
map_symbols_csv = symbology.map_symbols_csv
map_symbols_json = symbology.map_symbols_json
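A minimal sketch (not part of the diff) of the resulting behavior: `databento.from_dbn` is now routed through the `deprecated` wrapper, so calling it emits a `DeprecationWarning`, while `databento.read_dbn` does not. The file path is a placeholder for an existing DBN file:

import warnings

import databento as db

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    store = db.from_dbn("example.dbn")  # placeholder path; warns
assert any(issubclass(w.category, DeprecationWarning) for w in caught)

store = db.read_dbn("example.dbn")  # preferred alias; no warning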
13 changes: 9 additions & 4 deletions databento/common/dbnstore.py
@@ -1029,14 +1029,20 @@ def to_parquet(
if writer is not None:
writer.close()

def to_file(self, path: PathLike[str] | str) -> None:
def to_file(
self,
path: PathLike[str] | str,
mode: Literal["w", "x"] = "w",
) -> None:
"""
Write the data to a DBN file at the given path.
Parameters
----------
path : PathLike[str] or str
The file path to write to.
mode : str, default "w"
The file write mode to use, either "x" or "w".
Raises
------
@@ -1048,9 +1054,8 @@ def to_file(self, path: PathLike[str] | str) -> None:
If path is not writable.
"""
file_path = validate_file_write_path(path, "path")
with open(file_path, mode="xb") as f:
f.write(self._data_source.reader.read())
file_path = validate_file_write_path(path, "path", exist_ok=mode == "w")
file_path.write_bytes(self._data_source.reader.read())
self._data_source = FileDataSource(file_path)

def to_json(
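A short usage sketch of the new `mode` parameter (not part of the diff; the paths are placeholders): the default "w" overwrites an existing file, while "x" keeps the old exclusive-create behavior:

import databento as db

store = db.read_dbn("example.dbn")  # placeholder path to a DBN file

store.to_file("out.dbn")  # default mode="w": creates or overwrites
store.to_file("out.dbn")  # second call succeeds; the file is replaced

try:
    store.to_file("out.dbn", mode="x")  # exclusive create against an existing file
except FileExistsError:
    print("refusing to overwrite; use mode='w' to replace the file")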
48 changes: 46 additions & 2 deletions databento/common/validation.py
@@ -1,9 +1,13 @@
from __future__ import annotations

import functools
import os
import warnings
from collections.abc import Callable
from enum import Enum
from os import PathLike
from pathlib import Path
from typing import Any
from typing import TypeVar
from urllib.parse import urlsplit
from urllib.parse import urlunsplit
@@ -44,7 +48,11 @@ def validate_path(value: PathLike[str] | str, param: str) -> Path:
) from None


def validate_file_write_path(value: PathLike[str] | str, param: str) -> Path:
def validate_file_write_path(
value: PathLike[str] | str,
param: str,
exist_ok: bool = False,
) -> Path:
"""
Validate whether the given value is a valid path to a writable file.
@@ -54,6 +62,8 @@ def validate_file_write_path(value: PathLike[str] | str, param: str) -> Path:
The value to validate.
param : str
The name of the parameter being validated (for any error message).
exist_ok : bool, default False
If False, raises a `FileExistsError` if the file exists.
Returns
-------
@@ -75,7 +85,7 @@ def validate_file_write_path(value: PathLike[str] | str, param: str) -> Path:
raise PermissionError(f"The file `{value}` is not writable.")
if path_valid.is_dir():
raise IsADirectoryError(f"The `{param}` was not a path to a file.")
if path_valid.is_file():
if not exist_ok and path_valid.is_file():
raise FileExistsError(f"The file `{value}` already exists.")
return path_valid
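A small sketch (not part of the diff) of how `DBNStore.to_file` drives this internal helper: the `mode` string is mapped to `exist_ok`, so only `exist_ok=False` can raise `FileExistsError`:

import tempfile
from pathlib import Path

from databento.common.validation import validate_file_write_path

target = Path(tempfile.mkdtemp()) / "out.dbn"
target.write_bytes(b"")  # simulate a file that already exists

validate_file_write_path(target, "path", exist_ok=True)  # mode="w": path is returned
try:
    validate_file_write_path(target, "path", exist_ok=False)  # mode="x": rejected
except FileExistsError:
    print("file already exists")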

@@ -262,3 +272,37 @@ def validate_smart_symbol(symbol: str) -> str:
tokens[1] = tokens[1].lower() # api expects lower case

return ".".join(tokens)


_D = TypeVar("_D", bound=Callable) # type: ignore [type-arg]


def deprecated(name: str | None = None) -> Callable[[_D], _D]:
"""
Decorator for a function that will emit a deprecation warning when called.
Parameters
----------
name : str, optional
An optional name to use instead of the actual function name.
Returns
-------
Callable[..., Any]
"""

def decorator(func: _D) -> _D:
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> Any:
func_name = name if name is not None else func.__name__
warnings.warn(
f"{func_name} is deprecated and will be removed in a future release",
category=DeprecationWarning,
stacklevel=2,
)
return func(*args, **kwargs)

return wrapper # type: ignore

return decorator
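A quick sketch of how the decorator is applied (the function and public name below are made up for illustration):

from databento.common.validation import deprecated

@deprecated("databento.old_helper")  # hypothetical public name shown in the warning
def old_helper(x: int) -> int:
    return x + 1

old_helper(1)  # warns: "databento.old_helper is deprecated and will be removed in a future release"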
10 changes: 0 additions & 10 deletions databento/historical/api/batch.py
@@ -39,7 +39,6 @@
from databento.common.parsing import optional_values_list_to_string
from databento.common.parsing import symbols_list_to_list
from databento.common.publishers import Dataset
from databento.common.types import Default
from databento.common.validation import validate_enum
from databento.common.validation import validate_path
from databento.common.validation import validate_semantic_string
@@ -253,7 +252,6 @@ def download(
job_id: str,
output_dir: PathLike[str] | str | None = None,
filename_to_download: str | None = None,
enable_partial_downloads: Default[bool] = Default[bool](True),
) -> list[Path]:
"""
Download a batch job or a specific file to `{output_dir}/{job_id}/`.
@@ -287,14 +285,6 @@ def download(
If a file fails to download.
"""
# TODO: Remove after a reasonable deprecation period
if not isinstance(enable_partial_downloads, Default):
warnings.warn(
"The parameter `enable_partial_downloads` has been removed and will cause an error if set in the future. Partially downloaded files will always be resumed.",
category=BentoWarning,
stacklevel=2,
)

if filename_to_download is None:
filenames_to_download = None
else:
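A sketch of a download call after this change (not part of the diff; the API key and job ID are placeholders). Partially downloaded files are now always resumed, so no flag is passed:

import databento as db

client = db.Historical("db-your-api-key")  # placeholder API key
files = client.batch.download(
    job_id="GLBX-20240903-ABCDEFGHIJ",     # placeholder job ID
    output_dir="./downloads",
)
print([path.name for path in files])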
2 changes: 1 addition & 1 deletion databento/version.py
@@ -1 +1 @@
__version__ = "0.40.0"
__version__ = "0.41.0"
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "databento"
version = "0.40.0"
version = "0.41.0"
description = "Official Python client library for Databento"
authors = [
"Databento <[email protected]>",
54 changes: 53 additions & 1 deletion tests/test_historical_bento.py
@@ -174,23 +174,75 @@ def test_file_dbnstore_given_valid_path_initialized_expected_data(
assert dbnstore.nbytes == 189


@pytest.mark.parametrize(
"schema,expected_size",
[
(Schema.MBO, 189),
(Schema.DEFINITION, 290),
],
)
def test_to_file_persists_to_disk(
test_data: Callable[[Dataset, Schema], bytes],
tmp_path: Path,
schema: Schema,
expected_size: int,
) -> None:
"""
Test that DBNStore.to_file writes files to disk.
"""
# Arrange
stub_data = test_data(Dataset.GLBX_MDP3, Schema.MBO)
stub_data = test_data(Dataset.GLBX_MDP3, schema)
dbnstore = DBNStore.from_bytes(data=stub_data)

# Act
dbn_path = tmp_path / "my_test.dbn"
dbnstore.to_file(path=dbn_path)

# Assert
assert dbn_path.exists()
assert dbn_path.stat().st_size == expected_size


def test_to_file_overwrite(
test_data: Callable[[Dataset, Schema], bytes],
tmp_path: Path,
) -> None:
"""
Test that the default write mode allows files to be overwritten.
"""
# Arrange
stub_data = test_data(Dataset.GLBX_MDP3, Schema.MBO)
dbnstore = DBNStore.from_bytes(data=stub_data)
dbn_path = tmp_path / "my_test.dbn"
dbnstore.to_file(path=dbn_path)
assert dbn_path.stat().st_size == 189

# Act
dbnstore.to_file(path=dbn_path)

# Assert
assert dbn_path.exists()
assert dbn_path.stat().st_size == 189


def test_to_file_exclusive(
test_data: Callable[[Dataset, Schema], bytes],
tmp_path: Path,
) -> None:
"""
Test that the exclusive write mode correctly rejects an existing file path.
"""
# Arrange
stub_data = test_data(Dataset.GLBX_MDP3, Schema.MBO)
dbnstore = DBNStore.from_bytes(data=stub_data)
dbn_path = tmp_path / "my_test.dbn"
dbnstore.to_file(path=dbn_path)

# Act, Assert
with pytest.raises(FileExistsError):
dbnstore.to_file(path=dbn_path, mode="x")


def test_to_ndarray_with_stub_data_returns_expected_array(
test_data: Callable[[Dataset, Schema], bytes],
) -> None:
