Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor(all): unify method signatures across backends #9383

Closed
wants to merge 23 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
c326048
feat(sigcheck): first cut of signature checking utilities
gforsyth Jun 13, 2024
e49caf7
test(signatures): add fixture that returns uninstantiated backend class
gforsyth Jun 13, 2024
5655f80
feat(sigcheck): make annotation checks optional in signature comparisons
gforsyth Jun 13, 2024
18995f8
chore(signature): swap order of arguments to `compatible`
gforsyth Jun 13, 2024
d50efd0
test(signatures): add signature checks for SQL backends
gforsyth Jun 14, 2024
662afa5
test(sigcheck): add test for comparing pos-only vs positional args
gforsyth Jun 14, 2024
f00d8f1
test(sigcheck): don't check `_FileIOHandler` as it is part of `BaseBa…
gforsyth Jun 14, 2024
2624c50
refactor(read_csv): unify read_csv signatures across backends
gforsyth Jun 14, 2024
c1153fb
refactor(read_parquet): unify read_parquet signatures across backends
gforsyth Jun 14, 2024
2a7edc6
refactor(read_json): unify read_json signatures across backends
gforsyth Jun 14, 2024
00a6822
refactor(read_delta): unify read_delta signatures across backends
gforsyth Jun 14, 2024
b1f3d5c
refactor(*_catalog): unify *_catalog(s) signatures across backends
gforsyth Jun 14, 2024
22492be
refactor(*_database): unify *_database(s) signatures across backends
gforsyth Jun 14, 2024
8ca2ea1
refactor(table): unify `table` signatures across backends
gforsyth Jun 14, 2024
8f55d48
refactor(_table): unify `_table(s)` signatures across backends
gforsyth Jun 14, 2024
89203b6
refactor(compile): unify compile signatures across backends
gforsyth Jun 14, 2024
d6f98c5
refactor(execute): unify execute signatures across backends
gforsyth Jun 14, 2024
e9620b8
refactor(insert): unify insert signatures across backends
gforsyth Jun 14, 2024
815d123
chore(sigcheck): skip signature checks for do_connect
gforsyth Jun 14, 2024
5279875
refactor(views): unify _view signatures across backends
gforsyth Jun 14, 2024
c5c0fb8
chore(clickhouse): remove XPASSing xfail
gforsyth Jun 17, 2024
a574c92
refactor(to_parquet_dir): use `directory` as common arg name
gforsyth Aug 19, 2024
082f519
chore(sigcheck): ignore `from_connection`
gforsyth Aug 19, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 26 additions & 19 deletions ibis/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -337,7 +337,7 @@ def to_torch(
}

def read_parquet(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
) -> ir.Table:
"""Register a parquet file as a table in the current backend.

Expand All @@ -362,14 +362,15 @@ def read_parquet(
)

def read_csv(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
) -> ir.Table:
"""Register a CSV file as a table in the current backend.

Parameters
----------
path
The data source. A string or Path to the CSV file.
The data source(s). A string or Path to the CSV file or directory
containing CSV files.
table_name
An optional name to use for the created table. This defaults to
a sequentially generated name.
Expand All @@ -387,7 +388,7 @@ def read_csv(
)

def read_json(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
) -> ir.Table:
"""Register a JSON file as a table in the current backend.

Expand All @@ -412,13 +413,13 @@ def read_json(
)

def read_delta(
self, source: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
):
"""Register a Delta Lake table in the current database.

Parameters
----------
source
path
The data source. Must be a directory
containing a Delta Lake table.
table_name
Expand Down Expand Up @@ -492,7 +493,8 @@ def to_parquet_dir(
expr
The ibis expression to execute and persist to parquet.
directory
The data source. A string or Path to the directory where the parquet file will be written.
A string or Path to the directory where the parquet file will be
written.
params
Mapping of scalar parameter expressions to value.
**kwargs
Expand Down Expand Up @@ -587,7 +589,7 @@ def to_delta(

class CanListCatalog(abc.ABC):
@abc.abstractmethod
def list_catalogs(self, like: str | None = None) -> list[str]:
def list_catalogs(self, *, like: str | None = None) -> list[str]:
"""List existing catalogs in the current connection.

::: {.callout-note}
Expand Down Expand Up @@ -623,7 +625,7 @@ def current_catalog(self) -> str:

class CanCreateCatalog(CanListCatalog):
@abc.abstractmethod
def create_catalog(self, name: str, force: bool = False) -> None:
def create_catalog(self, name: str, /, *, force: bool = False) -> None:
"""Create a new catalog.

::: {.callout-note}
Expand All @@ -647,7 +649,7 @@ def create_catalog(self, name: str, force: bool = False) -> None:
"""

@abc.abstractmethod
def drop_catalog(self, name: str, force: bool = False) -> None:
def drop_catalog(self, name: str, /, *, force: bool = False) -> None:
"""Drop a catalog with name `name`.

::: {.callout-note}
Expand All @@ -674,7 +676,7 @@ def drop_catalog(self, name: str, force: bool = False) -> None:
class CanListDatabase(abc.ABC):
@abc.abstractmethod
def list_databases(
self, like: str | None = None, catalog: str | None = None
self, *, like: str | None = None, catalog: str | None = None
) -> list[str]:
"""List existing databases in the current connection.

Expand Down Expand Up @@ -715,7 +717,7 @@ def current_database(self) -> str:
class CanCreateDatabase(CanListDatabase):
@abc.abstractmethod
def create_database(
self, name: str, catalog: str | None = None, force: bool = False
self, name: str, /, *, catalog: str | None = None, force: bool = False
) -> None:
"""Create a database named `name` in `catalog`.

Expand All @@ -733,7 +735,7 @@ def create_database(

@abc.abstractmethod
def drop_database(
self, name: str, catalog: str | None = None, force: bool = False
self, name: str, /, *, catalog: str | None = None, force: bool = False
) -> None:
"""Drop the database with `name` in `catalog`.

Expand Down Expand Up @@ -777,15 +779,15 @@ class CanCreateSchema(CanListSchema):
def create_schema(
self, name: str, database: str | None = None, force: bool = False
) -> None:
self.create_database(name=name, catalog=database, force=force)
self.create_database(name, catalog=database, force=force)

@util.deprecated(
instead="Use `drop_database` instead", as_of="9.0", removed_in="10.0"
)
def drop_schema(
self, name: str, database: str | None = None, force: bool = False
) -> None:
self.drop_database(name=name, catalog=database, force=force)
self.drop_database(name, catalog=database, force=force)


class BaseBackend(abc.ABC, _FileIOHandler):
Expand Down Expand Up @@ -934,7 +936,7 @@ def _filter_with_like(values: Iterable[str], like: str | None = None) -> list[st

@abc.abstractmethod
def list_tables(
self, like: str | None = None, database: tuple[str, str] | str | None = None
self, *, like: str | None = None, database: tuple[str, str] | str | None = None
) -> list[str]:
"""Return the list of table names in the current database.

Expand Down Expand Up @@ -972,7 +974,7 @@ def list_tables(

@abc.abstractmethod
def table(
self, name: str, database: tuple[str, str] | str | None = None
self, name: str, /, *, database: tuple[str, str] | str | None = None
) -> ir.Table:
"""Construct a table expression.

Expand Down Expand Up @@ -1075,18 +1077,21 @@ def _run_pre_execute_hooks(self, expr: ir.Expr) -> None:
def compile(
self,
expr: ir.Expr,
/,
*,
params: Mapping[ir.Expr, Any] | None = None,
) -> Any:
"""Compile an expression."""
return self.compiler.to_sql(expr, params=params)

def execute(self, expr: ir.Expr) -> Any:
def execute(self, expr: ir.Expr, /) -> Any:
"""Execute an expression."""

@abc.abstractmethod
def create_table(
self,
name: str,
/,
obj: pd.DataFrame | pa.Table | ir.Table | None = None,
*,
schema: ibis.Schema | None = None,
Expand Down Expand Up @@ -1126,6 +1131,7 @@ def create_table(
def drop_table(
self,
name: str,
/,
*,
database: str | None = None,
force: bool = False,
Expand Down Expand Up @@ -1165,6 +1171,7 @@ def rename_table(self, old_name: str, new_name: str) -> None:
def create_view(
self,
name: str,
/,
obj: ir.Table,
*,
database: str | None = None,
Expand Down Expand Up @@ -1193,7 +1200,7 @@ def create_view(

@abc.abstractmethod
def drop_view(
self, name: str, *, database: str | None = None, force: bool = False
self, name: str, /, *, database: str | None = None, force: bool = False
) -> None:
"""Drop a view.

Expand Down
23 changes: 18 additions & 5 deletions ibis/backends/bigquery/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def load(file: str) -> None:
return self.table(table_name, database=(catalog, database))

def read_parquet(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
):
"""Read Parquet data into a BigQuery table.

Expand Down Expand Up @@ -274,7 +274,7 @@ def read_parquet(
)

def read_csv(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
) -> ir.Table:
"""Read CSV data into a BigQuery table.

Expand Down Expand Up @@ -303,7 +303,7 @@ def read_csv(
return self._read_file(path, table_name=table_name, job_config=job_config)

def read_json(
self, path: str | Path, table_name: str | None = None, **kwargs: Any
self, path: str | Path, /, *, table_name: str | None = None, **kwargs: Any
) -> ir.Table:
"""Read newline-delimited JSON data into a BigQuery table.

Expand Down Expand Up @@ -533,6 +533,8 @@ def dataset_id(self):
def create_database(
self,
name: str,
/,
*,
catalog: str | None = None,
force: bool = False,
collate: str | None = None,
Expand Down Expand Up @@ -560,6 +562,8 @@ def create_database(
def drop_database(
self,
name: str,
/,
*,
catalog: str | None = None,
force: bool = False,
cascade: bool = False,
Expand All @@ -575,7 +579,7 @@ def drop_database(
self.raw_sql(stmt.sql(self.name))

def table(
self, name: str, database: str | None = None, schema: str | None = None
self, name: str, /, *, database: str | None = None, schema: str | None = None
) -> ir.Table:
table_loc = self._warn_and_create_table_loc(database, schema)
table = sg.parse_one(f"`{name}`", into=sge.Table, read=self.name)
Expand Down Expand Up @@ -686,6 +690,8 @@ def current_database(self) -> str | None:
def compile(
self,
expr: ir.Expr,
/,
*,
limit: str | None = None,
params=None,
pretty: bool = True,
Expand All @@ -706,7 +712,7 @@ def compile(
self._log(sql)
return sql

def execute(self, expr, params=None, limit="default", **kwargs):
def execute(self, expr, /, *, params=None, limit="default", **kwargs):
"""Compile and execute the given Ibis expression.

Compile and execute Ibis expression using this backend client
Expand Down Expand Up @@ -753,7 +759,9 @@ def execute(self, expr, params=None, limit="default", **kwargs):
def insert(
self,
table_name: str,
/,
obj: pd.DataFrame | ir.Table | list | dict,
*,
schema: str | None = None,
database: str | None = None,
overwrite: bool = False,
Expand Down Expand Up @@ -866,6 +874,7 @@ def list_databases(

def list_tables(
self,
*,
like: str | None = None,
database: tuple[str, str] | str | None = None,
schema: str | None = None,
Expand Down Expand Up @@ -916,6 +925,7 @@ def version(self):
def create_table(
self,
name: str,
/,
obj: ir.Table
| pd.DataFrame
| pa.Table
Expand Down Expand Up @@ -1069,6 +1079,7 @@ def create_table(
def drop_table(
self,
name: str,
/,
*,
schema: str | None = None,
database: tuple[str, str] | str | None = None,
Expand All @@ -1090,6 +1101,7 @@ def drop_table(
def create_view(
self,
name: str,
/,
obj: ir.Table,
*,
schema: str | None = None,
Expand All @@ -1116,6 +1128,7 @@ def create_view(
def drop_view(
self,
name: str,
/,
*,
schema: str | None = None,
database: str | None = None,
Expand Down
Loading
Loading