Skip to content

Commit

Permalink
use parametrized constructor to reduce tests
Browse files — browse the repository at this point in the history
  • Loading branch information
ion-elgreco committed Nov 20, 2023
1 parent 7c191a2 commit ff53a07
Showing 1 changed file with 21 additions and 68 deletions.
89 changes: 21 additions & 68 deletions python/tests/test_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,88 +304,41 @@ def test_write_iterator(
assert DeltaTable(tmp_path).to_pyarrow_table() == sample_data


@pytest.mark.parametrize("large_dtypes", [True, False])
@pytest.mark.parametrize(
    "constructor",
    [
        lambda table: table.to_pyarrow_dataset(),
        lambda table: table.to_pyarrow_table(),
        lambda table: table.to_pyarrow_table().to_batches()[0],
    ],
)
def test_write_dataset_table_recordbatch(
    tmp_path: pathlib.Path,
    existing_table: DeltaTable,
    sample_data: pa.Table,
    large_dtypes: bool,
    constructor,
):
    """Round-trip the existing table through write_deltalake in overwrite mode.

    Parametrized over the three pyarrow input kinds write_deltalake accepts
    (Dataset, Table, RecordBatch) and over large_dtypes on/off; each
    combination must reproduce ``sample_data`` exactly on read-back.
    """
    # `constructor` converts the fixture table into one of the supported
    # pyarrow input types; the write path must handle all of them alike.
    dataset = constructor(existing_table)

    write_deltalake(tmp_path, dataset, mode="overwrite", large_dtypes=large_dtypes)
    assert DeltaTable(tmp_path).to_pyarrow_table() == sample_data


def test_write_dataset_large_types(
    tmp_path: pathlib.Path, existing_table: DeltaTable, sample_data: pa.Table
):
    """A pyarrow Dataset written with large_dtypes=True round-trips the sample data."""
    source = existing_table.to_pyarrow_dataset()
    write_deltalake(tmp_path, source, mode="overwrite", large_dtypes=True)

    round_tripped = DeltaTable(tmp_path).to_pyarrow_table()
    assert round_tripped == sample_data


def test_write_table(
    tmp_path: pathlib.Path, existing_table: DeltaTable, sample_data: pa.Table
):
    """A pyarrow Table written in overwrite mode round-trips the sample data."""
    source_table = existing_table.to_pyarrow_table()
    write_deltalake(tmp_path, source_table, mode="overwrite")

    round_tripped = DeltaTable(tmp_path).to_pyarrow_table()
    assert round_tripped == sample_data


def test_write_table_large_dtypes(
    tmp_path: pathlib.Path, existing_table: DeltaTable, sample_data: pa.Table
):
    """A pyarrow Table written with large_dtypes=True round-trips the sample data."""
    contents = existing_table.to_pyarrow_table()
    write_deltalake(tmp_path, contents, mode="overwrite", large_dtypes=True)

    assert DeltaTable(tmp_path).to_pyarrow_table() == sample_data


def test_write_recordbatch(
    tmp_path: pathlib.Path, existing_table: DeltaTable, sample_data: pa.Table
):
    """A single RecordBatch written in overwrite mode round-trips the sample data."""
    first_batch = existing_table.to_pyarrow_table().to_batches()[0]
    write_deltalake(tmp_path, first_batch, mode="overwrite")

    round_tripped = DeltaTable(tmp_path).to_pyarrow_table()
    assert round_tripped == sample_data


def test_write_recordbatch_large_dtypes(
    tmp_path: pathlib.Path, existing_table: DeltaTable, sample_data: pa.Table
):
    """A single RecordBatch written with large_dtypes=True round-trips the sample data."""
    first_batch = existing_table.to_pyarrow_table().to_batches()[0]
    write_deltalake(tmp_path, first_batch, mode="overwrite", large_dtypes=True)

    assert DeltaTable(tmp_path).to_pyarrow_table() == sample_data


@pytest.mark.parametrize("large_dtypes", [True, False])
def test_write_recordbatchreader(
    tmp_path: pathlib.Path,
    existing_table: DeltaTable,
    sample_data: pa.Table,
    large_dtypes: bool,
):
    """A RecordBatchReader round-trips the sample data in overwrite mode.

    Parametrized over large_dtypes on/off, merging what were previously two
    near-identical copies of this test.
    """
    # Build a fresh reader over the existing table's batches; the schema is
    # taken from the same dataset so it matches the batches exactly.
    batches = existing_table.to_pyarrow_dataset().to_batches()
    reader = RecordBatchReader.from_batches(
        existing_table.to_pyarrow_dataset().schema, batches
    )

    write_deltalake(tmp_path, reader, mode="overwrite", large_dtypes=large_dtypes)
    assert DeltaTable(tmp_path).to_pyarrow_table() == sample_data


Expand Down

0 comments on commit ff53a07

Please sign in to comment.