From 1130b9a8eeedc8d7a3e5f9b03e5b745ba6f70300 Mon Sep 17 00:00:00 2001
From: Marcel Coetzee
Date: Mon, 15 Jan 2024 17:44:29 +0200
Subject: [PATCH] Remove

Signed-off-by: Marcel Coetzee
---
 tests/load/bigquery/test_bigquery.py | 87 ----------------------------
 1 file changed, 87 deletions(-)
 delete mode 100644 tests/load/bigquery/test_bigquery.py

diff --git a/tests/load/bigquery/test_bigquery.py b/tests/load/bigquery/test_bigquery.py
deleted file mode 100644
index ad517888e1..0000000000
--- a/tests/load/bigquery/test_bigquery.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from typing import Iterator, Dict
-
-import pytest
-
-import dlt
-from dlt.common.pendulum import pendulum
-from dlt.common.utils import uniq_id
-from dlt.extract import DltResource
-from tests.load.pipeline.utils import destinations_configs, DestinationTestConfiguration
-
-
-@pytest.mark.parametrize(
-    "destination_config",
-    destinations_configs(all_staging_configs=True, subset=["bigquery"]),
-    ids=lambda x: x.name,
-)
-def test_bigquery_partition_by_date(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
-
-    @dlt.resource(
-        write_disposition="merge",
-        primary_key="my_date_column",
-        columns={"my_date_column": {"data_type": "date", "partition": True, "nullable": False}},
-    )
-    def demo_resource() -> Iterator[Dict[str, pendulum.Date]]:
-        for i in range(10):
-            yield {
-                "my_date_column": pendulum.from_timestamp(1700784000 + i * 50_000).date(),
-            }
-
-    @dlt.source(max_table_nesting=0)
-    def demo_source() -> DltResource:
-        return demo_resource
-
-    pipeline.run(demo_source())
-
-
-@pytest.mark.parametrize(
-    "destination_config",
-    destinations_configs(all_staging_configs=True, subset=["bigquery"]),
-    ids=lambda x: x.name,
-)
-def test_bigquery_partition_by_timestamp(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
-
-    @dlt.resource(
-        write_disposition="merge",
-        primary_key="my_timestamp_column",
-        columns={
-            "my_timestamp_column": {"data_type": "timestamp", "partition": True, "nullable": False}
-        },
-    )
-    def demo_resource() -> Iterator[Dict[str, pendulum.DateTime]]:
-        for i in range(10):
-            yield {
-                "my_timestamp_column": pendulum.from_timestamp(1700784000 + i * 50_000),
-            }
-
-    @dlt.source(max_table_nesting=0)
-    def demo_source() -> DltResource:
-        return demo_resource
-
-    pipeline.run(demo_source())
-
-
-@pytest.mark.parametrize(
-    "destination_config",
-    destinations_configs(all_staging_configs=True, subset=["bigquery"]),
-    ids=lambda x: x.name,
-)
-def test_bigquery_partition_by_integer(destination_config: DestinationTestConfiguration) -> None:
-    pipeline = destination_config.setup_pipeline(f"bigquery_{uniq_id()}", full_refresh=True)
-
-    @dlt.resource(
-        columns={"some_int": {"data_type": "bigint", "partition": True, "nullable": False}},
-    )
-    def demo_resource() -> Iterator[Dict[str, int]]:
-        for i in range(10):
-            yield {
-                "some_int": i,
-            }
-
-    @dlt.source(max_table_nesting=0)
-    def demo_source() -> DltResource:
-        return demo_resource
-
-    pipeline.run(demo_source())