Skip to content

Commit

Permalink
move tests into load
Browse files Browse the repository at this point in the history
  • Loading branch information
IlyaFaer committed Mar 25, 2024
1 parent 75e894e commit 15d2d83
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 32 deletions.
33 changes: 33 additions & 0 deletions tests/load/bigquery/test_bigquery_streaming_insert.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import dlt
from dlt.destinations.impl.bigquery.bigquery_adapter import bigquery_adapter
from tests.pipeline.utils import assert_load_info


def test_bigquery_streaming_insert():
    """Smoke test: load a single row into BigQuery and read it back.

    Uses the default insert path (no adapter configuration) and verifies
    the loaded field values round-trip through a SQL query.
    """
    pipeline = dlt.pipeline(destination="bigquery")
    load_info = pipeline.run(
        [{"field1": 1, "field2": 2}], table_name="test_streaming_items"
    )

    assert_load_info(load_info)

    with pipeline.sql_client() as client:
        with client.execute_query("SELECT * FROM test_streaming_items;") as cursor:
            rows = cursor.fetchall()
            # Only the first two columns are compared — presumably the loader
            # appends extra bookkeeping columns after them (TODO confirm).
            assert tuple(rows[0])[:2] == (1, 2)


def test_bigquery_adapter_streaming_insert():
    """Smoke test: load via a resource configured with ``insert_api="streaming"``.

    Same round-trip check as the plain test, but the resource is first
    passed through ``bigquery_adapter`` to select the streaming insert API.
    """

    @dlt.resource
    def test_resource():
        yield {"field1": 1, "field2": 2}

    # Switch this resource to the BigQuery streaming insert API.
    bigquery_adapter(test_resource, insert_api="streaming")

    pipeline = dlt.pipeline(destination="bigquery")
    load_info = pipeline.run(test_resource, table_name="test_streaming_items")

    assert_load_info(load_info)

    with pipeline.sql_client() as client:
        with client.execute_query("SELECT * FROM test_streaming_items;") as cursor:
            rows = cursor.fetchall()
            # Only the first two columns are compared — presumably the loader
            # appends extra bookkeeping columns after them (TODO confirm).
            assert tuple(rows[0])[:2] == (1, 2)
32 changes: 0 additions & 32 deletions tests/load/pipeline/test_bigquery.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import pytest

import dlt
from dlt.common import Decimal
from dlt.destinations.impl.bigquery.bigquery_adapter import bigquery_adapter

from tests.pipeline.utils import assert_load_info
from tests.load.pipeline.utils import destinations_configs, DestinationTestConfiguration
Expand Down Expand Up @@ -38,33 +36,3 @@ def test_bigquery_numeric_types(destination_config: DestinationTestConfiguration
row = q.fetchone()
assert row[0] == data[0]["col_big_numeric"]
assert row[1] == data[0]["col_numeric"]


def test_bigquery_streaming_insert():
    # NOTE(review): this is the copy removed from tests/load/pipeline/test_bigquery.py
    # by this commit; it is byte-identical to the version added under tests/load/bigquery/.
    """Load one row into BigQuery and verify it round-trips via a SQL query."""
    pipe = dlt.pipeline(destination="bigquery")
    pack = pipe.run([{"field1": 1, "field2": 2}], table_name="test_streaming_items")

    assert_load_info(pack)

    with pipe.sql_client() as client:
        with client.execute_query("SELECT * FROM test_streaming_items;") as cursor:
            res = cursor.fetchall()
            # Only the first two columns are checked — presumably extra
            # columns follow the loaded fields (TODO confirm).
            assert tuple(res[0])[:2] == (1, 2)


def test_bigquery_adapter_streaming_insert():
    # NOTE(review): this is the copy removed from tests/load/pipeline/test_bigquery.py
    # by this commit; it is byte-identical to the version added under tests/load/bigquery/.
    """Load one row via a resource configured with insert_api="streaming"."""

    @dlt.resource
    def test_resource():
        yield {"field1": 1, "field2": 2}

    # Configure the resource to use the BigQuery streaming insert API.
    bigquery_adapter(test_resource, insert_api="streaming")

    pipe = dlt.pipeline(destination="bigquery")
    pack = pipe.run(test_resource, table_name="test_streaming_items")

    assert_load_info(pack)

    with pipe.sql_client() as client:
        with client.execute_query("SELECT * FROM test_streaming_items;") as cursor:
            res = cursor.fetchall()
            # Only the first two columns are checked — presumably extra
            # columns follow the loaded fields (TODO confirm).
            assert tuple(res[0])[:2] == (1, 2)

0 comments on commit 15d2d83

Please sign in to comment.