Fix integ tests (#280)
* Fix integ tests for GitHub Actions

* Separated database in test_docs.py

* Separated database in test_snapshot.py

* Run integ tests for a database with a random name

* Removed unneeded test

* Moving unique_schema, profiles_config_update, and cleanup to conftest.py

* More refactoring

* No need to update profiles_config_update; remove it

* Now that we are using a different schema for each class, we need to clean up after the tests

* Update test_docs.py and test_snapshot.py as well

* Create separate conftest.py for integration tests

* Use helper methods where possible

---------

Co-authored-by: Akira Ajisaka <[email protected]>
moomindani and aajisaka authored Nov 15, 2023
1 parent 879f41a commit bbbc05a
Showing 8 changed files with 53 additions and 109 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/integration.yml
@@ -35,7 +35,7 @@ jobs:

env:
TOXENV: "integration"
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
PYTEST_ADDOPTS: "-v --color=yes --csv integ_results.csv"
DBT_AWS_ACCOUNT: ${{ secrets.DBT_AWS_ACCOUNT }}
DBT_GLUE_ROLE_ARN: ${{ secrets.DBT_GLUE_ROLE_ARN }}
DBT_GLUE_REGION: ${{ secrets.DBT_GLUE_REGION }}
@@ -103,7 +103,7 @@ jobs:

env:
TOXENV: "integration"
PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
PYTEST_ADDOPTS: "-v --color=yes --csv integ_results.csv -s"
DBT_AWS_ACCOUNT: ${{ secrets.DBT_AWS_ACCOUNT }}
DBT_GLUE_ROLE_ARN: ${{ secrets.DBT_GLUE_ROLE_ARN }}
DBT_GLUE_REGION: ${{ secrets.DBT_GLUE_REGION }}
1 change: 1 addition & 0 deletions dbt/adapters/glue/connections.py
@@ -99,6 +99,7 @@ def get_response(cls, cursor) -> AdapterResponse:

@classmethod
def get_result_from_cursor(cls, cursor: GlueCursor, limit: Optional[int]) -> agate.Table:
logger.debug("get_result_from_cursor called")
data: List[Any] = []
column_names: List[str] = []
if cursor.description is not None:
6 changes: 3 additions & 3 deletions dbt/adapters/glue/gluedbapi/cursor.py
@@ -207,12 +207,12 @@ def __next__(self):
return item

def description(self):
logger.debug("GlueCursor get_columns_in_relation called")
logger.debug("GlueCursor description called")
if self.response:
return [[c["name"], c["type"]] for c in self.response.get("description", [])]

def get_response(self) -> AdapterResponse:
logger.debug("GlueCursor get_columns_in_relation called")
logger.debug("GlueCursor get_response called")
if self.statement:
r = self.statement._get_statement()
return AdapterResponse(
@@ -222,7 +222,7 @@ def get_response(self) -> AdapterResponse:
)

def close(self):
logger.debug("GlueCursor get_columns_in_relation called")
logger.debug("GlueCursor close called")
if self._closed:
raise Exception("CursorAlreadyClosed")
self._closed = True
1 change: 0 additions & 1 deletion dev-requirements.txt
@@ -14,7 +14,6 @@ flake8
pytz
tox>=3.2.0
ipdb
pytest-xdist
pytest-dotenv
pytest-csv
flaky
67 changes: 0 additions & 67 deletions tests/functional/adapter/test_basic.py
@@ -1,5 +1,4 @@
import os

import pytest
from dbt.tests.adapter.basic.files import (base_ephemeral_sql, base_table_sql,
base_view_sql, ephemeral_table_sql,
@@ -16,11 +15,7 @@
from dbt.tests.adapter.basic.test_validate_connection import BaseValidateConnection
from dbt.tests.util import (check_relations_equal, check_result_nodes_by_name,
get_manifest, relation_from_name, run_dbt)
from tests.util import get_s3_location, get_region, cleanup_s3_location

s3bucket = get_s3_location()
region = get_region()
schema_name = "dbt_functional_test_01"

# override schema_base_yml to set missing database
schema_base_yml = """
@@ -52,19 +47,10 @@
"before the models are actually run. Not sure how this test is intended to work."
)
class TestBaseCachingGlue(BaseAdapterMethod):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass


class TestSimpleMaterializationsGlue(BaseSimpleMaterializations):
# all tests within this test has the same schema
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope="class")
def project_config_update(self):
return {
@@ -83,36 +69,18 @@ def models(self):
"schema.yml": schema_base_yml,
}

@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

pass


class TestSingularTestsGlue(BaseSingularTests):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass


class TestEmptyGlue(BaseEmpty):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass


class TestEphemeralGlue(BaseEphemeral):
# all tests within this test has the same schema
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope="class")
def models(self):
return {
@@ -122,11 +90,6 @@ def models(self):
"schema.yml": schema_base_yml,
}

@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

# test_ephemeral with refresh table
def test_ephemeral(self, project):
# seed command
@@ -169,19 +132,10 @@ def test_ephemeral(self, project):


class TestSingularTestsEphemeralGlue(BaseSingularTestsEphemeral):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass


class TestIncrementalGlue(BaseIncremental):
@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

@pytest.fixture(scope="class")
def models(self):
model_incremental = """
@@ -190,10 +144,6 @@ def models(self):

return {"incremental.sql": model_incremental, "schema.yml": schema_base_yml}

@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

# test_incremental with refresh table
def test_incremental(self, project):
# seed command
@@ -239,15 +189,6 @@ def test_incremental(self, project):


class TestGenericTestsGlue(BaseGenericTests):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

def test_generic_tests(self, project):
# seed command
results = run_dbt(["seed"])
@@ -273,17 +214,9 @@ def test_generic_tests(self, project):


class TestTableMatGlue(BaseTableMaterialization):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass


class TestValidateConnectionGlue(BaseValidateConnection):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

pass

13 changes: 0 additions & 13 deletions tests/functional/adapter/test_docs.py
@@ -1,18 +1,9 @@
import os
import pytest
from dbt.tests.util import get_artifact
from dbt.tests.adapter.basic.test_docs_generate import BaseDocsGenerate, BaseDocsGenReferences
from dbt.tests.adapter.basic.expected_catalog import no_stats

schema_name = "dbt_functional_test_01"


class TestDocsGenerate(BaseDocsGenerate):
# all tests within this test has the same schema
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope="class")
def expected_catalog(self, project, profile_user):
role = None
@@ -109,10 +100,6 @@ def case(x):


class TestDocsGenReferencesGlue(BaseDocsGenReferences):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope="class")
def expected_catalog(self, project, profile_user):
role = None
23 changes: 0 additions & 23 deletions tests/functional/adapter/test_snapshot.py
@@ -3,11 +3,6 @@
from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols
from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp
from dbt.tests.util import run_dbt, relation_from_name
from tests.util import get_s3_location, get_region, cleanup_s3_location

s3bucket = get_s3_location()
region = get_region()
schema_name = "dbt_functional_test_01"


def check_relation_rows(project, snapshot_name, count):
@@ -18,15 +13,6 @@ def check_relation_rows(project, snapshot_name, count):


class TestSnapshotCheckColsGlue(BaseSnapshotCheckCols):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

@pytest.fixture(scope="class")
def project_config_update(self):
return {
@@ -79,15 +65,6 @@ def test_snapshot_check_cols(self, project):


class TestSnapshotTimestampGlue(BaseSnapshotTimestamp):
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
return schema_name

@pytest.fixture(scope='class', autouse=True)
def cleanup(self):
cleanup_s3_location(s3bucket + schema_name, region)
yield

@pytest.fixture(scope="class")
def project_config_update(self):
return {
47 changes: 47 additions & 0 deletions tests/functional/conftest.py
@@ -0,0 +1,47 @@
import pytest
import os
import random
import string
from tests.util import get_s3_location, get_region, cleanup_s3_location

s3bucket = get_s3_location()
region = get_region()

# Import the standard functional fixtures as a plugin
# Note: fixtures with session scope need to be local
pytest_plugins = ["dbt.tests.fixtures.project"]

# Use a different database for each test class
@pytest.fixture(scope="class")
def unique_schema(request, prefix) -> str:
database_suffix = ''.join(random.choices(string.digits, k=4))
return f"dbt_functional_test_{database_suffix}"


# The profile dictionary, used to write out profiles.yml
# dbt will supply a unique schema per test, so we do not specify 'schema' here
@pytest.fixture(scope="class")
def dbt_profile_target(unique_schema):
return {
'type': 'glue',
'query-comment': 'test-glue-adapter',
'role_arn': os.getenv('DBT_GLUE_ROLE_ARN'),
'user': os.getenv('DBT_GLUE_ROLE_ARN'),
'region': get_region(),
'workers': 2,
'worker_type': 'G.1X',
'schema': unique_schema,
'database': unique_schema,
'session_provisioning_timeout_in_seconds': 300,
'location': get_s3_location(),
'datalake_formats': 'delta',
'conf': "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension --conf spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog --conf spark.sql.legacy.allowNonEmptyLocationInCTAS=true",
'glue_session_reuse': True
}


@pytest.fixture(scope='class', autouse=True)
def cleanup(unique_schema):
cleanup_s3_location(s3bucket + unique_schema, region)
yield
cleanup_s3_location(s3bucket + unique_schema, region)
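With these shared fixtures in place, individual test classes no longer need their own unique_schema or cleanup fixtures. A minimal sketch of how a class-based functional test would pick them up (TestExampleGlue and the example model below are hypothetical, for illustration only, not part of this commit):

import pytest
from dbt.tests.util import run_dbt


class TestExampleGlue:  # hypothetical test class, not in the repository
    @pytest.fixture(scope="class")
    def models(self):
        # a trivial model; the schema/database and the S3 cleanup come from
        # the class-scoped fixtures in tests/functional/conftest.py
        return {"example.sql": "select 1 as id"}

    def test_run(self, project):
        results = run_dbt(["run"])
        assert len(results) == 1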
