diff --git a/dlt/common/schema/schema.py b/dlt/common/schema/schema.py
index 38f5efc0bc..04ca7f88a1 100644
--- a/dlt/common/schema/schema.py
+++ b/dlt/common/schema/schema.py
@@ -297,7 +297,7 @@ def apply_schema_contract(
         existing_table: TTableSchema = self._schema_tables.get(table_name, None)
 
         # table is new when not yet exist or
-        is_dlt_table = table_name.startswith("_dlt")
+        is_dlt_table = table_name.startswith(self._dlt_tables_prefix)
         should_raise = raise_on_freeze and not is_dlt_table
         is_new_table = not existing_table or self.is_new_table(table_name)
         # check case where we have a new table
diff --git a/tests/pipeline/test_pipeline.py b/tests/pipeline/test_pipeline.py
index aa936a7690..7a70fded1e 100644
--- a/tests/pipeline/test_pipeline.py
+++ b/tests/pipeline/test_pipeline.py
@@ -2280,22 +2280,19 @@ def test_pipeline_with_frozen_schema_contract() -> None:
         destination="duckdb",
     )
 
+    # Create a database schema with table
+    with pipeline.sql_client() as c:
+        dataset = c.fully_qualified_dataset_name()
+        table = f"{c.fully_qualified_dataset_name()}.test_items"
+        conn = c.open_connection()
+        conn.sql(f"CREATE SCHEMA {dataset}")
+        conn.sql(f"CREATE TABLE {table} (id INTEGER PRIMARY KEY, name VARCHAR)")
+
     data = [
         {"id": 101, "name": "sub item 101"},
        {"id": 101, "name": "sub item 102"},
     ]
 
-    pipeline.run(
-        data,
-        table_name="test_items",
-    )
-
-    with pipeline.sql_client() as c:
-        c.execute_sql("DROP TABLE _dlt_loads")
-        c.execute_sql("DROP TABLE _dlt_version")
-        c.execute_sql("DROP TABLE _dlt_pipeline_state")
-        c.execute_sql("TRUNCATE TABLE test_items")
-
     pipeline.run(
         data,
         table_name="test_items",