Skip to content

Commit

Permalink
fixes tests, adds docs
Browse files Browse the repository at this point in the history
  • Loading branch information
rudolfix committed Dec 17, 2024
1 parent 1e26655 commit 9506938
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 13 deletions.
5 changes: 1 addition & 4 deletions dlt/destinations/impl/bigquery/bigquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -401,10 +401,7 @@ def _get_info_schema_columns_query(
return query, folded_table_names

def _get_column_def_sql(self, column: TColumnSchema, table: PreparedTableSchema = None) -> str:
name = self.sql_client.escape_column_name(column["name"])
column_def_sql = (
f"{name} {self.type_mapper.to_destination_type(column, table)} {self._gen_not_null(column.get('nullable', True))}"
)
column_def_sql = super()._get_column_def_sql(column, table)
if column.get(ROUND_HALF_EVEN_HINT, False):
column_def_sql += " OPTIONS (rounding_mode='ROUND_HALF_EVEN')"
if column.get(ROUND_HALF_AWAY_FROM_ZERO_HINT, False):
Expand Down
5 changes: 2 additions & 3 deletions dlt/destinations/impl/clickhouse/clickhouse.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,11 +292,10 @@ def _get_table_update_sql(

return sql

@staticmethod
def _gen_not_null(v: bool) -> str:
def _gen_not_null(self, v: bool) -> str:
# ClickHouse fields are not nullable by default.
# We use the `Nullable` modifier instead of NULL / NOT NULL modifiers to cater for ALTER statement.
pass
return ""

def _from_db_type(
self, ch_t: str, precision: Optional[int], scale: Optional[int]
Expand Down
3 changes: 1 addition & 2 deletions dlt/destinations/job_client_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -635,8 +635,7 @@ def _get_column_hints_sql(self, c: TColumnSchema) -> str:
if c.get(h, False) is True # use ColumnPropInfos to get default value
)

@staticmethod
def _gen_not_null(nullable: bool) -> str:
def _gen_not_null(self, nullable: bool) -> str:
return "NOT NULL" if not nullable else ""

def _create_table_update(
Expand Down
9 changes: 9 additions & 0 deletions docs/website/docs/dlt-ecosystem/destinations/snowflake.md
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,12 @@ Note that we ignore missing columns `ERROR_ON_COLUMN_COUNT_MISMATCH = FALSE` and
## Supported column hints
Snowflake supports the following [column hints](../../general-usage/schema#tables-and-columns):
* `cluster` - Creates cluster column(s). Multiple columns per table are supported, but only when a new table is created.
* `unique` - Creates a UNIQUE hint on a Snowflake column; can be added to multiple columns. ([optional](#additional-destination-options))
* `primary_key` - Creates PRIMARY KEY on selected column(s), may be compound. ([optional](#additional-destination-options))

`unique` and `primary_key` are not enforced, and `dlt` does not instruct Snowflake to `RELY` on them during
query planning.


## Table and column identifiers
Snowflake supports both case-sensitive and case-insensitive identifiers. All unquoted and uppercase identifiers resolve case-insensitively in SQL statements. Case-insensitive [naming conventions](../../general-usage/naming-convention.md#case-sensitive-and-insensitive-destinations) like the default **snake_case** will generate case-insensitive identifiers. Case-sensitive (like **sql_cs_v1**) will generate
Expand Down Expand Up @@ -308,13 +314,16 @@ pipeline = dlt.pipeline(
## Additional destination options

You can define your own stage to PUT files and disable the removal of the staged files after loading.
You can also opt-in to [create indexes](#supported-column-hints).

```toml
[destination.snowflake]
# Use an existing named stage instead of the default. Default uses the implicit table stage per table
stage_name="DLT_STAGE"
# Whether to keep or delete the staged files after COPY INTO succeeds
keep_staged_files=true
# Add UNIQUE and PRIMARY KEY hints to tables
create_indexes=true
```

### Setting up CSV format
Expand Down
8 changes: 4 additions & 4 deletions tests/load/dremio/test_dremio_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,12 +48,12 @@ def test_dremio_factory() -> None:
[
TColumnSchema(name="foo", data_type="text", partition=True),
TColumnSchema(name="bar", data_type="bigint", sort=True),
TColumnSchema(name="baz", data_type="double"),
TColumnSchema(name="baz", data_type="double", nullable=False),
],
False,
[
'CREATE TABLE "test_database"."test_dataset"."event_test_table"'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE )\nPARTITION BY'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE NOT NULL)\nPARTITION BY'
' ("foo")\nLOCALSORT BY ("bar")'
],
),
Expand All @@ -66,7 +66,7 @@ def test_dremio_factory() -> None:
False,
[
'CREATE TABLE "test_database"."test_dataset"."event_test_table"'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE )\nPARTITION BY'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE )\nPARTITION BY'
' ("foo","bar")'
],
),
Expand All @@ -79,7 +79,7 @@ def test_dremio_factory() -> None:
False,
[
'CREATE TABLE "test_database"."test_dataset"."event_test_table"'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE )'
' (\n"foo" VARCHAR ,\n"bar" BIGINT ,\n"baz" DOUBLE )'
],
),
],
Expand Down

0 comments on commit 9506938

Please sign in to comment.