Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

overwrite filter strategy #1343

Open
wants to merge 12 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
79 changes: 79 additions & 0 deletions content/docs/guides/datasets/incremental.md
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,8 @@ INSERT INTO default_schema.example_incremental (timestamp, action)

If no unique key is specified, then the merge condition (`T.user_id = S.user_id` in this example) is set as `false`, causing rows to always be inserted rather than merged.



## A merge example

<div className="bp3-callout bp3-icon-info-sign" markdown="1">
Expand Down Expand Up @@ -203,6 +205,83 @@ WHEN NOT MATCHED THEN
INSERT (timestamp, user_id, action) VALUES (timestamp, user_id, action)
```

## An insert overwrite example

If your table doesn't have a unique key and you want to keep your tasks idempotent, use the insert overwrite strategy. Insert overwrite will either detect which partitions to overwrite automatically, or you can specify an overwrite filter (even setting it dynamically). The first example below detects the partitions to overwrite.

```sql
config {
type: "incremental",
strategy: "insert_overwrite",
bigquery: {
partitionBy: "DATE(timestamp)"
}
}

SELECT timestamp, action FROM weblogs.user_actions
${ when(incremental(), `WHERE DATE(timestamp) = "` + dataform.projectConfig.vars.execution_date +`"`) }
```

### Generated SQL

As above, the SQL generated by this example will depend on the warehouse type, but generally follows the same format.

If the dataset doesn't exist yet:

```js
CREATE OR REPLACE TABLE default_schema.example_incremental PARTITION BY DATE(timestamp) AS
SELECT timestamp, action
FROM weblogs.user_actions;
```

When incrementally processing new rows:

```js
DELETE FROM default_schema.example_incremental
WHERE DATE(timestamp) in
(
select DATE(timestamp) from (
SELECT timestamp, action
FROM weblogs.user_actions
WHERE DATE(timestamp) = "2022-06-01")
);
INSERT INTO default_schema.example_incremental (timestamp, action)
SELECT timestamp, action
FROM weblogs.user_actions
WHERE DATE(timestamp) = "2022-06-01";
```

### Setting overwrite filter

To simplify the DELETE statement, an overwrite filter can be set. Below is an example using a variable that specifies the particular day being processed.

```sql
config {
type: "incremental",
strategy: "insert_overwrite",
overwriteFilter: `${"DATE(timestamp) = \"" + dataform.projectConfig.vars.execution_date + "\""}`,
bigquery: {
partitionBy: "DATE(timestamp)"
}
}

SELECT timestamp, action FROM weblogs.user_actions
${ when(incremental(), `WHERE DATE(timestamp) = "` + dataform.projectConfig.vars.execution_date +`"`) }
```

### Generated SQL

The initial table will be created in the same way as in the previous example, but the incremental step is faster.

```js
DELETE FROM default_schema.example_incremental
WHERE DATE(timestamp) = "2022-06-01";
INSERT INTO default_schema.example_incremental (timestamp, action)
SELECT timestamp, action
FROM weblogs.user_actions
WHERE DATE(timestamp) = "2022-06-01";
```

## Daily snapshots with incremental datasets

Incremental datasets can be used to create a daily snapshot of mutable external datasets.
Expand Down
45 changes: 37 additions & 8 deletions core/adapters/bigquery.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,22 +32,35 @@ export class BigQueryAdapter extends Adapter implements IAdapter {
if (table.type === "incremental") {
if (!this.shouldWriteIncrementally(runConfig, tableMetadata)) {
tasks.add(Task.statement(this.createOrReplace(table)));
} else {

} else if (table.uniqueKey && table.uniqueKey.length > 0 && (!table.strategy || table.strategy==="merge")){
tasks.add(
Task.statement(
table.uniqueKey && table.uniqueKey.length > 0
? this.mergeInto(
this.mergeInto(
table.target,
tableMetadata?.fields.map(f => f.name),
this.where(table.incrementalQuery || table.query, table.where),
table.uniqueKey,
table.bigquery && table.bigquery.updatePartitionFilter
)
: this.insertInto(
table.target,
tableMetadata?.fields.map(f => f.name).map(column => `\`${column}\``),
this.where(table.incrementalQuery || table.query, table.where)
)
)
);
} else {
if (table.strategy==="insert_overwrite"){
if(table.overwriteFilter){
tasks.add(Task.statement(this.deleteWithStaticFilter(table.target,table.overwriteFilter)));
}else {
tasks.add(Task.statement(this.deleteDynamically(table.target,table.bigquery.partitionBy,
this.where(table.incrementalQuery || table.query, table.where))));
}
}
tasks.add(
Task.statement(
this.insertInto(
table.target,
tableMetadata?.fields.map(f => f.name).map(column => `\`${column}\``),
this.where(table.incrementalQuery || table.query, table.where)
)
)
);
}
Expand Down Expand Up @@ -114,6 +127,22 @@ export class BigQueryAdapter extends Adapter implements IAdapter {
create or replace view ${this.resolveTarget(target)} as ${query}`;
}

private deleteWithStaticFilter(
target: dataform.ITarget,
overwriteFilter: string
) {
return `delete from ${this.resolveTarget(target)} T where ${overwriteFilter}`;
}

private deleteDynamically(
target: dataform.ITarget,
partitionBy: string,
query: string
) {
return `delete from ${this.resolveTarget(target)} T where ${partitionBy} in (select ${partitionBy} from (${query}))`;
}


private mergeInto(
target: dataform.ITarget,
columns: string[],
Expand Down
45 changes: 33 additions & 12 deletions core/adapters/snowflake.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,27 +31,33 @@ export class SnowflakeAdapter extends Adapter implements IAdapter {
Task.statement(this.dropIfExists(table.target, this.oppositeTableType(baseTableType)))
);
}

if (table.type === "incremental") {
if (!this.shouldWriteIncrementally(runConfig, tableMetadata)) {
tasks.add(Task.statement(this.createOrReplace(table)));
} else {
} else if (table.uniqueKey && table.uniqueKey.length > 0 && (!table.strategy || table.strategy==="merge")){
tasks.add(
Task.statement(
table.uniqueKey && table.uniqueKey.length > 0
? this.mergeInto(
this.mergeInto(
table.target,
tableMetadata.fields.map(f => f.name),
this.where(table.incrementalQuery || table.query, table.where),
table.uniqueKey
)
: this.insertInto(
table.target,
tableMetadata.fields.map(f => f.name),
this.where(table.incrementalQuery || table.query, table.where)
)
)
);
)));
} else {
if (table.strategy==="insert_overwrite"){
if(table.overwriteFilter){
tasks.add(Task.statement(this.deleteWithStaticFilter(table.target,table.overwriteFilter)));
}else {
throw new Error("insert_overwrite requires setting overwriteFilter.");
}
}
tasks.add(Task.statement(this.insertInto(
table.target,
tableMetadata.fields.map(f => f.name),
this.where(table.incrementalQuery || table.query, table.where)
)));


}
} else {
tasks.add(Task.statement(this.createOrReplace(table)));
Expand Down Expand Up @@ -92,6 +98,21 @@ export class SnowflakeAdapter extends Adapter implements IAdapter {
}as ${table.query}`;
}

private deleteWithStaticFilter(
target: dataform.ITarget,
overwriteFilter: string
) {
return `delete from ${this.resolveTarget(target)} T where ${overwriteFilter}`;
}

private deleteDynamically(
target: dataform.ITarget,
partitionBy: string,
query: string
) {
return `delete from ${this.resolveTarget(target)} T where ${partitionBy} in (select ${partitionBy} from (${query}))`;
}

private mergeInto(
target: dataform.ITarget,
columns: string[],
Expand Down
28 changes: 26 additions & 2 deletions core/table.ts
Original file line number Diff line number Diff line change
Expand Up @@ -324,6 +324,10 @@ export interface ITableConfig
* or the [Snowflake materialized view docs](https://docs.snowflake.com/en/user-guide/views-materialized.html).
*/
materialized?: boolean;

strategy?: string;

overwriteFilter?: string;
}

// TODO: This needs to be a method, I'm really not sure why, but it hits a runtime failure otherwise.
Expand All @@ -347,7 +351,9 @@ export const ITableConfigProperties = () =>
"database",
"columns",
"description",
"materialized"
"materialized",
"strategy",
"overwriteFilter"
]);

/**
Expand Down Expand Up @@ -384,7 +390,8 @@ export class Table {
"postOps",
"actionDescriptor",
"disabled",
"where"
"where",
"strategy"
]
};

Expand Down Expand Up @@ -468,6 +475,14 @@ export class Table {
this.materialized(config.materialized);
}

if(config.strategy){
this.strategy(config.strategy);
}

if(config.overwriteFilter){
this.overwriteFilter(config.overwriteFilter);
}

return this;
}

Expand Down Expand Up @@ -515,6 +530,15 @@ export class Table {
// Marks whether this dataset should be created as a materialized view.
public materialized(materialized: boolean) {
this.proto.materialized = materialized;
}

// Sets the incremental write strategy. The adapters recognise "merge"
// (the default when a uniqueKey is set) and "insert_overwrite"; other
// values fall through to a plain insert.
public strategy(strategy: string) {
this.proto.strategy = strategy;
}

// Sets a static SQL predicate used by the "insert_overwrite" strategy to
// delete previously-written rows before the incremental insert.
public overwriteFilter(overwriteFilter: string) {
this.proto.overwriteFilter = overwriteFilter;
}


public snowflake(snowflake: dataform.ISnowflakeOptions) {
checkExcessProperties(
Expand Down
2 changes: 2 additions & 0 deletions protos/core.proto
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,8 @@ message Table {
string where = 8 [deprecated = true];
string incremental_query = 26;
repeated string unique_key = 30;
string strategy = 36;
string overwrite_filter = 37;

// Pre/post operations.
repeated string pre_ops = 13;
Expand Down
Loading