Skip to content

Commit

Permalink
e2e tests PostgreSql Sink
Browse files Browse the repository at this point in the history
  • Loading branch information
AnkitCLI committed Feb 7, 2025
1 parent adfb81e commit 0c1f6f0
Show file tree
Hide file tree
Showing 4 changed files with 214 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -144,3 +144,143 @@ Feature: PostgreSQL - Verify data transfer from BigQuery source to PostgreSQL si
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

# Pipeline: BigQuery source -> PostgreSQL sink.
# Exercises the sink's Advanced operation "UPDATE" keyed on relationTableKey,
# then verifies row-for-row equality between source and target tables.
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink with Advanced operations update for table key
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
# --- BigQuery source configuration (values resolved from pluginParameters.properties) ---
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "datasetProject" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
# --- PostgreSQL sink configuration ---
Then Navigate to the properties page of plugin: "PostgreSQL"
Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
Then Replace input plugin property: "database" with value: "databaseName"
Then Replace input plugin property: "tableName" with value: "targetTable"
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
Then Enter input plugin property: "referenceName" with value: "targetRef"
Then Replace input plugin property: "dbSchemaName" with value: "schema"
# NOTE(review): "opeationName" looks like a typo of "operationName" — confirm it
# matches the widget's actual data-cy/property id before changing this locator.
Then Select radio button plugin property: "opeationName" with value: "UPDATE"
Then Click on the Add Button of the property: "relationTableKey" with value:
| PostgreSQLTableKey |
Then Validate "PostgreSQL" plugin properties
Then Close the Plugin Properties page
# --- Preview, deploy, run, and validate transferred data ---
Then Save the pipeline
Then Preview and run the pipeline
Then Verify the preview of pipeline is "success"
Then Click on preview data for PostgreSQL sink
Then Close the preview data
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

# Pipeline: BigQuery source -> PostgreSQL sink.
# Same flow as the UPDATE scenario above, but exercises the Advanced
# operation "UPSERT" keyed on relationTableKey.
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink with Advanced operations Upsert for table key
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
# --- BigQuery source configuration ---
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "datasetProject" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Verify the Output Schema matches the Expected Schema: "bqOutputMultipleDatatypesSchema"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
# --- PostgreSQL sink configuration ---
Then Navigate to the properties page of plugin: "PostgreSQL"
Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
Then Replace input plugin property: "database" with value: "databaseName"
Then Replace input plugin property: "tableName" with value: "targetTable"
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
Then Enter input plugin property: "referenceName" with value: "targetRef"
Then Replace input plugin property: "dbSchemaName" with value: "schema"
# NOTE(review): "opeationName" looks like a typo of "operationName" — confirm it
# matches the widget's actual data-cy/property id before changing this locator.
Then Select radio button plugin property: "opeationName" with value: "UPSERT"
Then Click on the Add Button of the property: "relationTableKey" with value:
| PostgreSQLTableKey |
Then Validate "PostgreSQL" plugin properties
Then Close the Plugin Properties page
# --- Preview, deploy, run, and validate transferred data ---
Then Save the pipeline
Then Preview and run the pipeline
Then Verify the preview of pipeline is "success"
Then Click on preview data for PostgreSQL sink
Then Close the preview data
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

# Pipeline: BigQuery source -> PostgreSQL sink configured through a reusable
# managed Connection. The @CONNECTION tag triggers the TestSetupHooks
# @Before/@After hooks that generate the random "connection.name" property
# and delete the connection after the scenario.
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @CONNECTION @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink successfully using Connection
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
# --- BigQuery source configuration ---
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "project" with value: "projectId"
Then Enter input plugin property: "datasetProject" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
# --- Create a new PostgreSQL connection from the sink's Browse Connections UI ---
Then Navigate to the properties page of plugin: "PostgreSQL"
And Click plugin property: "switch-useConnection"
And Click on the Browse Connections button
And Click on the Add Connection button
Then Click plugin property: "connector-PostgreSQL"
And Enter input plugin property: "name" with value: "connection.name"
Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
Then Replace input plugin property: "database" with value: "databaseName"
Then Click on the Test Connection button
And Verify the test connection is successful
Then Click on the Create button
# --- Use the freshly created connection for the sink ---
Then Select connection: "connection.name"
Then Enter input plugin property: "referenceName" with value: "targetRef"
Then Replace input plugin property: "tableName" with value: "targetTable"
Then Replace input plugin property: "dbSchemaName" with value: "schema"
Then Validate "PostgreSQL" plugin properties
Then Close the Plugin Properties page
# --- Preview, deploy, run, and validate transferred data ---
Then Save the pipeline
Then Preview and run the pipeline
Then Verify the preview of pipeline is "success"
Then Click on preview data for PostgreSQL sink
Then Close the preview data
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table
Original file line number Diff line number Diff line change
Expand Up @@ -136,3 +136,51 @@ Feature: PostgreSQL - Verify data transfer to PostgreSQL sink with macro argumen
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table

# Pipeline: BigQuery source -> PostgreSQL sink with connectionArguments,
# operationName, tableName and dbSchemaName supplied as macros and resolved
# through runtime arguments at deploy time.
@BQ_SOURCE_TEST @Postgresql_Required @POSTGRESQL_TEST_TABLE @PLUGIN-1628 @Plugin-1526
Scenario: To verify data is getting transferred from BigQuery source to PostgreSQL sink using connection arguments and operations as macro
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Source"
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "PostgreSQL" from the plugins list as: "Sink"
Then Connect plugins: "BigQuery" and "PostgreSQL" to establish connection
# --- BigQuery source: all connection properties macro-enabled ---
Then Navigate to the properties page of plugin: "BigQuery"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Click on the Macro button of Property: "projectId" and set the value to: "bqProjectId"
Then Click on the Macro button of Property: "datasetProjectId" and set the value to: "bqDatasetProjectId"
Then Click on the Macro button of Property: "dataset" and set the value to: "bqDataset"
Then Click on the Macro button of Property: "table" and set the value to: "bqTable"
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
# --- PostgreSQL sink: advanced properties macro-enabled ---
Then Navigate to the properties page of plugin: "PostgreSQL"
Then Select dropdown plugin property: "select-jdbcPluginName" with option value: "driverName"
Then Replace input plugin property: "host" with value: "host" for Credentials and Authorization related fields
Then Replace input plugin property: "port" with value: "port" for Credentials and Authorization related fields
Then Replace input plugin property: "user" with value: "username" for Credentials and Authorization related fields
Then Replace input plugin property: "password" with value: "password" for Credentials and Authorization related fields
Then Enter input plugin property: "referenceName" with value: "targetRef"
Then Replace input plugin property: "database" with value: "databaseName"
Then Click on the Macro button of Property: "connectionArguments" and set the value to: "PostgreSQLConnectionArguments"
Then Click on the Macro button of Property: "operationName" and set the value to: "PostgreSQLOperationName"
Then Click on the Macro button of Property: "tableName" and set the value to: "PostgreSQLTableName"
Then Click on the Macro button of Property: "dbSchemaName" and set the value to: "PostgreSQLSchemaName"
Then Validate "PostgreSQL" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
Then Deploy the pipeline
Then Run the Pipeline in Runtime
# --- Runtime arguments: each key must match the macro name set above; the
# value is the pluginParameters property holding the actual argument value. ---
Then Enter runtime argument value "projectId" for key "bqProjectId"
Then Enter runtime argument value "projectId" for key "bqDatasetProjectId"
Then Enter runtime argument value "dataset" for key "bqDataset"
Then Enter runtime argument value "bqSourceTable" for key "bqTable"
# Fixed: key must be "PostgreSQLConnectionArguments" (the macro set above);
# it previously used "PostgreSQLConnectionArgumentsList", leaving the
# ${PostgreSQLConnectionArguments} macro unresolved at runtime.
Then Enter runtime argument value "PostgreSQLConnectionArgumentsList" for key "PostgreSQLConnectionArguments"
Then Enter runtime argument value "PostgreSQLOperationName" for key "PostgreSQLOperationName"
Then Enter runtime argument value "targetTable" for key "PostgreSQLTableName"
Then Enter runtime argument value "schema" for key "PostgreSQLSchemaName"
Then Run the Pipeline in Runtime with runtime arguments
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate the values of records transferred to target PostgreSQL table is equal to the values from source BigQuery table
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
package io.cdap.plugin.common.stepsdesign;

import com.google.cloud.bigquery.BigQueryException;
import io.cdap.e2e.pages.actions.CdfConnectionActions;
import io.cdap.e2e.pages.actions.CdfPluginPropertiesActions;
import io.cdap.e2e.utils.BigQueryClient;
import io.cdap.e2e.utils.PluginPropertyUtils;
import io.cdap.plugin.PostgresqlClient;
Expand Down Expand Up @@ -159,4 +161,25 @@ private static void createSourceBQTableWithQueries(String bqCreateTableQueryFile
PluginPropertyUtils.addPluginProp("bqSourceTable", bqSourceTable);
BeforeActions.scenario.write("BQ Source Table " + bqSourceTable + " created successfully");
}

@Before(order = 1, value = "@CONNECTION")
public static void setNewConnectionName() {
  // Register a fresh, randomized connection name under the "connection.name"
  // plugin property so repeated runs never collide on an existing connection.
  String uniqueName = "PostgreSql" + RandomStringUtils.randomAlphanumeric(10);
  PluginPropertyUtils.addPluginProp("connection.name", uniqueName);
  BeforeActions.scenario.write("New Connection name: " + uniqueName);
}

/**
 * Deletes a connection through the Wrangler Connections page UI.
 * The call order below mirrors the UI flow and must not be changed:
 * open page -> expand type group -> open the row's action menu ->
 * choose "Delete" -> confirm via the Delete button in the dialog.
 *
 * @param connectionType connection type group to expand (e.g. "PostgreSql")
 * @param connectionName connection to delete
 *                       (NOTE(review): callers pass the property key "connection.name";
 *                       presumably the CdfConnectionActions helpers resolve it via
 *                       PluginPropertyUtils — confirm against the e2e framework)
 * @throws IOException if the underlying UI actions fail
 */
private static void deleteConnection(String connectionType, String connectionName) throws IOException {
CdfConnectionActions.openWranglerConnectionsPage();
CdfConnectionActions.expandConnections(connectionType);
CdfConnectionActions.openConnectionActionMenu(connectionType, connectionName);
CdfConnectionActions.selectConnectionAction(connectionType, connectionName, "Delete");
// Confirm the deletion dialog.
CdfPluginPropertiesActions.clickPluginPropertyButton("Delete");
}

/**
 * Cleanup hook for @CONNECTION scenarios: deletes the PostgreSql connection
 * created by {@link #setNewConnectionName()} and removes the transient
 * "connection.name" plugin property.
 *
 * NOTE(review): the method name says "BQConnection" but it deletes a
 * PostgreSql connection — the name appears copied from the BigQuery hooks.
 * Cucumber binds hooks by annotation, not name, so renaming to
 * deletePostgreSqlConnection would be safe; left unchanged here.
 */
@After(order = 1, value = "@CONNECTION")
public static void deleteBQConnection() throws IOException {
deleteConnection("PostgreSql", "connection.name");
PluginPropertyUtils.removePluginProp("connection.name");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,9 @@ invalidBoundingQuery=SELECT MIN(id),MAX(id) FROM table
invalidBoundingQueryValue=select;
invalidTable=table
#POSTGRESQL Valid Properties
PostgreSQLConnectionArgumentsList=fetchsize=1000
PostgreSQLOperationName=INSERT
PostgreSQLTableKey=col2
connectionArgumentsList=[{"key":"queryTimeout","value":"-1"}]
connectionTimeout=150
numberOfSplits=2
Expand Down

0 comments on commit 0c1f6f0

Please sign in to comment.