Skip to content

Commit

Permalink
Add test-doc-generator and document all tests
Browse files Browse the repository at this point in the history
Signed-off-by: David Kornel <[email protected]>
  • Loading branch information
kornys committed Oct 3, 2024
1 parent 7a873b8 commit 0c994bf
Show file tree
Hide file tree
Showing 3 changed files with 117 additions and 0 deletions.
42 changes: 42 additions & 0 deletions docs/io/streams/e2e/flink/sql/SqlExampleST.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# SqlExampleST

**Description:** This test suite verifies that flink-sql-example works correctly

**Before tests execution steps:**

| Step | Action | Result |
| - | - | - |
| 1. | Deploy the strimzi kafka operator | Strimzi operator is deployed |
| 2. | Deploy the flink operator | Flink operator is deployed |
| 3. | Deploy the apicurio operator | Apicurio operator is deployed |
| 4. | Deploy the cert-manager operator | Cert-manager operator is deployed |

**Labels:**

* `flink-sql-example` (description file doesn't exist)
* `flink` (description file doesn't exist)

<hr style="border:1px solid">

## testFlinkSqlExample

**Description:** Test verifies that flink-sql-example recommended app https://github.com/streamshub/flink-sql-examples/tree/main/recommendation-app works

**Steps:**

| Step | Action | Result |
| - | - | - |
| 1. | Create namespace, serviceaccount and roles for flink | Resources created |
| 2. | Deploy apicurio registry | Apicurio registry is up and running |
| 3. | Deploy simple example kafka my-cluster | Kafka is up and running |
| 4. | Deploy productInventory.csv as configmap | Configmap created |
| 5. | Deploy data-generator deployment | Deployment is up and running |
| 6. | Deploy FlinkDeployment from sql-example | FlinkDeployment is up and tasks are deployed and it sends filtered data into flink.recommended.products topic |
| 7. | Deploy strimzi-kafka-client consumer as job and consume messages from kafka topic flink.recommended.products | Consumer is deployed and it consumes messages |
| 8. | Verify that messages are present | Messages are present |

**Labels:**

* `flink-sql-example` (description file doesn't exist)
* `flink` (description file doesn't exist)

35 changes: 35 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

<test-frame.version>0.7.0</test-frame.version>
<skodjob-doc.version>0.3.0</skodjob-doc.version>
<fabric8.version>6.13.4</fabric8.version>
<log4j.version>2.24.1</log4j.version>
<slf4j.version>2.0.16</slf4j.version>
Expand Down Expand Up @@ -101,6 +102,12 @@
<artifactId>test-frame-log-collector</artifactId>
<version>${test-frame.version}</version>
</dependency>
<dependency>
<groupId>io.skodjob</groupId>
<artifactId>test-docs-generator-maven-plugin</artifactId>
<version>${skodjob-doc.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>io.skodjob</groupId>
<artifactId>test-frame-metrics-collector</artifactId>
Expand Down Expand Up @@ -359,6 +366,15 @@
</ignoredUnusedDeclaredDependencies>
</configuration>
</execution>
<execution>
<phase>generate-sources</phase>
<goals>
<goal>copy-dependencies</goal>
</goals>
<configuration>
<outputDirectory>${project.build.directory}/lib</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
Expand Down Expand Up @@ -428,6 +444,25 @@
<alwaysPreserveUnknown>true</alwaysPreserveUnknown>
</configuration>
</plugin>
<plugin>
<groupId>io.skodjob</groupId>
<artifactId>test-docs-generator-maven-plugin</artifactId>
<version>${skodjob-doc.version}</version>
<executions>
<execution>
<phase>install</phase>
<goals>
<goal>test-docs-generator</goal>
</goals>
</execution>
</executions>
<configuration>
<testsPath>${project.basedir}/src/test/java/io/streams/e2e/</testsPath>
<docsPath>${project.basedir}/docs/</docsPath>
<generateFmf>false</generateFmf>
<generateDirs>true</generateDirs>
</configuration>
</plugin>
</plugins>
</build>

Expand Down
40 changes: 40 additions & 0 deletions src/test/java/io/streams/e2e/flink/sql/SqlExampleST.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,11 @@
import io.fabric8.kubernetes.api.model.ConfigMapBuilder;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.NamespaceBuilder;
import io.skodjob.annotations.Desc;
import io.skodjob.annotations.Label;
import io.skodjob.annotations.Step;
import io.skodjob.annotations.SuiteDoc;
import io.skodjob.annotations.TestDoc;
import io.skodjob.testframe.TestFrameConstants;
import io.skodjob.testframe.resources.KubeResourceManager;
import io.streams.clients.kafka.StrimziKafkaClients;
Expand Down Expand Up @@ -45,6 +50,19 @@

@Tag(FLINK)
@Tag(FLINK_SQL_EXAMPLE)
@SuiteDoc(
description = @Desc("This test suite verifies that flink-sql-example works correctly"),
beforeTestSteps = {
@Step(value = "Deploy the strimzi kafka operator", expected = "Strimzi operator is deployed"),
@Step(value = "Deploy the flink operator", expected = "Flink operator is deployed"),
@Step(value = "Deploy the apicurio operator", expected = "Apicurio operator is deployed"),
@Step(value = "Deploy the cert-manager operator", expected = "Cert-manager operator is deployed")
},
labels = {
@Label(value = FLINK_SQL_EXAMPLE),
@Label(value = FLINK),
}
)
public class SqlExampleST extends Abstract {

String namespace = "flink";
Expand All @@ -61,6 +79,28 @@ void prepareOperators() throws IOException {
FlinkManifestInstaller.install()).join();
}

@TestDoc(
description = @Desc("Test verifies that flink-sql-example recommended app " +
"https://github.com/streamshub/flink-sql-examples/tree/main/recommendation-app works"),
steps = {
@Step(value = "Create namespace, serviceaccount and roles for flink", expected = "Resources created"),
@Step(value = "Deploy apicurio registry", expected = "Apicurio registry is up and running"),
@Step(value = "Deploy simple example kafka my-cluster", expected = "Kafka is up and running"),
@Step(value = "Deploy productInventory.csv as configmap", expected = "Configmap created"),
@Step(value = "Deploy data-generator deployment", expected = "Deployment is up and running"),
@Step(value = "Deploy FlinkDeployment from sql-example",
expected = "FlinkDeployment is up and tasks are deployed and it sends filtered " +
"data into flink.recommended.products topic"),
@Step(value = "Deploy strimzi-kafka-client consumer as job and consume messages from" +
"kafka topic flink.recommended.products",
expected = "Consumer is deployed and it consumes messages"),
@Step(value = "Verify that messages are present", expected = "Messages are present"),
},
labels = {
@Label(value = FLINK_SQL_EXAMPLE),
@Label(value = FLINK),
}
)
@Test
void testFlinkSqlExample() throws IOException {
// Create namespace
Expand Down

0 comments on commit 0c994bf

Please sign in to comment.