Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add integration tests #140

Draft
wants to merge 24 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions aliases.sbt
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// Reusable per-project command sequences, interpolated into the CI alias below.
lazy val avroBuild = "project avro; test; schema"
lazy val schedulerBuild = "project scheduler; test; dockerComposeTest"

// Lint/format helpers: the "check" variants fail without modifying sources
// (for CI); the "run" variants rewrite sources in place (for local use).
addCommandAlias("checkFix", "scalafixAll --check OrganizeImports; scalafixAll --check")
addCommandAlias("runFix", "scalafixAll OrganizeImports; scalafixAll")
addCommandAlias("checkFmt", "scalafmtCheckAll; scalafmtSbtCheck")
addCommandAlias("runFmt", "scalafmtAll; scalafmtSbt")

// Aggregate aliases invoked by the CI pipeline: full verification build, and
// the release sequence (schema generation then sbt-release on scheduler).
addCommandAlias("ciBuild", s"checkFmt; checkFix; $avroBuild; $schedulerBuild;")
addCommandAlias("ciRelease", "clean; schema; project scheduler; release with-defaults")
17 changes: 14 additions & 3 deletions build.sbt
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import Aliases._
import Release._
import DockerPublish._

Expand All @@ -8,9 +7,12 @@ ThisBuild / semanticdbVersion := scalafixSemanticdb
ThisBuild / scalafixDependencies += "com.github.liancheng" %% "organize-imports" % "0.6.0"

Global / onChangedBuildSource := ReloadOnSourceChanges
Global / excludeLintKeys ++= Set(testCasesJar, composeContainerPauseBeforeTestSeconds)

Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-oF")

lazy val IntegrationTest = config("it") extend Test

val commonSettings = Seq(
organization := "com.sky",
scalaVersion := "2.13.10"
Expand All @@ -24,15 +26,25 @@ val compilerSettings = Seq(
}
)

// Wires the custom IntegrationTest ("it") configuration into a project:
// sbt's default it settings, scalafix support for it sources, and the hooks
// the sbt-docker-compose plugin uses for dockerComposeTest.
lazy val integrationTestSettings =
Defaults.itSettings ++ inConfig(IntegrationTest)(scalafixConfigSettings(IntegrationTest)) ++ Seq(
// Package the it sources into a jar so the plugin can run them as test cases.
testCasesPackageTask := (IntegrationTest / sbt.Keys.packageBin).value,
testCasesJar := (IntegrationTest / packageBin / artifactPath).value.getAbsolutePath,
// Build the local Docker image before composing the stack.
dockerImageCreationTask := (Docker / publishLocal).value,
// Grace period before tests run, giving the composed containers time to come
// up (the compose file declares no readiness checks). NOTE(review): 45s is a
// heuristic — confirm it suffices on CI hardware.
composeContainerPauseBeforeTestSeconds := 45
)

// sbt-buildinfo: generates a BuildInfo object exposing name/version/
// scalaVersion/sbtVersion under the com.sky package at compile time.
val buildInfoSettings = Seq(
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion, sbtVersion),
buildInfoPackage := "com.sky"
)

lazy val scheduler = (project in file("scheduler"))
.enablePlugins(BuildInfoPlugin, JavaAppPackaging, UniversalDeployPlugin, JavaAgent, DockerPlugin)
.enablePlugins(BuildInfoPlugin, JavaAppPackaging, UniversalDeployPlugin, JavaAgent, DockerPlugin, DockerComposePlugin)
.settings(commonSettings)
.settings(compilerSettings)
.settings(integrationTestSettings)
.configs(IntegrationTest)
.settings(
libraryDependencies ++= Dependencies.all,
addCompilerPlugin("org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full),
Expand All @@ -58,7 +70,6 @@ lazy val avro = (project in file("avro"))
lazy val root = (project in file("."))
.withId("kafka-message-scheduler")
.settings(commonSettings)
.settings(defineCommandAliases)
.settings(dockerImageCreationTask := (scheduler / Docker / publishLocal).value)
.aggregate(scheduler, avro)
.enablePlugins(DockerComposePlugin)
Expand Down
13 changes: 0 additions & 13 deletions project/Aliases.scala

This file was deleted.

35 changes: 21 additions & 14 deletions project/Dependencies.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,25 +8,32 @@ object Dependencies {
val stream = "com.typesafe.akka" %% "akka-stream" % version
val streamKafka = "com.typesafe.akka" %% "akka-stream-kafka" % "3.0.1"
val slf4j = "com.typesafe.akka" %% "akka-slf4j" % version
val testKit = "com.typesafe.akka" %% "akka-testkit" % version % Test
val streamTestKit = "com.typesafe.akka" %% "akka-stream-testkit" % version % Test
val testKit = "com.typesafe.akka" %% "akka-testkit" % version % "test,it"
val streamTestKit = "com.typesafe.akka" %% "akka-stream-testkit" % version % "test,it"
val base = Seq(actor, stream, streamKafka, slf4j)
val test = Seq(testKit, streamTestKit)
}

// Cats core plus test-only helpers. The "test,it" scope makes the test
// artifacts available to both the unit and integration test configurations.
object Cats {
private val version = "2.7.0"
val core = "org.typelevel" %% "cats-core" % version
val testKit = "org.typelevel" %% "cats-testkit" % version % "test,it"
// cats-scalatest is versioned independently of cats itself.
val scalatest = "com.ironcorelabs" %% "cats-scalatest" % "3.1.1" % "test,it"
val base = Seq(core)
val test = Seq(testKit, scalatest)
}

// docker-java client used by the integration tests to talk to the Docker
// daemon (e.g. for containers started via docker-compose); test scopes only.
object DockerJava {
private val version = "3.2.14"
val core = "com.github.docker-java" % "docker-java" % version % "test,it"
// Apache HttpClient 5 transport backing the docker-java client.
val httpClient = "com.github.docker-java" % "docker-java-transport-httpclient5" % version % "test,it"
val test = Seq(core, httpClient)
}

// Kafka client for production code; the full broker artifact is pulled in for
// unit and integration tests only ("test,it").
object Kafka {
private val version = "3.1.0"
val kafkaClients = "org.apache.kafka" % "kafka-clients" % version
val kafka = "org.apache.kafka" %% "kafka" % version % "test,it"
val base = Seq(kafkaClients)
val test = Seq(kafka)
}
Expand All @@ -50,7 +57,7 @@ object Dependencies {
private val version = "0.9.28"
val refined = "eu.timepit" %% "refined" % version
val pureconfig = "eu.timepit" %% "refined-pureconfig" % version
val scalaCheck = "eu.timepit" %% "refined-scalacheck" % version % Test
val scalaCheck = "eu.timepit" %% "refined-scalacheck" % version % "test,it"
val base = Seq(refined, pureconfig)
val test = Seq(scalaCheck)
}
Expand All @@ -64,13 +71,13 @@ object Dependencies {
val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.4.5" % Runtime
val logbackEncoder = "net.logstash.logback" % "logstash-logback-encoder" % "7.3" % Runtime

val embeddedKafka = "io.github.embeddedkafka" %% "embedded-kafka" % "3.4.0" % Test
val mockito = "org.mockito" % "mockito-core" % "5.1.1" % Test
val randomDataGenerator = "com.danielasfregola" %% "random-data-generator" % "2.9" % Test
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.17.0" % Test
val scalaCheckDatetime = "com.47deg" %% "scalacheck-toolbox-datetime" % "0.7.0" % Test
val scalaTest = "org.scalatest" %% "scalatest" % "3.2.15" % Test
val scalaTestPlusMockito = "org.scalatestplus" %% "mockito-3-12" % "3.2.10.0" % Test
val embeddedKafka = "io.github.embeddedkafka" %% "embedded-kafka" % "3.4.0" % "test,it"
val mockito = "org.mockito" % "mockito-core" % "5.1.1" % "test,it"
val randomDataGenerator = "com.danielasfregola" %% "random-data-generator" % "2.9" % "test,it"
val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.17.0" % "test,it"
val scalaCheckDatetime = "com.47deg" %% "scalacheck-toolbox-datetime" % "0.7.0" % "test,it"
val scalaTest = "org.scalatest" %% "scalatest" % "3.2.15" % "test,it"
val scalaTestPlusMockito = "org.scalatestplus" %% "mockito-3-12" % "3.2.10.0" % "test,it"

val core: Seq[ModuleID] = Akka.base ++ Cats.base ++ Kafka.base ++ Kamon.all ++ PureConfig.all ++ Refined.base ++ Seq(
avro4s,
Expand All @@ -83,7 +90,7 @@ object Dependencies {
logbackClassic,
logbackEncoder
)
val test: Seq[ModuleID] = Akka.test ++ Cats.test ++ Kafka.test ++ Refined.test ++ Seq(
val test: Seq[ModuleID] = Akka.test ++ Cats.test ++ DockerJava.test ++ Kafka.test ++ Refined.test ++ Seq(
embeddedKafka,
mockito,
randomDataGenerator,
Expand Down
47 changes: 47 additions & 0 deletions scheduler/docker/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Compose stack used by the integration tests (run via sbt-docker-compose).
version: '3.9'

services:

# The kafka-message-scheduler under test. The <localBuild> suffix is an
# sbt-docker-compose directive: use the locally built image, never pull.
kms:
image: skyuk/kafka-message-scheduler:latest<localBuild>
depends_on:
- kafka
- zookeeper
environment:
KAFKA_BROKERS: kafka:9092
# Configure two schedule topics through system properties.
JAVA_TOOL_OPTIONS:
-Dscheduler.reader.schedule-topics.0=scheduleTopic
-Dscheduler.reader.schedule-topics.1=extraScheduleTopic
ports:
- "9095:9095"

zookeeper:
image: confluentinc/cp-zookeeper:7.0.1
environment:
ZOOKEEPER_CLIENT_PORT: '2181'

# Single-broker Kafka. PLAINTEXT listener serves in-network clients on 9092;
# OUTSIDE listener is published on 9093 for tests running on the host
# (matches the EmbeddedKafkaConfig port in IntegrationBase).
kafka:
image: confluentinc/cp-kafka:7.0.1
ports:
- "9093:9093"
depends_on:
- zookeeper
environment:
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
# Single broker, so the offsets topic cannot be replicated.
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: '1'
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,OUTSIDE://localhost:9093
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT

# Optional Kafka UI for local debugging; uncomment when investigating topics.
# akhq:
# image: tchiotludo/akhq:0.23.0
# depends_on:
# - kafka
# ports:
# - "8080:8080"
# environment:
# AKHQ_CONFIGURATION: |
# akhq:
# connections:
# kafka:
# properties:
# bootstrap.servers: "kafka:9092"
23 changes: 23 additions & 0 deletions scheduler/src/it/scala/base/DockerBase.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
package base

import com.github.dockerjava.api.DockerClient
import com.github.dockerjava.core.{DefaultDockerClientConfig, DockerClientImpl}
import com.github.dockerjava.httpclient5.ApacheDockerHttpClient

import scala.concurrent.duration._
import scala.jdk.DurationConverters._

/** Provides a shared docker-java client for integration tests.
  *
  * Host, TLS and registry settings are resolved by docker-java's default
  * config builder (standard `DOCKER_HOST` / `DOCKER_TLS_VERIFY` environment
  * variables and `~/.docker` config).
  */
trait DockerBase {

  val dockerConfig: DefaultDockerClientConfig = DefaultDockerClientConfig.createDefaultConfigBuilder.build

  // Apache HttpClient 5 transport for the docker-java client.
  // Fix: use scala.jdk.DurationConverters (standard library since 2.13) for
  // FiniteDuration -> java.time.Duration instead of the legacy
  // scala-java8-compat `scala.compat.java8.DurationConverters`, removing the
  // need for that extra dependency.
  val httpClient: ApacheDockerHttpClient = new ApacheDockerHttpClient.Builder()
    .dockerHost(dockerConfig.getDockerHost)
    .sslConfig(dockerConfig.getSSLConfig)
    .maxConnections(100)
    .connectionTimeout(30.seconds.toJava)
    .responseTimeout(45.seconds.toJava)
    .build()

  val dockerClient: DockerClient = DockerClientImpl.getInstance(dockerConfig, httpClient)

}
32 changes: 32 additions & 0 deletions scheduler/src/it/scala/base/IntegrationBase.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package base

import com.danielasfregola.randomdatagenerator.RandomDataGenerator
import com.sky.kms.base.KafkaIntSpecBase
import io.github.embeddedkafka.EmbeddedKafkaConfig
import org.scalatest._
import org.scalatest.concurrent.{Eventually, ScalaFutures}
import org.scalatest.featurespec.FixtureAnyFeatureSpec
import org.scalatest.matchers.should.Matchers

import scala.concurrent.duration._

/** Base class for integration tests running against the docker-composed
  * scheduler and Kafka (see scheduler/docker/docker-compose.yml).
  *
  * Mixes a FeatureSpec with ConfigMap fixtures, random data generation,
  * async/eventually support, Kafka helpers and the shared Docker client.
  * NOTE(review): trait linearization order matters for the fixture hooks —
  * keep BeforeAndAfterEach before the Kafka/Docker traits.
  */
abstract class IntegrationBase
extends FixtureAnyFeatureSpec
with fixture.ConfigMapFixture
with BeforeAndAfterEach
with Matchers
with RandomDataGenerator
with ScalaFutures
with Eventually
with KafkaIntSpecBase
with DockerBase {

// Futures/eventually wait up to the Kafka consumer timeout, polling every 200ms.
override implicit val patienceConfig: PatienceConfig = PatienceConfig(kafkaConsumerTimeout, 200.millis)

// Port 9093 matches the OUTSIDE listener published by the compose file's
// kafka service, so tests connect to the containerised broker from the host.
override implicit lazy val kafkaConfig: EmbeddedKafkaConfig = EmbeddedKafkaConfig(kafkaPort = 9093)

// Skip to the end of all partitions after each test so a later test does not
// consume records produced by an earlier one.
override def afterEach(): Unit = {
super.afterEach()
seekToEnd()
}
}
Loading