diff --git a/build.sbt b/build.sbt
index a2feb69e6b..6f2b8301ee 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,5 +1,6 @@
import Dependencies.globalExcludeDeps
import Dependencies.gson
+import Dependencies.bouncyCastle
import Settings._
import sbt.Keys.libraryDependencies
@@ -16,8 +17,8 @@ lazy val subProjects: Seq[Project] = Seq(
`aws-s3`,
`azure-documentdb`,
cassandra,
- //elastic6,
- //elastic7,
+ `elastic-common`,
+ opensearch,
elastic8,
ftp,
hazelcast,
@@ -135,9 +136,32 @@ lazy val cassandra = (project in file("kafka-connect-cassandra"))
.configureFunctionalTests()
.enablePlugins(PackPlugin)
-lazy val elastic8 = (project in file("kafka-connect-elastic8"))
+lazy val `elastic-common` = (project in file("kafka-connect-elastic-common"))
.dependsOn(common)
.dependsOn(`test-common` % "fun->compile")
+ .settings(
+ settings ++
+ Seq(
+ name := "kafka-connect-elastic-common",
+ description := "Kafka Connect compatible connectors to move data between Kafka and popular data stores",
+ libraryDependencies ++= baseDeps ++ kafkaConnectElasticBaseDeps,
+ publish / skip := true,
+ packExcludeJars := Seq(
+ "scala-.*\\.jar",
+ "zookeeper-.*\\.jar",
+ ),
+ ),
+ )
+ .configureAssembly()
+ .configureTests(baseTestDeps)
+ .configureIntegrationTests(kafkaConnectElastic8TestDeps)
+ .configureFunctionalTests()
+ .disablePlugins(PackPlugin)
+
+lazy val elastic8 = (project in file("kafka-connect-elastic8"))
+ .dependsOn(common)
+ .dependsOn(`elastic-common`)
+ .dependsOn(`test-common` % "fun->compile;it->compile")
.settings(
settings ++
Seq(
@@ -157,6 +181,29 @@ lazy val elastic8 = (project in file("kafka-connect-elastic8"))
.configureFunctionalTests()
.enablePlugins(PackPlugin)
+lazy val opensearch = (project in file("kafka-connect-opensearch"))
+ .dependsOn(common)
+ .dependsOn(`elastic-common`)
+ .dependsOn(`test-common` % "fun->compile;it->compile")
+ .settings(
+ settings ++
+ Seq(
+ name := "kafka-connect-opensearch",
+ description := "Kafka Connect compatible connectors to move data between Kafka and popular data stores",
+ libraryDependencies ++= baseDeps ++ kafkaConnectOpenSearchDeps,
+ publish / skip := true,
+ packExcludeJars := Seq(
+ "scala-.*\\.jar",
+ "zookeeper-.*\\.jar",
+ ),
+ ),
+ )
+ .configureAssembly()
+ .configureTests(baseTestDeps)
+ //.configureIntegrationTests(kafkaConnectOpenSearchTestDeps)
+ .configureFunctionalTests(bouncyCastle)
+ .enablePlugins(PackPlugin)
+
lazy val hazelcast = (project in file("kafka-connect-hazelcast"))
.dependsOn(common)
.settings(
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskId.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskId.scala
index 1284ebd377..c4818cf034 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskId.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskId.scala
@@ -20,8 +20,6 @@ import cats.implicits.toBifunctorOps
import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings.TASK_INDEX
import io.lenses.streamreactor.connect.aws.s3.source.distribution.PartitionHasher
-import java.util
-
case class ConnectorTaskId(name: String, maxTasks: Int, taskNo: Int) {
def ownsDir(dirPath: String): Boolean =
if (maxTasks == 1) true
@@ -29,9 +27,9 @@ case class ConnectorTaskId(name: String, maxTasks: Int, taskNo: Int) {
}
object ConnectorTaskId {
- def fromProps(props: util.Map[String, String]): Either[Throwable, ConnectorTaskId] = {
+ def fromProps(props: Map[String, String]): Either[Throwable, ConnectorTaskId] = {
for {
- taskIndexString <- Option(props.get(TASK_INDEX)).toRight(s"Missing $TASK_INDEX")
+ taskIndexString <- props.get(TASK_INDEX).toRight(s"Missing $TASK_INDEX")
taskIndex = taskIndexString.split(":")
_ <- if (taskIndex.size != 2) Left(s"Invalid $TASK_INDEX. Expecting TaskNumber:MaxTask format.") else Right(())
maxTasks <- taskIndex(1).toIntOption.toRight(
@@ -44,7 +42,7 @@ object ConnectorTaskId {
)
_ <- if (taskNumber < 0) Left(s"Invalid $TASK_INDEX. Expecting a positive integer but found:${taskIndex(0)}")
else Right(())
- maybeTaskName <- Option(props.get("name")).filter(_.trim.nonEmpty).toRight("Missing connector name")
+ maybeTaskName <- props.get("name").filter(_.trim.nonEmpty).toRight("Missing connector name")
} yield ConnectorTaskId(maybeTaskName, maxTasks, taskNumber)
}.leftMap(new IllegalArgumentException(_))
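
For context (not part of the patch): a minimal sketch of how a caller adapts to the new Scala `Map` signature of `ConnectorTaskId.fromProps`. It mirrors the `S3SinkTask` change further down; the `parseTaskId` helper name is illustrative only.

```scala
import scala.jdk.CollectionConverters.MapHasAsScala

// Kafka Connect still hands tasks a java.util.Map, so the conversion happens once
// at the boundary and the parser works on an immutable Scala Map from then on.
def parseTaskId(javaProps: java.util.Map[String, String]): ConnectorTaskId =
  ConnectorTaskId.fromProps(javaProps.asScala.toMap) match {
    case Left(err) => throw err // already an IllegalArgumentException (see leftMap above)
    case Right(id) => id
  }
```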
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/S3SinkTask.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/S3SinkTask.scala
index 33e3853f64..c8383f4b77 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/S3SinkTask.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/S3SinkTask.scala
@@ -57,7 +57,7 @@ class S3SinkTask extends SinkTask with ErrorHandler {
printAsciiHeader(manifest, "/aws-s3-sink-ascii.txt")
- ConnectorTaskId.fromProps(fallbackProps) match {
+ ConnectorTaskId.fromProps(fallbackProps.asScala.toMap) match {
case Left(value) => throw new IllegalArgumentException(value)
case Right(value) => connectorTaskId = value
}
@@ -67,7 +67,7 @@ class S3SinkTask extends SinkTask with ErrorHandler {
val contextProps = Option(context).flatMap(c => Option(c.configs())).map(_.asScala.toMap).getOrElse(Map.empty)
val props = MapUtils.mergeProps(contextProps, fallbackProps.asScala.toMap).asJava
val errOrWriterMan = for {
- config <- S3SinkConfig.fromProps(props)
+ config <- S3SinkConfig.fromProps(props.asScala.toMap)
s3Client <- AwsS3ClientCreator.make(config.s3Config)
storageInterface = new AwsS3StorageInterface(connectorTaskId, s3Client, config.batchDelete)
_ <- Try(setErrorRetryInterval(config.s3Config)).toEither
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfig.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfig.scala
index 8725880611..985611628e 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfig.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfig.scala
@@ -17,10 +17,6 @@ package io.lenses.streamreactor.connect.aws.s3.sink.config
import cats.syntax.all._
import com.datamountaineer.kcql.Kcql
import com.typesafe.scalalogging.LazyLogging
-import io.lenses.streamreactor.connect.aws.s3.config.ConnectorTaskId
-import io.lenses.streamreactor.connect.aws.s3.config.DataStorageSettings
-import io.lenses.streamreactor.connect.aws.s3.config.FormatSelection
-import io.lenses.streamreactor.connect.aws.s3.config.S3Config
import io.lenses.streamreactor.connect.aws.s3.config.S3ConfigSettings.SEEK_MAX_INDEX_FILES
import io.lenses.streamreactor.connect.aws.s3.config._
import io.lenses.streamreactor.connect.aws.s3.model.CompressionCodec
@@ -30,18 +26,17 @@ import io.lenses.streamreactor.connect.aws.s3.sink.commit.Count
import io.lenses.streamreactor.connect.aws.s3.sink.config.kcqlprops.S3SinkProps
import io.lenses.streamreactor.connect.aws.s3.sink.config.kcqlprops.S3SinkPropsSchema
import io.lenses.streamreactor.connect.aws.s3.sink.config.padding.PaddingService
-import io.lenses.streamreactor.connect.aws.s3.sink.naming.OffsetS3FileNamer
import io.lenses.streamreactor.connect.aws.s3.sink.naming.KeyNamer
-import io.lenses.streamreactor.connect.aws.s3.sink.naming.TopicPartitionOffsetS3FileNamer
+import io.lenses.streamreactor.connect.aws.s3.sink.naming.OffsetS3FileNamer
import io.lenses.streamreactor.connect.aws.s3.sink.naming.S3KeyNamer
+import io.lenses.streamreactor.connect.aws.s3.sink.naming.TopicPartitionOffsetS3FileNamer
-import java.util
import scala.jdk.CollectionConverters._
object S3SinkConfig {
def fromProps(
- props: util.Map[String, String],
+ props: Map[String, String],
)(
implicit
connectorTaskId: ConnectorTaskId,
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDef.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDef.scala
index fd63c41a21..43e99f7a31 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDef.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDef.scala
@@ -111,7 +111,7 @@ class S3SinkConfigDef() extends ConfigDef with LazyLogging {
}
-case class S3SinkConfigDefBuilder(props: util.Map[String, String])
+case class S3SinkConfigDefBuilder(props: Map[String, String])
extends BaseConfig(S3ConfigSettings.CONNECTOR_PREFIX, S3SinkConfigDef.config, props)
with KcqlSettings
with ErrorPolicySettings
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTask.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTask.scala
index bd534c3d31..b7e90aff1c 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTask.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/S3SourceTask.scala
@@ -62,7 +62,7 @@ class S3SourceTask extends SourceTask with LazyLogging {
val contextProperties = Option(context).flatMap(c => Option(c.configs()).map(_.asScala.toMap)).getOrElse(Map.empty)
val mergedProperties = MapUtils.mergeProps(contextProperties, props.asScala.toMap).asJava
(for {
- result <- S3SourceState.make(mergedProperties, contextOffsetFn)
+ result <- S3SourceState.make(mergedProperties.asScala.toMap, contextOffsetFn)
fiber <- result.partitionDiscoveryLoop.start
} yield {
s3SourceTaskState = result.state.some
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfig.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfig.scala
index 69b9dbf863..e52021eec6 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfig.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfig.scala
@@ -33,14 +33,13 @@ import io.lenses.streamreactor.connect.aws.s3.storage.FileMetadata
import io.lenses.streamreactor.connect.aws.s3.storage.ListResponse
import io.lenses.streamreactor.connect.aws.s3.storage.StorageInterface
-import java.util
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.util.Try
object S3SourceConfig {
def fromProps(
- props: util.Map[String, String],
+ props: Map[String, String],
): Either[Throwable, S3SourceConfig] =
S3SourceConfig(S3SourceConfigDefBuilder(props))
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigDef.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigDef.scala
index 37cfac8ac6..6e3a54f0eb 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigDef.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigDef.scala
@@ -141,7 +141,7 @@ class S3SourceConfigDef() extends ConfigDef with LazyLogging {
}
-case class S3SourceConfigDefBuilder(props: util.Map[String, String])
+case class S3SourceConfigDefBuilder(props: Map[String, String])
extends BaseConfig(S3ConfigSettings.CONNECTOR_PREFIX, S3SourceConfigDef.config, props)
with KcqlSettings
with ErrorPolicySettings
diff --git a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/state/S3SourceBuilder.scala b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/state/S3SourceBuilder.scala
index 406cfe188c..aacbb64059 100644
--- a/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/state/S3SourceBuilder.scala
+++ b/kafka-connect-aws-s3/src/main/scala/io/lenses/streamreactor/connect/aws/s3/source/state/S3SourceBuilder.scala
@@ -28,12 +28,11 @@ import io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManager
import io.lenses.streamreactor.connect.aws.s3.source.reader.ReaderManagerState
import io.lenses.streamreactor.connect.aws.s3.storage.AwsS3StorageInterface
-import java.util
import scala.jdk.CollectionConverters.IteratorHasAsScala
object S3SourceState extends StrictLogging {
def make(
- props: util.Map[String, String],
+ props: Map[String, String],
contextOffsetFn: S3Location => Option[S3Location],
): IO[BuilderResult] =
for {
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskIdTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskIdTest.scala
index 45bd32eb70..a52e87d16b 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskIdTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/ConnectorTaskIdTest.scala
@@ -21,17 +21,16 @@ import io.lenses.streamreactor.connect.aws.s3.source.distribution.PartitionHashe
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters._
class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
private val connectorName = "connectorName"
"ConnectorTaskId" should {
"create the instance" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0:2", "name" -> connectorName)
- ConnectorTaskId.fromProps(from.asJava) shouldBe ConnectorTaskId(connectorName, 2, 0).asRight[String]
+ ConnectorTaskId.fromProps(from) shouldBe ConnectorTaskId(connectorName, 2, 0).asRight[String]
}
"fail if max tasks is not valid integer" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0:2a", "name" -> connectorName)
- val actual = ConnectorTaskId.fromProps(from.asJava)
+ val actual = ConnectorTaskId.fromProps(from)
actual match {
case Left(e) => e.getMessage shouldBe s"Invalid $TASK_INDEX. Expecting an integer but found:2a"
case Right(_) => fail("Should have failed")
@@ -39,14 +38,14 @@ class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
}
"fail if task number is not a valid integer" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0a:2", "name" -> connectorName)
- ConnectorTaskId.fromProps(from.asJava) match {
+ ConnectorTaskId.fromProps(from) match {
case Left(value) => value.getMessage shouldBe s"Invalid $TASK_INDEX. Expecting an integer but found:0a"
case Right(_) => fail("Should have failed")
}
}
"fail if task number < 0" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "-1:2", "name" -> connectorName)
- ConnectorTaskId.fromProps(from.asJava) match {
+ ConnectorTaskId.fromProps(from) match {
case Left(value) => value.getMessage shouldBe s"Invalid $TASK_INDEX. Expecting a positive integer but found:-1"
case Right(value) => fail(s"Should have failed but got $value")
}
@@ -54,14 +53,14 @@ class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
}
"fail if max tasks is zero" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0:0", "name" -> connectorName)
- ConnectorTaskId.fromProps(from.asJava) match {
+ ConnectorTaskId.fromProps(from) match {
case Left(value) => value.getMessage shouldBe s"Invalid $TASK_INDEX. Expecting a positive integer but found:0"
case Right(value) => fail(s"Should have failed but got $value")
}
}
"fail if max tasks is negative" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0:-1", "name" -> connectorName)
- ConnectorTaskId.fromProps(from.asJava) match {
+ ConnectorTaskId.fromProps(from) match {
case Left(value) => value.getMessage shouldBe s"Invalid $TASK_INDEX. Expecting a positive integer but found:-1"
case Right(value) => fail(s"Should have failed but got $value")
}
@@ -69,7 +68,7 @@ class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
"own the partitions when max task is 1" in {
val from = Map("a" -> "1", "b" -> "2", S3ConfigSettings.TASK_INDEX -> "0:1", "name" -> connectorName)
- val actual = ConnectorTaskId.fromProps(from.asJava).getOrElse(fail("Should be valid"))
+ val actual = ConnectorTaskId.fromProps(from).getOrElse(fail("Should be valid"))
Seq("/myTopic/", "/anotherTopic/", "/thirdTopic/")
.flatMap { value =>
@@ -86,12 +85,12 @@ class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
"b" -> "2",
S3ConfigSettings.TASK_INDEX -> "0:2",
"name" -> connectorName,
- ).asJava).getOrElse(fail("Should be valid"))
+ )).getOrElse(fail("Should be valid"))
val two = ConnectorTaskId.fromProps(Map("a" -> "1",
"b" -> "2",
S3ConfigSettings.TASK_INDEX -> "1:2",
"name" -> connectorName,
- ).asJava).getOrElse(fail("Should be valid"))
+ )).getOrElse(fail("Should be valid"))
PartitionHasher.hash(2, "1") shouldBe 1
one.ownsDir("1") shouldBe false
@@ -108,17 +107,17 @@ class ConnectorTaskIdTest extends AnyWordSpec with Matchers {
"b" -> "2",
S3ConfigSettings.TASK_INDEX -> "0:3",
"name" -> connectorName,
- ).asJava).getOrElse(fail("Should be valid"))
+ )).getOrElse(fail("Should be valid"))
val two = ConnectorTaskId.fromProps(Map("a" -> "1",
"b" -> "2",
S3ConfigSettings.TASK_INDEX -> "1:3",
"name" -> connectorName,
- ).asJava).getOrElse(fail("Should be valid"))
+ )).getOrElse(fail("Should be valid"))
val three = ConnectorTaskId.fromProps(Map("a" -> "1",
"b" -> "2",
S3ConfigSettings.TASK_INDEX -> "2:3",
"name" -> connectorName,
- ).asJava).getOrElse(fail("Should be valid"))
+ )).getOrElse(fail("Should be valid"))
PartitionHasher.hash(3, "1") shouldBe 1
one.ownsDir("1") shouldBe false
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3SourceConfigTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3SourceConfigTest.scala
index ea6e333d2f..b3e036a9c2 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3SourceConfigTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/config/S3SourceConfigTest.scala
@@ -28,8 +28,6 @@ import io.lenses.streamreactor.connect.aws.s3.source.config.S3SourceConfigDefBui
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class S3SourceConfigTest extends AnyFunSuite with Matchers {
private val Identity: String = "identity"
private val Credential: String = "credential"
@@ -62,7 +60,7 @@ class S3SourceConfigTest extends AnyFunSuite with Matchers {
"connect.s3.partition.search.recurse.levels" -> "0",
)
- S3SourceConfig(S3SourceConfigDefBuilder(props.asJava)) match {
+ S3SourceConfig(S3SourceConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(config) =>
config.bucketOptions.size shouldBe 3
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/DeleteModeSettingsTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/DeleteModeSettingsTest.scala
index 118c65fb6e..68903650b1 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/DeleteModeSettingsTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/DeleteModeSettingsTest.scala
@@ -20,8 +20,6 @@ import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks._
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DeleteModeSettingsTest extends AnyFlatSpec with Matchers with LazyLogging {
private val deleteModeMap = Table[String, String, Boolean](
("testName", "value", "expected"),
@@ -36,7 +34,7 @@ class DeleteModeSettingsTest extends AnyFlatSpec with Matchers with LazyLogging
S3SinkConfigDefBuilder(Map(
"connect.s3.kcql" -> "abc",
"connect.s3.delete.mode" -> value,
- ).asJava).batchDelete() should be(expected)
+ )).batchDelete() should be(expected)
}
}
}
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDefBuilderTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDefBuilderTest.scala
index a6af26ad21..38b47fe6e7 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDefBuilderTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigDefBuilderTest.scala
@@ -27,7 +27,6 @@ import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration.DurationInt
import scala.jdk.CollectionConverters.IteratorHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matchers with EitherValues {
@@ -42,7 +41,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName PARTITIONBY _key STOREAS `CSV` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1",
)
- val kcql = S3SinkConfigDefBuilder(props.asJava).getKCQL
+ val kcql = S3SinkConfigDefBuilder(props).getKCQL
kcql should have size 1
val element = kcql.head
@@ -59,7 +58,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from $TopicName PARTITIONBY _key STOREAS CSV WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) => value.map(_.dataStorage) should be(List(DataStorageSettings.Default))
}
@@ -70,7 +69,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from $TopicName PARTITIONBY _key STOREAS `JSON` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) => value.map(_.dataStorage) should be(List(DataStorageSettings.enabled))
}
@@ -81,7 +80,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from $TopicName PARTITIONBY _key STOREAS `PARQUET` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true, '${DataStorageSettings.StoreKeyKey}'=true, '${DataStorageSettings.StoreValueKey}'=true, '${DataStorageSettings.StoreMetadataKey}'=false, '${DataStorageSettings.StoreHeadersKey}'=false)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) =>
value.map(_.dataStorage) should be(List(DataStorageSettings(true, true, true, false, false)))
@@ -110,7 +109,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
|""".stripMargin,
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) =>
value.map(_.dataStorage) should be(List(DataStorageSettings(true, true, true, false, false),
@@ -125,7 +124,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
)
val commitPolicy =
- S3SinkConfigDefBuilder(props.asJava).commitPolicy(S3SinkConfigDefBuilder(props.asJava).getKCQL.head)
+ S3SinkConfigDefBuilder(props).commitPolicy(S3SinkConfigDefBuilder(props).getKCQL.head)
commitPolicy.conditions should be(
Seq(
@@ -143,7 +142,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
)
val commitPolicy =
- S3SinkConfigDefBuilder(props.asJava).commitPolicy(S3SinkConfigDefBuilder(props.asJava).getKCQL.head)
+ S3SinkConfigDefBuilder(props).commitPolicy(S3SinkConfigDefBuilder(props).getKCQL.head)
commitPolicy.conditions should be(
Seq(
@@ -159,7 +158,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
)
val commitPolicy =
- S3SinkConfigDefBuilder(props.asJava).commitPolicy(S3SinkConfigDefBuilder(props.asJava).getKCQL.head)
+ S3SinkConfigDefBuilder(props).commitPolicy(S3SinkConfigDefBuilder(props).getKCQL.head)
commitPolicy.conditions should be(
Seq(
@@ -175,7 +174,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName BATCH = 150 STOREAS `CSV` LIMIT 550",
)
- val kcql = S3SinkConfigDefBuilder(props.asJava).getKCQL
+ val kcql = S3SinkConfigDefBuilder(props).getKCQL
kcql.head.getBatchSize should be(150)
kcql.head.getLimit should be(550)
@@ -186,7 +185,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName STOREAS `JSON` WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true, '${DataStorageSettings.StoreKeyKey}'=true, '${DataStorageSettings.StoreValueKey}'=true, '${DataStorageSettings.StoreMetadataKey}'=false, '${DataStorageSettings.StoreHeadersKey}'=false)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) =>
value.map(_.dataStorage) should be(List(DataStorageSettings(envelope = true,
@@ -203,7 +202,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName STOREAS `JSON` WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true, '${DataStorageSettings.StoreKeyKey}'=true, '${DataStorageSettings.StoreValueKey}'=true, '${DataStorageSettings.StoreMetadataKey}'=false, '${DataStorageSettings.StoreHeadersKey}'=false)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => fail(value.toString)
case Right(value) =>
value.map(_.dataStorage) should be(List(DataStorageSettings(envelope = true,
@@ -220,7 +219,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName STOREAS `BYTES_VALUEONLY` WITH_FLUSH_COUNT = 1",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)).left.value.getMessage should startWith(
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)).left.value.getMessage should startWith(
"Unsupported format - BYTES_VALUEONLY. Please note",
)
}
@@ -230,7 +229,7 @@ class S3SinkConfigDefBuilderTest extends AnyFlatSpec with MockitoSugar with Matc
"connect.s3.kcql" -> s"insert into $BucketName:$PrefixName select * from $TopicName STOREAS `BYTES` WITH_FLUSH_COUNT = 3",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)).left.value.getMessage should startWith(
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)).left.value.getMessage should startWith(
"FLUSH_COUNT > 1 is not allowed for BYTES",
)
}
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigTest.scala
index b567e0cc3e..9846ec4954 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/S3SinkConfigTest.scala
@@ -20,8 +20,6 @@ import io.lenses.streamreactor.connect.aws.s3.config.DataStorageSettings
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class S3SinkConfigTest extends AnyFunSuite with Matchers {
private implicit val connectorTaskId = ConnectorTaskId("connector", 1, 0)
test("envelope and CSV storage is not allowed") {
@@ -29,7 +27,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `CSV` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => value.getMessage shouldBe "Envelope is not supported for format CSV."
case Right(_) => fail("Should fail since envelope and CSV storage is not allowed")
}
@@ -40,7 +38,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `Parquet` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(error) => fail("Should not fail since envelope and Parquet storage is allowed", error)
case Right(_) => succeed
}
@@ -50,7 +48,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `Avro` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(error) => fail("Should not fail since envelope and Avro storage is allowed", error)
case Right(_) => succeed
}
@@ -60,7 +58,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `Json` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(error) => fail("Should not fail since envelope and Json storage is allowed", error)
case Right(_) => succeed
}
@@ -70,7 +68,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `Text` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => value.getMessage shouldBe "Envelope is not supported for format TEXT."
case Right(_) => fail("Should fail since text and envelope storage is not allowed")
}
@@ -80,7 +78,7 @@ class S3SinkConfigTest extends AnyFunSuite with Matchers {
"connect.s3.kcql" -> s"insert into mybucket:myprefix select * from TopicName PARTITIONBY _key STOREAS `Bytes` WITHPARTITIONER=Values WITH_FLUSH_COUNT = 1 PROPERTIES('${DataStorageSettings.StoreEnvelopeKey}'=true)",
)
- SinkBucketOptions(S3SinkConfigDefBuilder(props.asJava)) match {
+ SinkBucketOptions(S3SinkConfigDefBuilder(props)) match {
case Left(value) => value.getMessage shouldBe "Envelope is not supported for format BYTES."
case Right(_) => fail("Should fail since envelope and bytes storage is not allowed")
}
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/TestConfigDefBuilder.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/TestConfigDefBuilder.scala
index 1a36d07a9f..0b783b4766 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/TestConfigDefBuilder.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/sink/config/TestConfigDefBuilder.scala
@@ -15,8 +15,6 @@
*/
package io.lenses.streamreactor.connect.aws.s3.sink.config
-import scala.jdk.CollectionConverters.MapHasAsJava
-
object TestConfigDefBuilder {
def apply(pairs: (String, String)*): S3SinkConfigDefBuilder = {
@@ -24,7 +22,7 @@ object TestConfigDefBuilder {
val newMap = map + {
"connect.s3.kcql" -> "dummy value"
}
- S3SinkConfigDefBuilder(newMap.asJava)
+ S3SinkConfigDefBuilder(newMap)
}
}
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/DeleteModeSettingsTest.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/DeleteModeSettingsTest.scala
index e89dc9fcd6..d6e7f84f32 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/DeleteModeSettingsTest.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/DeleteModeSettingsTest.scala
@@ -20,8 +20,6 @@ import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks._
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DeleteModeSettingsTest extends AnyFlatSpec with Matchers with LazyLogging {
private val deleteModeMap = Table[String, String, Boolean](
("testName", "value", "expected"),
@@ -36,7 +34,7 @@ class DeleteModeSettingsTest extends AnyFlatSpec with Matchers with LazyLogging
S3SourceConfigDefBuilder(Map(
"connect.s3.kcql" -> "abc",
"connect.s3.delete.mode" -> value,
- ).asJava).batchDelete() should be(expected)
+ )).batchDelete() should be(expected)
}
}
}
diff --git a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala
index 5e59c3495b..7fdce305bd 100644
--- a/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala
+++ b/kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/source/config/S3SourceConfigTests.scala
@@ -21,7 +21,6 @@ import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import scala.concurrent.duration._
-import scala.jdk.CollectionConverters._
class S3SourceConfigTests extends AnyFunSuite with Matchers {
test("default recursive levels is 0") {
@@ -31,7 +30,7 @@ class S3SourceConfigTests extends AnyFunSuite with Matchers {
SOURCE_PARTITION_SEARCH_INTERVAL_MILLIS -> "1000",
TASK_INDEX -> "1:0",
KCQL_CONFIG -> "INSERT INTO topic SELECT * FROM bucket:/a/b/c",
- ).asJava,
+ ),
) match {
case Left(value) => fail(value.toString)
case Right(value) =>
@@ -46,7 +45,7 @@ class S3SourceConfigTests extends AnyFunSuite with Matchers {
SOURCE_PARTITION_SEARCH_INTERVAL_MILLIS -> "1000",
TASK_INDEX -> "1:0",
KCQL_CONFIG -> "INSERT INTO topic SELECT * FROM bucket:/a/b/c",
- ).asJava,
+ ),
) match {
case Left(value) => fail(value.toString)
case Right(value) =>
@@ -61,7 +60,7 @@ class S3SourceConfigTests extends AnyFunSuite with Matchers {
SOURCE_PARTITION_SEARCH_INTERVAL_MILLIS -> "1000",
TASK_INDEX -> "1:0",
KCQL_CONFIG -> "INSERT INTO topic SELECT * FROM bucket:/a/b/c",
- ).asJava,
+ ),
) match {
case Left(value) => fail(value.toString)
case Right(value) =>
@@ -75,7 +74,7 @@ class S3SourceConfigTests extends AnyFunSuite with Matchers {
SOURCE_PARTITION_SEARCH_INTERVAL_MILLIS -> "1000",
TASK_INDEX -> "1:0",
KCQL_CONFIG -> "INSERT INTO topic SELECT * FROM bucket:/a/b/c",
- ).asJava,
+ ),
) match {
case Left(value) => fail(value.toString)
case Right(value) =>
diff --git a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala
index 6f68837212..cf51a6de7b 100644
--- a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala
+++ b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbConfig.scala
@@ -22,7 +22,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.ErrorPolicySe
import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
-import java.util
import com.microsoft.azure.documentdb.ConsistencyLevel
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
@@ -149,7 +148,7 @@ object DocumentDbConfig {
)
}
-case class DocumentDbConfig(props: util.Map[String, String])
+case class DocumentDbConfig(props: Map[String, String])
extends BaseConfig(DocumentDbConfigConstants.CONNECTOR_PREFIX, DocumentDbConfig.config, props)
with KcqlSettings
with DatabaseSettings
diff --git a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
index 374a67b2fd..5d996a2ac6 100644
--- a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
+++ b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.azure.documentdb.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
import com.datamountaineer.streamreactor.connect.azure.documentdb.DocumentClientProvider
@@ -90,7 +91,7 @@ class DocumentDbSinkConnector private[sink] (builder: DocumentDbSinkSettings =>
* @param props A map of properties for the connector and worker
*/
override def start(props: util.Map[String, String]): Unit = {
- val config = Try(DocumentDbConfig(props)) match {
+ val config = Try(DocumentDbConfig(props.asScala.toMap)) match {
case Failure(f) =>
throw new ConnectException(s"Couldn't start Azure DocumentDb sink due to configuration error: ${f.getMessage}",
f,
@@ -100,7 +101,7 @@ class DocumentDbSinkConnector private[sink] (builder: DocumentDbSinkSettings =>
configProps = props
//check input topics
- Helpers.checkInputTopics(DocumentDbConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(DocumentDbConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
val settings = DocumentDbSinkSettings(config)
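
A minimal sketch of the fail-fast idiom introduced above, assuming `Helpers.checkInputTopics` now returns an `Either` instead of throwing; `validateKcql` below is a hypothetical stand-in for it, not the project's API.

```scala
import cats.implicits.toBifunctorOps

object FailFastSketch {
  // Hypothetical stand-in for Helpers.checkInputTopics: pure validation returning Either.
  def validateKcql(key: String, props: Map[String, String]): Either[Throwable, Unit] =
    props.get(key).toRight(new IllegalArgumentException(s"Missing $key")).map(_ => ())

  def start(props: Map[String, String]): Unit = {
    // leftMap(throw _) re-raises any Left immediately, keeping the connector's
    // previous behaviour of failing start-up on invalid configuration.
    validateKcql("connect.documentdb.kcql", props).leftMap(throw _)
    ()
  }
}
```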
diff --git a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTask.scala b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTask.scala
index 8df4b8e661..e4a7c6ffdc 100644
--- a/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTask.scala
+++ b/kafka-connect-azure-documentdb/src/main/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTask.scala
@@ -29,6 +29,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.CollectionHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.util.Failure
import scala.util.Success
import scala.util.Try
@@ -52,7 +53,7 @@ class DocumentDbSinkTask extends SinkTask with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
val config = if (context.configs().isEmpty) props else context.configs()
- val taskConfig = Try(DocumentDbConfig(config)) match {
+ val taskConfig = Try(DocumentDbConfig(config.asScala.toMap)) match {
case Failure(f) =>
throw new ConnectException("Couldn't start Azure Document DB Sink due to configuration error.", f)
case Success(s) => s
diff --git a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbSinkSettingsTest.scala b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbSinkSettingsTest.scala
index c1bb11743b..4dfe5dc60a 100644
--- a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbSinkSettingsTest.scala
+++ b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/config/DocumentDbSinkSettingsTest.scala
@@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
private val connection = "https://accountName.documents.azure.com:443/"
@@ -31,7 +29,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.DATABASE_CONFIG -> "dbs/database1",
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
intercept[ConfigException] {
DocumentDbConfig(map)
@@ -43,7 +41,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.DATABASE_CONFIG -> "dbs/database1",
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
intercept[ConfigException] {
DocumentDbConfig(map)
@@ -57,7 +55,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1;INSERT INTO coll2 SELECT a as F1, b as F2 FROM topic2",
- ).asJava
+ )
val config = DocumentDbConfig(map)
val settings = DocumentDbSinkSettings(config)
@@ -75,7 +73,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1 IGNORE a,b,c",
- ).asJava
+ )
val config = DocumentDbConfig(map)
val settings = DocumentDbSinkSettings(config)
@@ -94,7 +92,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1 PK a,b",
- ).asJava
+ )
val config = DocumentDbConfig(map)
val settings = DocumentDbSinkSettings(config)
@@ -112,7 +110,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> "invalid",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1 PK a,b",
- ).asJava
+ )
val config = DocumentDbConfig(map)
intercept[ConfigException] {
@@ -126,7 +124,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO SELECT * FROM topic1",
- ).asJava
+ )
val config = DocumentDbConfig(map)
intercept[IllegalArgumentException] {
@@ -140,7 +138,7 @@ class DocumentDbSinkSettingsTest extends AnyWordSpec with Matchers {
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
val config = DocumentDbConfig(map)
intercept[ConfigException] {
diff --git a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskJsonTest.scala b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskJsonTest.scala
index 3ac1c71a8e..c71d434ad9 100644
--- a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskJsonTest.scala
+++ b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskJsonTest.scala
@@ -27,8 +27,6 @@ import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DocumentDbSinkTaskJsonTest extends AnyWordSpec with Matchers with MockitoSugar with MatchingArgument {
private val connection = "https://accountName.documents.azure.com:443/"
@@ -39,7 +37,7 @@ class DocumentDbSinkTaskJsonTest extends AnyWordSpec with Matchers with MockitoS
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -142,7 +140,7 @@ class DocumentDbSinkTaskJsonTest extends AnyWordSpec with Matchers with MockitoS
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -245,7 +243,7 @@ class DocumentDbSinkTaskJsonTest extends AnyWordSpec with Matchers with MockitoS
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "UPSERT INTO coll1 SELECT * FROM topic1 PK time",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
diff --git a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskMapTest.scala b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskMapTest.scala
index 5d7b5b794e..d9366d1a66 100644
--- a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskMapTest.scala
+++ b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskMapTest.scala
@@ -27,8 +27,6 @@ import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DocumentDbSinkTaskMapTest extends AnyWordSpec with Matchers with MockitoSugar with MatchingArgument {
private val connection = "https://accountName.documents.azure.com:443/"
@@ -43,7 +41,7 @@ class DocumentDbSinkTaskMapTest extends AnyWordSpec with Matchers with MockitoSu
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -153,7 +151,7 @@ class DocumentDbSinkTaskMapTest extends AnyWordSpec with Matchers with MockitoSu
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -265,7 +263,7 @@ class DocumentDbSinkTaskMapTest extends AnyWordSpec with Matchers with MockitoSu
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "UPSERT INTO coll1 SELECT * FROM topic1 PK time",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
diff --git a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskStructTest.scala b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskStructTest.scala
index eb23485c53..cb4d1b7465 100644
--- a/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskStructTest.scala
+++ b/kafka-connect-azure-documentdb/src/test/scala/com/datamountaineer/streamreactor/connect/azure/documentdb/sink/DocumentDbSinkTaskStructTest.scala
@@ -27,8 +27,6 @@ import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class DocumentDbSinkTaskStructTest extends AnyWordSpec with Matchers with MockitoSugar with MatchingArgument {
private val connection = "https://accountName.documents.azure.com:443/"
@@ -39,7 +37,7 @@ class DocumentDbSinkTaskStructTest extends AnyWordSpec with Matchers with Mockit
DocumentDbConfigConstants.CONNECTION_CONFIG -> connection,
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -143,7 +141,7 @@ class DocumentDbSinkTaskStructTest extends AnyWordSpec with Matchers with Mockit
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "INSERT INTO coll1 SELECT * FROM topic1;INSERT INTO coll2 SELECT * FROM topic2",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
@@ -253,7 +251,7 @@ class DocumentDbSinkTaskStructTest extends AnyWordSpec with Matchers with Mockit
DocumentDbConfigConstants.MASTER_KEY_CONFIG -> "secret",
DocumentDbConfigConstants.CONSISTENCY_CONFIG -> ConsistencyLevel.Eventual.toString,
DocumentDbConfigConstants.KCQL_CONFIG -> "UPSERT INTO coll1 SELECT * FROM topic1 PK time",
- ).asJava
+ )
val documentClient = mock[DocumentClient]
val dbResource: ResourceResponse[Database] = mock[ResourceResponse[Database]]
diff --git a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/TestCassandraConnectionSecure.scala b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/TestCassandraConnectionSecure.scala
index 0d83bc463f..d404406671 100644
--- a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/TestCassandraConnectionSecure.scala
+++ b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/TestCassandraConnectionSecure.scala
@@ -23,8 +23,6 @@ import org.scalatest.wordspec.AnyWordSpec
import org.scalatest.DoNotDiscover
import org.scalatest.Suite
-import scala.jdk.CollectionConverters.MapHasAsJava
-
/**
* Created by andrew@datamountaineer.com on 14/04/16.
* stream-reactor
@@ -41,7 +39,7 @@ class TestCassandraConnectionSecure extends AnyWordSpec with Matchers with ItTes
CassandraConfigConstants.USERNAME -> "cassandra",
CassandraConfigConstants.PASSWD -> "cassandra",
CassandraConfigConstants.KCQL -> "INSERT INTO TABLE SELECT * FROM TOPIC",
- ).asJava
+ )
val taskConfig = CassandraConfigSink(props)
val conn = CassandraConnection(taskConfig)
diff --git a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/TestCassandraJsonWriter.scala b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/TestCassandraJsonWriter.scala
index 2271915eef..8fb83fddd6 100644
--- a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/TestCassandraJsonWriter.scala
+++ b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/TestCassandraJsonWriter.scala
@@ -181,7 +181,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> s"INSERT INTO $table SELECT id, int_field1, double_field1,timestamp_field1 FROM TOPICA; INSERT INTO $table1 SELECT id, int_field2, double_field2,timestamp_field2 FROM TOPICA",
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.NOOP.toString,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -267,7 +267,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> s"INSERT INTO $table SELECT * FROM TOPICA; INSERT INTO $table SELECT * FROM TOPICB",
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.THROW.toString,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -335,7 +335,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> s"INSERT INTO $table SELECT id, inner1.int_field, inner2.* FROM TOPIC",
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.NOOP.toString,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
val writer = CassandraWriter(taskConfig, context)
@@ -424,7 +424,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> s"INSERT INTO $table SELECT id, inner1.int_field, inner2.* FROM TOPIC",
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.NOOP.toString,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -475,7 +475,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.USERNAME -> userName,
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kcql,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -560,7 +560,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.KCQL -> kcql,
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.RETRY.toString,
CassandraConfigConstants.ERROR_RETRY_INTERVAL -> "500",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
val writer = CassandraWriter(taskConfig, context)
@@ -619,7 +619,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kcql,
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.NOOP.toString,
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
val writer = CassandraWriter(taskConfig, context)
@@ -852,7 +852,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -904,7 +904,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -967,7 +967,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -1032,7 +1032,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -1095,7 +1095,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -1152,7 +1152,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> kql,
CassandraConfigConstants.DELETE_ROW_ENABLED -> "true",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
@@ -1198,7 +1198,7 @@ class TestCassandraJsonWriter
CassandraConfigConstants.USERNAME -> userName,
CassandraConfigConstants.PASSWD -> password,
CassandraConfigConstants.KCQL -> s"INSERT INTO $table SELECT key, name FROM TOPIC",
- ).asJava
+ )
val taskConfig = new CassandraConfigSink(props)
diff --git a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimestamp.scala b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimestamp.scala
index 8c4e155dfa..914fcabd84 100644
--- a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimestamp.scala
+++ b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimestamp.scala
@@ -32,6 +32,7 @@ import org.scalatest.wordspec.AnyWordSpec
import scala.annotation.nowarn
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsJava
@DoNotDiscover
@nowarn
@@ -65,7 +66,7 @@ class TestCassandraSourceTaskTimestamp
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
insertIntoTimestampTable(session, keyspace, tableName, "id1", "magic_string", getFormattedDateNow(), 1.toByte)
@@ -100,7 +101,7 @@ class TestCassandraSourceTaskTimestamp
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
insertIntoTimestampTable(session, keyspace, tableName, "id1", "magic_string", getFormattedDateNow(), 1.toByte)
@@ -126,7 +127,7 @@ class TestCassandraSourceTaskTimestamp
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
for (i <- 1 to 10) {
insertIntoTimestampTable(session,
@@ -160,7 +161,7 @@ class TestCassandraSourceTaskTimestamp
val mapper = new ObjectMapper()
//start task
- task.start(config)
+ task.start(config.asJava)
insertIntoTimestampTable(session, keyspace, tableName, "id1", "magic_string", getFormattedDateNow(), 1.toByte)
@@ -188,7 +189,7 @@ class TestCassandraSourceTaskTimestamp
insertIntoTimestampTable(session, keyspace, tableName, "id1", "magic_string", getFormattedDateNow(), 1.toByte)
try {
- task.start(config)
+ task.start(config.asJava)
fail()
} catch {
case _: ConfigException => // Expected, so continue
diff --git a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimeuuid.scala b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimeuuid.scala
index fb995016ee..80921c1b60 100644
--- a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimeuuid.scala
+++ b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceTaskTimeuuid.scala
@@ -31,6 +31,7 @@ import org.scalatest.Suite
import scala.annotation.nowarn
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsJava
@DoNotDiscover
@nowarn
@@ -64,7 +65,7 @@ class TestCassandraSourceTaskTimeuuid
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
insertIntoTimeuuidTable(session, keyspace, tableName, "id1", "magic_string")
@@ -97,7 +98,7 @@ class TestCassandraSourceTaskTimeuuid
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
insertIntoTimeuuidTable(session, keyspace, tableName, "id1", "magic_string")
@@ -122,7 +123,7 @@ class TestCassandraSourceTaskTimeuuid
task.initialize(taskContext)
//start task
- task.start(config)
+ task.start(config.asJava)
for (i <- 1 to 10) {
insertIntoTimeuuidTable(session, keyspace, tableName, s"id$i", s"magic_string_$i")
@@ -145,7 +146,7 @@ class TestCassandraSourceTaskTimeuuid
insertIntoTimeuuidTable(session, keyspace, tableName, "id1", "magic_string")
try {
- task.start(config)
+ task.start(config.asJava)
fail()
} catch {
case _: ConfigException => // Expected, so continue
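
With the test fixtures now building plain Scala `Map[String, String]` props, the Connect `SourceTask.start` call (which takes a `java.util.Map`) needs an explicit `.asJava` at the boundary, hence the extra `MapHasAsJava` import above. A minimal, self-contained sketch of the round trip; the property keys are illustrative:

```scala
import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.MapHasAsScala

object MapConversionSketch extends App {
  // plain Scala map, as the test fixtures now build
  val config: Map[String, String] = Map(
    "connect.cassandra.contact.points" -> "localhost",
    "connect.cassandra.key.space"      -> "sink_test",
  )

  // Kafka Connect task APIs expect java.util.Map, so convert at the boundary
  val javaConfig: java.util.Map[String, String] = config.asJava

  // and back again when a Connect callback hands a java map to Scala code
  val roundTrip: Map[String, String] = javaConfig.asScala.toMap

  assert(roundTrip == config)
}
```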
diff --git a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceUtil.scala b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceUtil.scala
index 3f189b60e5..3d284ae77c 100644
--- a/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceUtil.scala
+++ b/kafka-connect-cassandra/src/it/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraSourceUtil.scala
@@ -5,11 +5,8 @@ import com.datastax.driver.core.Session
import java.text.SimpleDateFormat
import java.util.Date
-import java.util.Map
import java.util.UUID
-import scala.jdk.CollectionConverters.MapHasAsJava
-
trait TestCassandraSourceUtil {
def createTimestampTable(session: Session, keySpace: String): String = {
@@ -135,5 +132,5 @@ trait TestCassandraSourceUtil {
CassandraConfigConstants.TIMESLICE_DELAY -> "0",
CassandraConfigConstants.POLL_INTERVAL -> "500",
CassandraConfigConstants.FETCH_SIZE -> "2",
- ).asJava
+ )
}
diff --git a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/CassandraConnection.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/CassandraConnection.scala
index fd4f449af4..62c70b2194 100644
--- a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/CassandraConnection.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/CassandraConnection.scala
@@ -15,9 +15,9 @@
*/
package com.datamountaineer.streamreactor.connect.cassandra
-import com.datamountaineer.streamreactor.common.config.SSLConfig
-import com.datamountaineer.streamreactor.common.config.SSLConfigContext
import com.datamountaineer.streamreactor.connect.cassandra.config.CassandraConfigConstants
+import com.datamountaineer.streamreactor.connect.cassandra.config.SSLConfig
+import com.datamountaineer.streamreactor.connect.cassandra.config.SSLConfigContext
import com.datamountaineer.streamreactor.connect.cassandra.config.LoadBalancingPolicy
import com.datastax.driver.core.Cluster.Builder
import com.datastax.driver.core.policies.DCAwareRoundRobinPolicy
diff --git a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/CassandraConfig.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/CassandraConfig.scala
index 65702310bb..12894feef2 100644
--- a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/CassandraConfig.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/CassandraConfig.scala
@@ -22,7 +22,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
import com.datamountaineer.streamreactor.common.config.base.traits.ThreadPoolSettings
-import java.util
import com.datastax.driver.core.ConsistencyLevel
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
@@ -430,7 +429,7 @@ object CassandraConfigSource {
}
-case class CassandraConfigSource(props: util.Map[String, String])
+case class CassandraConfigSource(props: Map[String, String])
extends BaseConfig(CassandraConfigConstants.CONNECTOR_PREFIX, CassandraConfigSource.sourceConfig, props)
with ErrorPolicySettings
with ConsistencyLevelSettings[ConsistencyLevel]
@@ -522,7 +521,7 @@ object CassandraConfigSink {
}
-case class CassandraConfigSink(props: util.Map[String, String])
+case class CassandraConfigSink(props: Map[String, String])
extends BaseConfig(CassandraConfigConstants.CONNECTOR_PREFIX, CassandraConfigSink.sinkConfig, props)
with KcqlSettings
with ErrorPolicySettings
diff --git a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/SSLConfigContext.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/SSLConfigContext.scala
similarity index 92%
rename from kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/SSLConfigContext.scala
rename to kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/SSLConfigContext.scala
index a50a9c09c3..92a975e1c7 100644
--- a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/SSLConfigContext.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/config/SSLConfigContext.scala
@@ -13,16 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.common.config
+package com.datamountaineer.streamreactor.connect.cassandra.config
import java.io.FileInputStream
import java.security.KeyStore
import java.security.SecureRandom
-import javax.net.ssl.KeyManager
-import javax.net.ssl.KeyManagerFactory
-import javax.net.ssl.SSLContext
-import javax.net.ssl.TrustManagerFactory
-import javax.net.ssl.TrustManager
+import javax.net.ssl._
/**
* Created by andrew@datamountaineer.com on 14/04/16.
diff --git a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
index 218191835b..05efba6d64 100644
--- a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.cassandra.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -66,9 +67,9 @@ class CassandraSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
//check input topics
- Helpers.checkInputTopics(CassandraConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(CassandraConfigConstants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = props
- Try(new CassandraConfigSink(props)) match {
+ Try(new CassandraConfigSink(props.asScala.toMap)) match {
case Failure(f) =>
throw new ConnectException(s"Couldn't start Cassandra sink due to configuration error: ${f.getMessage}", f)
case _ =>
diff --git a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkTask.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkTask.scala
index ffa32eb4e0..1c663c8a76 100644
--- a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkTask.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/sink/CassandraSinkTask.scala
@@ -30,6 +30,7 @@ import org.apache.kafka.connect.sink.SinkRecord
import org.apache.kafka.connect.sink.SinkTask
import scala.jdk.CollectionConverters.CollectionHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.util.Failure
import scala.util.Success
import scala.util.Try
@@ -55,7 +56,7 @@ class CassandraSinkTask extends SinkTask with StrictLogging {
val config = if (context.configs().isEmpty) props else context.configs()
- val taskConfig = Try(new CassandraConfigSink(config)) match {
+ val taskConfig = Try(new CassandraConfigSink(config.asScala.toMap)) match {
case Failure(f) => throw new ConnectException("Couldn't start CassandraSink due to configuration error.", f)
case Success(s) => s
}
diff --git a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/source/CassandraSourceTask.scala b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/source/CassandraSourceTask.scala
index ff348c75a9..c1cf5ba3a1 100644
--- a/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/source/CassandraSourceTask.scala
+++ b/kafka-connect-cassandra/src/main/scala/com/datamountaineer/streamreactor/connect/cassandra/source/CassandraSourceTask.scala
@@ -33,6 +33,7 @@ import org.apache.kafka.connect.source.SourceTask
import scala.collection.mutable
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
import scala.util.Failure
import scala.util.Success
@@ -68,7 +69,7 @@ class CassandraSourceTask extends SourceTask with StrictLogging {
val config = if (context.configs().isEmpty) props else context.configs()
//get configuration for this task
- taskConfig = Try(new CassandraConfigSource(config)) match {
+ taskConfig = Try(new CassandraConfigSource(config.asScala.toMap)) match {
case Failure(f) => throw new ConnectException("Couldn't start CassandraSource due to configuration error.", f)
case Success(s) => Some(s)
}
diff --git a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkConfig.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkConfig.scala
index 296d09275f..fce9b884d6 100644
--- a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkConfig.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkConfig.scala
@@ -20,8 +20,6 @@ import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Matchers with TestConfig {
"A CassandraConfig should return configured for username and password" in {
@@ -31,7 +29,7 @@ class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Match
CassandraConfigConstants.USERNAME -> USERNAME,
CassandraConfigConstants.PASSWD -> PASSWD,
CassandraConfigConstants.KCQL -> QUERY_ALL,
- ).asJava
+ )
val taskConfig = CassandraConfigSink(props)
taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
@@ -54,7 +52,7 @@ class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Match
CassandraConfigConstants.TRUST_STORE_PATH -> TRUST_STORE_PATH,
CassandraConfigConstants.TRUST_STORE_PASSWD -> TRUST_STORE_PASSWORD,
CassandraConfigConstants.KCQL -> QUERY_ALL,
- ).asJava
+ )
val taskConfig = CassandraConfigSink(props)
taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
@@ -81,7 +79,7 @@ class TestCassandraSinkConfig extends AnyWordSpec with BeforeAndAfter with Match
CassandraConfigConstants.KEY_STORE_PATH -> KEYSTORE_PATH,
CassandraConfigConstants.KEY_STORE_PASSWD -> KEYSTORE_PASSWORD,
CassandraConfigConstants.KCQL -> QUERY_ALL,
- ).asJava
+ )
val taskConfig = CassandraConfigSink(props)
taskConfig.getString(CassandraConfigConstants.CONTACT_POINTS) shouldBe CONTACT_POINT
diff --git a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkSettings.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkSettings.scala
index a4baa91cbb..84de651e7b 100644
--- a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkSettings.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSinkSettings.scala
@@ -15,7 +15,6 @@
*/
package com.datamountaineer.streamreactor.connect.cassandra.config
-import java.util
import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.common.errors.ErrorPolicyEnum
import com.datamountaineer.streamreactor.common.errors.RetryErrorPolicy
@@ -27,8 +26,6 @@ import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
/**
* Created by andrew@datamountaineer.com on 28/04/16.
* stream-reactor
@@ -44,7 +41,7 @@ class TestCassandraSinkSettings extends AnyWordSpec with Matchers with MockitoSu
CassandraConfigConstants.KCQL -> QUERY_ALL,
CassandraConfigConstants.ERROR_POLICY -> ErrorPolicyEnum.RETRY.toString,
CassandraConfigConstants.ERROR_RETRY_INTERVAL -> "500",
- ).asJava
+ )
"CassandraSettings should return setting for a sink" in {
val context = mock[SinkTaskContext]
@@ -67,30 +64,27 @@ class TestCassandraSinkSettings extends AnyWordSpec with Matchers with MockitoSu
}
"CassandraSettings should throw an exception if the consistency level is not valid for a sink" in {
- val map = new util.HashMap[String, String](getCassandraConfigSinkPropsRetry)
- map.put(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG, "INvalid")
+ val map = getCassandraConfigSinkPropsRetry + (CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG -> "INvalid")
intercept[ConfigException] {
CassandraSettings.configureSink(CassandraConfigSink(map))
}
}
"CassandraSettings should allow setting the consistency level as Quorum for a sink" in {
- val map = new util.HashMap[String, String](getCassandraConfigSinkPropsRetry)
- map.put(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG, ConsistencyLevel.QUORUM.name())
+ val map =
+ getCassandraConfigSinkPropsRetry + (CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG -> ConsistencyLevel.QUORUM.name())
val settings = CassandraSettings.configureSink(CassandraConfigSink(map))
settings.consistencyLevel shouldBe Some(ConsistencyLevel.QUORUM)
}
"CassandraSettings should allow setting the sink thread pool to 64" in {
- val map = new util.HashMap[String, String](getCassandraConfigSinkPropsRetry)
- map.put(CassandraConfigConstants.THREAD_POOL_CONFIG, "64")
+ val map = getCassandraConfigSinkPropsRetry + (CassandraConfigConstants.THREAD_POOL_CONFIG -> "64")
val settings = CassandraSettings.configureSink(CassandraConfigSink(map))
settings.threadPoolSize shouldBe 64
}
"CassandraSettings should handle setting the sink thread pool to 0 and return a non zero value" in {
- val map = new util.HashMap[String, String](getCassandraConfigSinkPropsRetry)
- map.put(CassandraConfigConstants.THREAD_POOL_CONFIG, "0")
+ val map = getCassandraConfigSinkPropsRetry + (CassandraConfigConstants.THREAD_POOL_CONFIG -> "0")
val settings = CassandraSettings.configureSink(CassandraConfigSink(map))
settings.threadPoolSize shouldBe 4 * Runtime.getRuntime.availableProcessors()
}
@@ -105,12 +99,9 @@ class TestCassandraSinkSettings extends AnyWordSpec with Matchers with MockitoSu
CassandraConfigConstants.KCQL -> "INSERT INTO TABLE SELECT * FROM TOPIC",
CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
CassandraConfigConstants.POLL_INTERVAL -> "1000",
- ).asJava
-
- val map = new util.HashMap[String, String](props)
- map.put(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG, "InvaliD")
+ ) + (CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG -> "InvaliD")
intercept[ConfigException] {
- CassandraSettings.configureSource(CassandraConfigSource(map))
+ CassandraSettings.configureSource(CassandraConfigSource(props))
}
}
@@ -123,11 +114,8 @@ class TestCassandraSinkSettings extends AnyWordSpec with Matchers with MockitoSu
CassandraConfigConstants.KCQL -> "INSERT INTO TABLE SELECT * FROM TOPIC",
CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
CassandraConfigConstants.POLL_INTERVAL -> "1000",
- ).asJava
-
- val map = new util.HashMap[String, String](props)
- map.put(CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG, ConsistencyLevel.QUORUM.name())
- val settingsSet = CassandraSettings.configureSource(CassandraConfigSource(map))
+ ) + (CassandraConfigConstants.CONSISTENCY_LEVEL_CONFIG -> ConsistencyLevel.QUORUM.name())
+ val settingsSet = CassandraSettings.configureSource(CassandraConfigSource(props))
settingsSet.head.consistencyLevel shouldBe Some(ConsistencyLevel.QUORUM)
}
}
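
The mutable `util.HashMap` copies are gone: overriding a single key on an immutable Scala map is just `+`, which returns a new map and leaves the original untouched. A small standalone illustration with made-up keys:

```scala
object MapOverrideSketch extends App {
  val base = Map(
    "consistency.level" -> "ONE",
    "thread.pool.size"  -> "8",
  )

  // `+` yields a new map; `base` itself is unchanged
  val withQuorum = base + ("consistency.level" -> "QUORUM")

  assert(base("consistency.level") == "ONE")
  assert(withQuorum("consistency.level") == "QUORUM")
}
```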
diff --git a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSourceSettings.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSourceSettings.scala
index 4b155306e5..f3ddafcdf4 100644
--- a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSourceSettings.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestCassandraSourceSettings.scala
@@ -19,8 +19,6 @@ import com.datamountaineer.streamreactor.connect.cassandra.TestConfig
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
/**
* Created by andrew@datamountaineer.com on 28/04/16.
* stream-reactor
@@ -35,7 +33,7 @@ class TestCassandraSourceSettings extends AnyWordSpec with Matchers with TestCon
CassandraConfigConstants.KCQL -> IMPORT_QUERY_ALL,
CassandraConfigConstants.ASSIGNED_TABLES -> ASSIGNED_TABLES,
CassandraConfigConstants.POLL_INTERVAL -> "1000",
- ).asJava
+ )
val taskConfig = CassandraConfigSource(props)
val settings = CassandraSettings.configureSource(taskConfig).toList
@@ -57,7 +55,7 @@ class TestCassandraSourceSettings extends AnyWordSpec with Matchers with TestCon
CassandraConfigConstants.KCQL -> "INSERT INTO cassandra-source SELECT * FROM orders PK created",
CassandraConfigConstants.POLL_INTERVAL -> "1000",
)
- val taskConfig = CassandraConfigSource(map.asJava)
+ val taskConfig = CassandraConfigSource(map)
val settings = CassandraSettings.configureSource(taskConfig).toList
settings.size shouldBe 1
}
diff --git a/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestSSLConfigContext.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala
similarity index 97%
rename from kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestSSLConfigContext.scala
rename to kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala
index 1ba23d2408..b4aeadee74 100644
--- a/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestSSLConfigContext.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/config/TestSSLConfigContext.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.common.config
+package com.datamountaineer.streamreactor.connect.cassandra.config
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
diff --git a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraTypeConverter.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraTypeConverter.scala
index 2be567d0a2..fccc0b1783 100644
--- a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraTypeConverter.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCassandraTypeConverter.scala
@@ -35,7 +35,6 @@ import org.scalatest.wordspec.AnyWordSpec
import scala.jdk.CollectionConverters.IterableHasAsScala
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class TestCassandraTypeConverter extends AnyWordSpec with TestConfig with Matchers with MockitoSugar {
@@ -285,7 +284,7 @@ class TestCassandraTypeConverter extends AnyWordSpec with TestConfig with Matche
CassandraConfigConstants.POLL_INTERVAL -> "1000",
CassandraConfigConstants.MAPPING_COLLECTION_TO_JSON -> mappingCollectionToJson.toString,
)
- val taskConfig = CassandraConfigSource(config.asJava);
+ val taskConfig = CassandraConfigSource(config);
CassandraSettings.configureSource(taskConfig).toList.head
}
diff --git a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCqlGenerator.scala b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCqlGenerator.scala
index 1526df0b54..99f9db59e4 100644
--- a/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCqlGenerator.scala
+++ b/kafka-connect-cassandra/src/test/scala/com/datamountaineer/streamreactor/connect/cassandra/source/TestCqlGenerator.scala
@@ -25,7 +25,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.annotation.nowarn
-import scala.jdk.CollectionConverters.MapHasAsJava
/**
*/
@@ -107,7 +106,7 @@ class TestCqlGenerator extends AnyWordSpec with Matchers with MockitoSugar with
CassandraConfigConstants.POLL_INTERVAL -> "1000",
CassandraConfigConstants.FETCH_SIZE -> "500",
CassandraConfigConstants.BATCH_SIZE -> "800",
- ).asJava
+ )
}
val configSource = new CassandraConfigSource(configMap)
CassandraSettings.configureSource(configSource).head
@@ -124,7 +123,7 @@ class TestCqlGenerator extends AnyWordSpec with Matchers with MockitoSugar with
CassandraConfigConstants.POLL_INTERVAL -> "1000",
CassandraConfigConstants.FETCH_SIZE -> "500",
CassandraConfigConstants.BATCH_SIZE -> "800",
- ).asJava
+ )
}
val configSource = new CassandraConfigSource(configMap)
CassandraSettings.configureSource(configSource).head
@@ -141,7 +140,7 @@ class TestCqlGenerator extends AnyWordSpec with Matchers with MockitoSugar with
CassandraConfigConstants.POLL_INTERVAL -> "1000",
CassandraConfigConstants.FETCH_SIZE -> "500",
CassandraConfigConstants.BATCH_SIZE -> "800",
- ).asJava
+ )
}
val configSource = new CassandraConfigSource(configMap)
CassandraSettings.configureSource(configSource).head
@@ -160,7 +159,7 @@ class TestCqlGenerator extends AnyWordSpec with Matchers with MockitoSugar with
CassandraConfigConstants.BATCH_SIZE -> "800",
CassandraConfigConstants.BUCKET_TIME_SERIES_MODE -> "MINUTE",
CassandraConfigConstants.BUCKET_TIME_SERIES_FORMAT -> "yyMMddHHmm",
- ).asJava
+ )
}
val configSource = new CassandraConfigSource(configMap)
CassandraSettings.configureSource(configSource).head
diff --git a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/Helpers.scala b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/Helpers.scala
index 3315f63864..094b175394 100644
--- a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/Helpers.scala
+++ b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/Helpers.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.common.config
+import cats.implicits.catsSyntaxEitherId
import com.datamountaineer.kcql.Kcql
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.common.config.ConfigException
@@ -26,11 +27,11 @@ import org.apache.kafka.common.config.ConfigException
object Helpers extends StrictLogging {
- def checkInputTopics(kcqlConstant: String, props: Map[String, String]): Boolean = {
+ def checkInputTopics(kcqlConstant: String, props: Map[String, String]): Either[Throwable, Unit] = {
val topics = props("topics").split(",").map(t => t.trim).toSet
val raw = props(kcqlConstant)
if (raw.isEmpty) {
- throw new ConfigException(s"Missing $kcqlConstant")
+ return new ConfigException(s"Missing $kcqlConstant").asLeft
}
val kcql = raw.split(";").map(r => Kcql.parse(r)).toSet
val sources = kcql.map(k => k.getSource)
@@ -38,20 +39,20 @@ object Helpers extends StrictLogging {
if (!res) {
val missing = topics.diff(sources)
- throw new ConfigException(
+ return new ConfigException(
s"Mandatory `topics` configuration contains topics not set in $kcqlConstant: ${missing}, kcql contains $sources",
- )
+ ).asLeft
}
val res1 = sources.subsetOf(topics)
if (!res1) {
val missing = topics.diff(sources)
- throw new ConfigException(
+ return new ConfigException(
s"$kcqlConstant configuration contains topics not set in mandatory `topic` configuration: ${missing}, kcql contains $sources",
- )
+ ).asLeft
}
- true
+ ().asRight
}
}
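
`checkInputTopics` now reports problems as a `Left(ConfigException)` instead of throwing, so each caller decides how to surface the failure; the connectors rethrow at the Connect boundary with cats' `leftMap(throw _)`. A hedged sketch of both call styles, using a stand-in function with the same signature rather than the real `Helpers`:

```scala
import cats.implicits.toBifunctorOps
import org.apache.kafka.common.config.ConfigException

object CheckTopicsSketch extends App {

  // stand-in with the same shape as Helpers.checkInputTopics after this change
  def checkInputTopics(kcqlConstant: String, props: Map[String, String]): Either[Throwable, Unit] =
    if (props.get(kcqlConstant).exists(_.nonEmpty)) Right(())
    else Left(new ConfigException(s"Missing $kcqlConstant"))

  val props = Map("topics" -> "t1", "my.kcql" -> "INSERT INTO tbl SELECT * FROM t1")

  // functional style: keep the Either and decide what to do with a failure
  checkInputTopics("my.kcql", props) match {
    case Right(_)    => println("configuration is consistent")
    case Left(error) => println(s"bad configuration: ${error.getMessage}")
  }

  // Connect boundary style, as in the connectors above: rethrow on the Left
  checkInputTopics("my.kcql", props).leftMap(throw _)
}
```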
diff --git a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/base/traits/BaseConfig.scala b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/base/traits/BaseConfig.scala
index 4e5df3b0ee..087d1f8c80 100644
--- a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/base/traits/BaseConfig.scala
+++ b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/config/base/traits/BaseConfig.scala
@@ -15,12 +15,12 @@
*/
package com.datamountaineer.streamreactor.common.config.base.traits
-import java.util
-
import org.apache.kafka.common.config.AbstractConfig
import org.apache.kafka.common.config.ConfigDef
-abstract class BaseConfig(connectorPrefixStr: String, confDef: ConfigDef, props: util.Map[String, String])
- extends AbstractConfig(confDef, props) {
+import scala.jdk.CollectionConverters.MapHasAsJava
+
+abstract class BaseConfig(connectorPrefixStr: String, confDef: ConfigDef, props: Map[String, String])
+ extends AbstractConfig(confDef, props.asJava) {
val connectorPrefix: String = connectorPrefixStr
}
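
`BaseConfig` now takes a Scala `Map` and performs the single Java conversion itself before delegating to `AbstractConfig`, which is what lets the call sites above drop their `.asJava`. A minimal standalone sketch of the same pattern against the Kafka config API; the config key and class name are illustrative:

```scala
import org.apache.kafka.common.config.AbstractConfig
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.{ Importance, Type }

import scala.jdk.CollectionConverters.MapHasAsJava

object ScalaMapConfigSketch extends App {

  val definition: ConfigDef = new ConfigDef()
    .define("connect.example.host", Type.STRING, "localhost", Importance.HIGH, "Host to connect to")

  // the single conversion point: Scala Map in, java map handed to AbstractConfig
  class ExampleConfig(props: Map[String, String])
      extends AbstractConfig(definition, props.asJava)

  val config = new ExampleConfig(Map("connect.example.host" -> "cassandra-1"))
  println(config.getString("connect.example.host")) // cassandra-1
}
```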
diff --git a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/ConverterUtil.scala b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/ConverterUtil.scala
index dd2a10efd8..b0b3d98de5 100644
--- a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/ConverterUtil.scala
+++ b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/ConverterUtil.scala
@@ -18,13 +18,11 @@ package com.datamountaineer.streamreactor.common.schemas
import com.datamountaineer.streamreactor.common.schemas.StructHelper._
import com.datamountaineer.streamreactor.connect.json.SimpleJsonConverter
import com.fasterxml.jackson.databind.JsonNode
-import io.confluent.connect.avro.AvroConverter
import io.confluent.connect.avro.AvroData
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.connect.connector.ConnectRecord
import org.apache.kafka.connect.data._
import org.apache.kafka.connect.errors.ConnectException
-import org.apache.kafka.connect.json.JsonDeserializer
import org.apache.kafka.connect.sink.SinkRecord
import org.json4s._
import org.json4s.jackson.JsonMethods._
@@ -45,8 +43,6 @@ trait ConverterUtil {
type avroSchema = org.apache.avro.Schema
lazy val simpleJsonConverter = new SimpleJsonConverter()
- lazy val deserializer = new JsonDeserializer()
- lazy val avroConverter = new AvroConverter()
lazy val avroData = new AvroData(100)
//for converting json to
diff --git a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/SchemaRegistry.scala b/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/SchemaRegistry.scala
deleted file mode 100644
index c83cfae85c..0000000000
--- a/kafka-connect-common/src/main/scala/com/datamountaineer/streamreactor/common/schemas/SchemaRegistry.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.common.schemas
-
-import com.typesafe.scalalogging.StrictLogging
-import io.confluent.kafka.schemaregistry.client.rest.RestService
-
-import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-/**
- * Created by andrew@datamountaineer.com on 13/06/16.
- * kafka-connect-common
- */
-object SchemaRegistry extends StrictLogging {
-
- /**
- * Get a schema for a given subject
- *
- * @param url The url of the schema registry
- * @param subject The subject to het the schema for
- * @return The schema for the subject
- */
- def getSchema(url: String, subject: String): String = {
- val registry = new RestService(url)
-
- Try(registry.getLatestVersion(subject).getSchema) match {
- case Success(s) =>
- logger.info(s"Found schema for $subject")
- s
- case Failure(throwable) =>
- logger.warn(
- "Unable to connect to the Schema registry. An attempt will be made to create the table on receipt of the first records.",
- throwable,
- )
- ""
- }
- }
-
- /**
- * Get a list of subjects from the registry
- *
- * @param url The url to the schema registry
- * @return A list of subjects/topics
- */
- def getSubjects(url: String): List[String] = {
- val registry = new RestService(url)
- val schemas: List[String] = Try(registry.getAllSubjects.asScala.toList) match {
- case Success(s) => s
- case Failure(_) => {
- logger.warn(
- "Unable to connect to the Schema registry. An attempt will be made to create the table" +
- " on receipt of the first records.",
- )
- List.empty[String]
- }
- }
-
- schemas.foreach(s => logger.info(s"Found schemas for $s"))
- schemas
- }
-}
diff --git a/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala
new file mode 100644
index 0000000000..181561e4b6
--- /dev/null
+++ b/kafka-connect-common/src/main/scala/io/lenses/streamreactor/connect/security/StoreInfo.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.security
+
+import cats.implicits.catsSyntaxOptionId
+import cats.implicits.none
+import com.datamountaineer.streamreactor.common.config.base.traits.BaseConfig
+import org.apache.kafka.common.config.SslConfigs
+
+import java.io.FileInputStream
+import java.security.KeyStore
+import javax.net.ssl.KeyManagerFactory
+import javax.net.ssl.SSLContext
+import javax.net.ssl.TrustManagerFactory
+
+case class StoreInfo(
+ storePath: String,
+ storeType: Option[String],
+ storePassword: Option[String] = None,
+)
+
+case class StoresInfo(
+ trustStore: Option[StoreInfo] = None,
+ keyStore: Option[StoreInfo] = None,
+) {
+ def toSslContext: Option[SSLContext] = {
+ val maybeTrustFactory: Option[TrustManagerFactory] = trustStore.map {
+ case StoreInfo(path, storeType, password) =>
+ trustManagers(path, storeType, password)
+ }
+ val maybeKeyFactory: Option[KeyManagerFactory] = keyStore.map {
+ case StoreInfo(path, storeType, password) =>
+ keyManagers(path, storeType, password)
+ }
+
+ if (maybeTrustFactory.nonEmpty || maybeKeyFactory.nonEmpty) {
+ val sslContext = SSLContext.getInstance("TLS")
+ sslContext.init(
+ maybeKeyFactory.map(_.getKeyManagers).orNull,
+ maybeTrustFactory.map(_.getTrustManagers).orNull,
+ null,
+ )
+ sslContext.some
+ } else {
+ none
+ }
+ }
+
+ private def trustManagers(path: String, storeType: Option[String], password: Option[String]) = {
+ val truststore = KeyStore.getInstance(storeType.map(_.toUpperCase).getOrElse("JKS"))
+ val truststoreStream = new FileInputStream(path)
+ truststore.load(truststoreStream, password.getOrElse("").toCharArray)
+
+ val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
+ trustManagerFactory.init(truststore)
+ trustManagerFactory
+ }
+
+ private def keyManagers(path: String, storeType: Option[String], password: Option[String]): KeyManagerFactory = {
+ val keyStore = KeyStore.getInstance(storeType.map(_.toUpperCase).getOrElse("JKS"))
+ val truststoreStream = new FileInputStream(path)
+ keyStore.load(truststoreStream, password.getOrElse("").toCharArray)
+
+ val keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
+ keyManagerFactory.init(keyStore, password.getOrElse("").toCharArray)
+ keyManagerFactory
+ }
+}
+
+object StoresInfo {
+ def apply(config: BaseConfig): StoresInfo = {
+ val trustStore = for {
+ storePath <- Option(config.getString(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
+ storeType = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG))
+ storePassword = Option(config.getPassword(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)).map(_.value())
+ } yield StoreInfo(storePath, storeType, storePassword)
+ val keyStore = for {
+ storePath <- Option(config.getString(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
+ storeType = Option(config.getString(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG))
+ storePassword = Option(config.getPassword(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)).map(_.value())
+ } yield StoreInfo(storePath, storeType, storePassword)
+
+ StoresInfo(trustStore, keyStore)
+ }
+}
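
The new `StoresInfo` assembles an `SSLContext` from optional trust/key store settings, defaulting the store type to JKS and yielding `None` when neither store is configured. A hedged usage sketch; the path and password are illustrative and the file is not expected to exist:

```scala
import io.lenses.streamreactor.connect.security.{ StoreInfo, StoresInfo }

import scala.util.Try

object StoresInfoSketch extends App {

  // no stores configured: no SSLContext is built
  assert(StoresInfo().toSslContext.isEmpty)

  // trust store only; path and password are illustrative
  val stores = StoresInfo(
    trustStore = Some(StoreInfo("/etc/secrets/truststore.jks", Some("JKS"), Some("changeit"))),
    keyStore   = None,
  )

  // loading touches the filesystem, so wrap it for this sketch
  val outcome = Try(stores.toSslContext)
  println(outcome.fold(e => s"could not load truststore: ${e.getMessage}", ctx => s"built context: $ctx"))
}
```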
diff --git a/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestHelpers.scala b/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestHelpers.scala
index fa16154470..c7b499cb9c 100644
--- a/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestHelpers.scala
+++ b/kafka-connect-common/src/test/scala/com/datamountaineer/streamreactor/common/config/TestHelpers.scala
@@ -17,12 +17,14 @@ package com.datamountaineer.streamreactor.common.config
import com.datamountaineer.streamreactor.common.TestUtilsBase
import org.apache.kafka.common.config.ConfigException
+import org.scalatest.EitherValues
+import org.scalatest.matchers.should.Matchers
/**
* Created by andrew@datamountaineer.com on 23/08/2017.
* kafka-connect-common
*/
-class TestHelpers extends TestUtilsBase {
+class TestHelpers extends TestUtilsBase with EitherValues with Matchers {
val kcqlConstant: String = "myconnector.kcql"
@@ -31,18 +33,15 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2",
)
- intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
+ Helpers.checkInputTopics(kcqlConstant, props).left.value should be(a[ConfigException])
}
"should throw exception if topics not specified in kcql" in {
val props = Map("topics" -> "t1,t2", s"$kcqlConstant" -> "insert into table select * from t1")
- intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
+ Helpers.checkInputTopics(kcqlConstant, props).left.value should be(a[ConfigException])
+
}
"should not throw exception if all good" in {
@@ -50,8 +49,7 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2",
)
- val res = Helpers.checkInputTopics(kcqlConstant, props)
- res shouldBe true
+ Helpers.checkInputTopics(kcqlConstant, props).value should be(())
}
"should add topics involved in kcql error to message" in {
@@ -59,10 +57,8 @@ class TestHelpers extends TestUtilsBase {
s"$kcqlConstant" -> "insert into table select time,c1,c2 from topic1 WITH TIMESTAMP time",
)
- val e = intercept[ConfigException] {
- Helpers.checkInputTopics(kcqlConstant, props)
- }
-
+ val e = Helpers.checkInputTopics(kcqlConstant, props).left.value
+ e should be(a[ConfigException])
e.getMessage.contains("topic1WITHTIMESTAMPtime") shouldBe true
}
}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnector.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
similarity index 72%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnector.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
index 8b85d7e9d0..07273e983a 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnector.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkConnector.scala
@@ -13,31 +13,34 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic.common
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
-
-import java.util
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.sink.SinkConnector
+import java.util
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
-class ElasticSinkConnector extends SinkConnector with StrictLogging {
+abstract class ElasticSinkConnector[C <: ElasticSettings, CO <: ElasticConfigDef, T <: ElasticSinkTask[C, CO]](
+ sinkClass: Class[T],
+ constants: CO,
+) extends SinkConnector
+ with StrictLogging {
private var configProps: Option[util.Map[String, String]] = None
- private val configDef = ElasticConfig.config
- private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
+ private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
/**
* States which SinkTask class to use
*/
- override def taskClass(): Class[_ <: Task] = classOf[ElasticSinkTask]
+ override def taskClass(): Class[_ <: Task] = sinkClass
/**
* Set the configuration for each work and determine the split
@@ -57,11 +60,11 @@ class ElasticSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Elastic sink task.")
- Helpers.checkInputTopics(ElasticConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(constants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
override def stop(): Unit = {}
override def version(): String = manifest.version()
- override def config(): ConfigDef = configDef
+ override def config(): ConfigDef = constants.configDef
}
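
`ElasticSinkConnector` becomes an abstract base parameterized over the settings, config-definition and task types, so each flavour only supplies its concrete pieces. The following is a self-contained miniature of that wiring pattern with placeholder classes, not the actual elastic8/opensearch subclasses from this PR:

```scala
// Miniature of the pattern: the base class owns the shared wiring, a subclass
// only names its task class and its config definition.
abstract class ConfigDefLike { def kcqlKey: String }

abstract class GenericSinkConnector[CD <: ConfigDefLike](taskClass: Class[_], configDef: CD) {
  def taskClassName: String = taskClass.getName
  def kcqlKey:       String = configDef.kcqlKey
}

object ExampleConfigDef extends ConfigDefLike { val kcqlKey = "connect.example.kcql" }
class ExampleSinkTask

class ExampleSinkConnector
    extends GenericSinkConnector[ExampleConfigDef.type](classOf[ExampleSinkTask], ExampleConfigDef)

object ConnectorPatternSketch extends App {
  val connector = new ExampleSinkConnector
  println(connector.taskClassName) // ExampleSinkTask
  println(connector.kcqlKey)       // connect.example.kcql
}
```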
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala
new file mode 100644
index 0000000000..c374034867
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/ElasticSinkTask.scala
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common
+
+import cats.effect.IO
+import cats.effect.unsafe.implicits.global
+import cats.implicits.catsSyntaxOptionId
+import cats.implicits.toBifunctorOps
+import com.datamountaineer.streamreactor.common.errors.ErrorHandler
+import com.datamountaineer.streamreactor.common.errors.RetryErrorPolicy
+import com.datamountaineer.streamreactor.common.utils.AsciiArtPrinter.printAsciiHeader
+import com.datamountaineer.streamreactor.common.utils.JarManifest
+import com.datamountaineer.streamreactor.common.utils.ProgressCounter
+import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticJsonWriter
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticWriter
+import org.apache.kafka.clients.consumer.OffsetAndMetadata
+import org.apache.kafka.common.TopicPartition
+import org.apache.kafka.connect.errors.ConnectException
+import org.apache.kafka.connect.sink.SinkRecord
+import org.apache.kafka.connect.sink.SinkTask
+
+import java.util
+import scala.concurrent.duration.Duration
+import scala.concurrent.duration.DurationInt
+import scala.jdk.CollectionConverters.IterableHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
+
+abstract class ElasticSinkTask[C <: ElasticSettings, CD <: ElasticConfigDef](
+ configReader: ElasticSettingsReader[C, CD],
+ writerCreator: ElasticClientCreator[C],
+ configDef: CD,
+ asciiArt: String,
+) extends SinkTask
+ with StrictLogging
+ with ErrorHandler {
+
+ private var writer: Option[ElasticWriter] = None
+ private val progressCounter = new ProgressCounter
+ private var enableProgress: Boolean = false
+ private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
+ private var writeTimeout: Option[Duration] = None
+
+ /**
+ * Parse the configurations and setup the writer
+ */
+ override def start(props: util.Map[String, String]): Unit = {
+ printAsciiHeader(manifest, asciiArt)
+
+ val conf = if (context.configs().isEmpty) props else context.configs()
+
+ val settings: C = configReader.read(configDef, conf.asScala.toMap).leftMap(t =>
+ throw new ConnectException("exception reading config", t),
+ ).merge
+
+ enableProgress = settings.common.progressCounter
+
+ //if error policy is retry set retry interval
+ settings.common.errorPolicy match {
+ case RetryErrorPolicy() => context.timeout(settings.common.errorRetryInterval)
+ case _ =>
+ }
+
+ //initialize error tracker
+ initialize(settings.common.taskRetries, settings.common.errorPolicy)
+
+ writeTimeout = settings.common.writeTimeout.seconds.some
+ val elasticClientWrapper =
+ writerCreator.create(settings).leftMap(t => throw new ConnectException("exception creating connection", t)).merge
+ val elasticJsonWriter = new ElasticJsonWriter(elasticClientWrapper, settings.common)
+ writer = Some(elasticJsonWriter)
+ }
+
+ /**
+ * Pass the SinkRecords to the writer for Writing
+ */
+ override def put(records: util.Collection[SinkRecord]): Unit = {
+ require(writer.nonEmpty, "Writer is not set!")
+ val seq = records.asScala.toVector
+
+ val ioWrite = writer.map(_.write(seq).attempt).getOrElse(IO(Right(())))
+ val timeoutIo = writeTimeout.fold(ioWrite)(wT => ioWrite.timeout(wT))
+
+ handleTry(timeoutIo.map(_.toTry).unsafeRunSync())
+ if (enableProgress) {
+ progressCounter.update(seq)
+ }
+ }
+
+ /**
+ * Clean up writer
+ */
+ override def stop(): Unit = {
+ logger.info("Stopping Elastic sink.")
+ writer.foreach(_.close())
+ progressCounter.empty()
+ }
+
+ override def flush(map: util.Map[TopicPartition, OffsetAndMetadata]): Unit =
+ logger.info("Flushing Elastic Sink")
+
+ override def version: String = manifest.version()
+}
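
`put` runs the writer through cats-effect `IO`: the write is `attempt`ed, the configured write timeout is applied, and the outcome is converted to a `Try` for the shared `ErrorHandler`. A standalone sketch of that chain, assuming only cats-effect 3; note that, as in the task, a timeout surfaces as a thrown exception rather than a captured `Failure`:

```scala
import cats.effect.IO
import cats.effect.unsafe.implicits.global

import scala.concurrent.duration.DurationInt
import scala.util.Try

object WriteTimeoutSketch extends App {

  // stand-in for the writer's IO-returning write(...)
  def write(records: Vector[String]): IO[Unit] =
    IO(println(s"writing ${records.size} records"))

  val records = Vector("a", "b", "c")

  // same shape as ElasticSinkTask.put: attempt, then optional timeout, then Try
  val ioWrite             = write(records).attempt
  val withTimeout         = ioWrite.timeout(5.seconds)
  val outcome: Try[Unit]  = withTimeout.unsafeRunSync().toTry

  println(outcome) // Success(()); a failing write would appear as Failure(...)
}
```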
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala
new file mode 100644
index 0000000000..1ee99913df
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/ElasticClientWrapper.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.client
+
+import cats.effect.IO
+
+trait ElasticClientWrapper {
+
+ def createIndex(indexName: String): IO[Unit]
+
+ def close(): IO[Unit]
+
+ def execute(reqs: Seq[Request]): IO[Unit]
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala
new file mode 100644
index 0000000000..940eacf86e
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/client/Request.scala
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.client
+
+import com.fasterxml.jackson.databind.JsonNode
+
+trait Request
+case class InsertRequest(index: String, id: String, json: JsonNode, pipeline: String) extends Request
+case class UpsertRequest(index: String, id: String, json: JsonNode) extends Request
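
`ElasticClientWrapper` plus the `Request` ADT form the seam between the shared writer and a concrete Elasticsearch or OpenSearch client: create indexes, execute batches of inserts/upserts, close. A hedged, in-memory stub of the trait (not part of this PR), mainly to show the shape an implementation takes:

```scala
import cats.effect.IO
import io.lenses.streamreactor.connect.elastic.common.client.{
  ElasticClientWrapper,
  InsertRequest,
  Request,
  UpsertRequest,
}

// In-memory stub: records what it was asked to do instead of calling a cluster.
class RecordingClientWrapper extends ElasticClientWrapper {

  @volatile private var indexes:  Vector[String]           = Vector.empty
  @volatile private var executed: Vector[(String, String)] = Vector.empty

  override def createIndex(indexName: String): IO[Unit] =
    IO { indexes = indexes :+ indexName }

  override def execute(reqs: Seq[Request]): IO[Unit] =
    IO {
      executed = executed ++ reqs.map {
        case InsertRequest(index, id, _, _) => index -> id
        case UpsertRequest(index, id, _)    => index -> id
      }
    }

  override def close(): IO[Unit] = IO.unit

  def seen: Vector[(String, String)] = executed
}
```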
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala
new file mode 100644
index 0000000000..e23c757950
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettings.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import com.datamountaineer.kcql.Kcql
+import com.datamountaineer.streamreactor.common.errors.ErrorPolicy
+
+case class ElasticCommonSettings(
+ kcqls: Seq[Kcql],
+ errorPolicy: ErrorPolicy,
+ taskRetries: Int,
+ writeTimeout: Int,
+ batchSize: Int,
+ pkJoinerSeparator: String,
+ progressCounter: Boolean,
+ errorRetryInterval: Long,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = this
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala
new file mode 100644
index 0000000000..18c916f2c4
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticCommonSettingsReader.scala
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import com.datamountaineer.kcql.Kcql
+
+import scala.util.Try
+
+object ElasticCommonSettingsReader extends ElasticSettingsReader[ElasticCommonSettings, ElasticConfigDef] {
+ override def read(configDef: ElasticConfigDef, props: Map[String, String]): Either[Throwable, ElasticCommonSettings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+
+ kcql = config.getString(configDef.KCQL).split(";").filter(_.trim.nonEmpty).map(Kcql.parse).toIndexedSeq
+ pkJoinerSeparator = config.getString(configDef.PK_JOINER_SEPARATOR)
+ writeTimeout = config.getWriteTimeout
+ errorPolicy = config.getErrorPolicy
+ retries = config.getNumberRetries
+ progressCounter = config.getBoolean(configDef.PROGRESS_COUNTER_ENABLED)
+
+ errorRetryInterval = config.getLong(configDef.ERROR_RETRY_INTERVAL).toLong
+ batchSize = config.getInt(configDef.BATCH_SIZE_CONFIG)
+ } yield {
+ ElasticCommonSettings(
+ kcql,
+ errorPolicy,
+ retries,
+ writeTimeout,
+ batchSize,
+ pkJoinerSeparator,
+ progressCounter,
+ errorRetryInterval,
+ )
+ }
+}
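
`ElasticCommonSettingsReader.read` wraps the config construction in `Try(...).toEither` and derives the remaining fields inside the same `for`-comprehension, so any bad property short-circuits into a `Left`. A standalone miniature of that pattern with hypothetical keys and a cut-down settings case class:

```scala
import scala.util.Try

object SettingsReaderSketch extends App {

  final case class Settings(batchSize: Int, progress: Boolean)

  // same shape as the reader: Try the risky parse, derive the rest in the for
  def read(props: Map[String, String]): Either[Throwable, Settings] =
    for {
      batchSize <- Try(props("batch.size").toInt).toEither
      progress   = props.getOrElse("progress.enabled", "false").toBoolean
    } yield Settings(batchSize, progress)

  println(read(Map("batch.size" -> "4000")))          // Right(Settings(4000,false))
  println(read(Map("batch.size" -> "not-a-number")))  // Left(NumberFormatException ...)
}
```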
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala
new file mode 100644
index 0000000000..b1f673991f
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfig.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import com.datamountaineer.streamreactor.common.config.base.traits.BaseConfig
+import com.datamountaineer.streamreactor.common.config.base.traits.ErrorPolicySettings
+import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
+import com.datamountaineer.streamreactor.common.config.base.traits.WriteTimeoutSettings
+
+/**
+  * ElasticConfig
+  *
+  * Holds the connector configuration; extends AbstractConfig via BaseConfig.
+  */
+case class ElasticConfig(configDef: ElasticConfigDef, prefix: String, props: Map[String, String])
+ extends BaseConfig(prefix, configDef.configDef, props)
+ with WriteTimeoutSettings
+ with ErrorPolicySettings
+ with NumberRetriesSettings
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala
new file mode 100644
index 0000000000..af40c19757
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticConfigDef.scala
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+import com.datamountaineer.streamreactor.common.config.base.const.TraitConfigConst._
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
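+/**
+  * Base ConfigDef shared by the Elastic-compatible connectors. The prefix supplied by each subclass
+  * (e.g. "connect.elastic") namespaces all of the property keys defined below.
+  */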
+abstract class ElasticConfigDef(val connectorPrefix: String) {
+
+ def configDef: ConfigDef =
+ new ConfigDef()
+ .define(
+ WRITE_TIMEOUT_CONFIG,
+ Type.INT,
+ WRITE_TIMEOUT_DEFAULT,
+ Importance.MEDIUM,
+ WRITE_TIMEOUT_DOC,
+ "Connection",
+ 6,
+ ConfigDef.Width.MEDIUM,
+ WRITE_TIMEOUT_DISPLAY,
+ )
+ .define(
+ BATCH_SIZE_CONFIG,
+ Type.INT,
+ BATCH_SIZE_DEFAULT,
+ Importance.MEDIUM,
+ BATCH_SIZE_DOC,
+ "Connection",
+ 7,
+ ConfigDef.Width.MEDIUM,
+ BATCH_SIZE_DISPLAY,
+ )
+ .define(
+ ERROR_POLICY_CONFIG,
+ Type.STRING,
+ ERROR_POLICY_DEFAULT,
+ Importance.HIGH,
+ ERROR_POLICY_DOC,
+ "Error",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ ERROR_POLICY_CONFIG,
+ )
+ .define(
+ NBR_OF_RETRIES_CONFIG,
+ Type.INT,
+ NBR_OF_RETIRES_DEFAULT,
+ Importance.MEDIUM,
+ NBR_OF_RETRIES_DOC,
+ "Error",
+ 2,
+ ConfigDef.Width.SHORT,
+ NBR_OF_RETRIES_CONFIG,
+ )
+ .define(
+ ERROR_RETRY_INTERVAL,
+ Type.LONG,
+ ERROR_RETRY_INTERVAL_DEFAULT,
+ Importance.MEDIUM,
+ ERROR_RETRY_INTERVAL_DOC,
+ "Error",
+ 3,
+ ConfigDef.Width.LONG,
+ ERROR_RETRY_INTERVAL,
+ )
+ .define(
+ KCQL,
+ Type.STRING,
+ Importance.HIGH,
+ KCQL_DOC,
+ "KCQL",
+ 1,
+ ConfigDef.Width.LONG,
+ KCQL,
+ )
+ .define(
+ PK_JOINER_SEPARATOR,
+ Type.STRING,
+ PK_JOINER_SEPARATOR_DEFAULT,
+ Importance.LOW,
+ PK_JOINER_SEPARATOR_DOC,
+ "KCQL",
+ 2,
+ ConfigDef.Width.SHORT,
+ PK_JOINER_SEPARATOR,
+ )
+ .define(
+ PROGRESS_COUNTER_ENABLED,
+ Type.BOOLEAN,
+ PROGRESS_COUNTER_ENABLED_DEFAULT,
+ Importance.MEDIUM,
+ PROGRESS_COUNTER_ENABLED_DOC,
+ "Metrics",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROGRESS_COUNTER_ENABLED_DISPLAY,
+ )
+ .withClientSslSupport()
+
+ val KCQL: String = s"$connectorPrefix.$KCQL_PROP_SUFFIX"
+ val KCQL_DOC = "KCQL expression describing field selection and routes."
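+  // e.g. "INSERT INTO orders SELECT * FROM orders"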
+
+ val ERROR_POLICY_CONFIG = s"$connectorPrefix.$ERROR_POLICY_PROP_SUFFIX"
+ val ERROR_POLICY_DOC: String =
+ """Specifies the action to be taken if an error occurs while inserting the data
+ |There are two available options:
+ |NOOP - the error is swallowed
+ |THROW - the error is allowed to propagate.
+ |RETRY - The exception causes the Connect framework to retry the message. The number of retries is based on
+ |The error will be logged automatically""".stripMargin
+ val ERROR_POLICY_DEFAULT = "THROW"
+
+ val WRITE_TIMEOUT_CONFIG = s"$connectorPrefix.$WRITE_TIMEOUT_SUFFIX"
+  val WRITE_TIMEOUT_DOC     = "The time, in milliseconds, to wait for a write to complete. The default is 5 minutes."
+ val WRITE_TIMEOUT_DISPLAY = "Write timeout"
+ val WRITE_TIMEOUT_DEFAULT = 300000
+
+ val ERROR_RETRY_INTERVAL = s"$connectorPrefix.$RETRY_INTERVAL_PROP_SUFFIX"
+ val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
+ val ERROR_RETRY_INTERVAL_DEFAULT = 60000L
+
+ val NBR_OF_RETRIES_CONFIG = s"$connectorPrefix.$MAX_RETRIES_PROP_SUFFIX"
+ val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
+ val NBR_OF_RETIRES_DEFAULT = 20
+
+ val BATCH_SIZE_CONFIG = s"$connectorPrefix.$BATCH_SIZE_PROP_SUFFIX"
+  val BATCH_SIZE_DOC =
+    "How many records to process at one time. Records pulled from Kafka can number 100k+ per poll, which is not feasible to send to Elasticsearch in a single request"
+ val BATCH_SIZE_DISPLAY = "Batch size"
+ val BATCH_SIZE_DEFAULT = 4000
+
+ val PROGRESS_COUNTER_ENABLED: String = PROGRESS_ENABLED_CONST
+  val PROGRESS_COUNTER_ENABLED_DOC     = "Enables logging of the number of records that have been processed"
+ val PROGRESS_COUNTER_ENABLED_DEFAULT = false
+ val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"
+
+ val PK_JOINER_SEPARATOR = s"$connectorPrefix.pk.separator"
+  val PK_JOINER_SEPARATOR_DOC     = "Separator used when the primary key is composed of more than one field"
+ val PK_JOINER_SEPARATOR_DEFAULT = "-"
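+  // e.g. with the default "-", a composite key ("a", "b") is joined into the document id "a-b".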
+
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala
new file mode 100644
index 0000000000..a60add7d74
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettings.scala
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+trait ElasticSettings {
+
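+  /** Each connector flavour's settings expose the shared common settings through this accessor. */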
+ def common: ElasticCommonSettings
+
+}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala
new file mode 100644
index 0000000000..3ed15f2d39
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/config/ElasticSettingsReader.scala
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.config
+
+trait ElasticSettingsReader[C <: ElasticSettings, CD <: ElasticConfigDef] {
+
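+  /** Parses the supplied properties against the given config definition, returning the typed settings or the failure. */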
+ def read(configDef: CD, props: Map[String, String]): Either[Throwable, C]
+}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CreateIndex.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
similarity index 69%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CreateIndex.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
index 6ae8bfb92e..148c654952 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CreateIndex.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CreateIndex.scala
@@ -13,14 +13,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
+import cats.effect.IO
import com.datamountaineer.kcql.Kcql
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
/**
* Creates the index for the given KCQL configuration.
*/
object CreateIndex {
+
+ def createIndex(kcql: Kcql, client: ElasticClientWrapper): IO[Unit] = {
+ require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
+
+ client.createIndex(getIndexName(kcql)) *> IO.unit
+ }
+
def getIndexName(kcql: Kcql): String =
Option(kcql.getIndexSuffix).fold(kcql.getTarget) { indexNameSuffix =>
s"${kcql.getTarget}${CustomIndexName.parseIndexName(indexNameSuffix)}"
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexName.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
similarity index 97%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexName.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
index f1e0430d1f..2e15a0c9f9 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexName.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/CustomIndexName.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
import scala.annotation.tailrec
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragment.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
similarity index 94%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragment.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
index 6a6ab618a1..8404e83d8e 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragment.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/IndexNameFragment.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic.common.indexname
import java.time.Clock
import java.time.LocalDateTime._
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/package.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
similarity index 92%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/package.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
index 9791d68212..e99bb3a6ca 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/package.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/indexname/package.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic.common
package object indexname {
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/PrimaryKeyExtractor.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
similarity index 99%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/PrimaryKeyExtractor.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
index 392297d9be..85f9eb5d82 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/PrimaryKeyExtractor.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/PrimaryKeyExtractor.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic.common.transform
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.node._
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/Transform.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
similarity index 97%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/Transform.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
index 38f6f33549..f2acdbdaa1 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/Transform.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/Transform.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic.common.transform
import com.datamountaineer.streamreactor.connect.json.SimpleJsonConverter
import com.fasterxml.jackson.annotation.JsonInclude
@@ -31,7 +31,7 @@ import scala.util.Failure
import scala.util.Success
import scala.util.Try
-private object Transform extends StrictLogging {
+object Transform extends StrictLogging {
lazy val simpleJsonConverter = new SimpleJsonConverter()
def apply(
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/TransformAndExtractPK.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
similarity index 97%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/TransformAndExtractPK.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
index 639bcd2b42..513446f170 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/TransformAndExtractPK.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/transform/TransformAndExtractPK.scala
@@ -13,9 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import java.nio.ByteBuffer
+package io.lenses.streamreactor.connect.elastic.common.transform
import com.datamountaineer.streamreactor.connect.json.SimpleJsonConverter
import com.fasterxml.jackson.databind.JsonNode
@@ -27,11 +25,12 @@ import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.data.Struct
+import java.nio.ByteBuffer
import scala.util.Failure
import scala.util.Success
import scala.util.Try
-private object TransformAndExtractPK extends StrictLogging {
+object TransformAndExtractPK extends StrictLogging {
lazy val simpleJsonConverter = new SimpleJsonConverter()
def apply(
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala
new file mode 100644
index 0000000000..6e1fe6f9ad
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticClientCreator.scala
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.writers
+
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.config
+
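+/** Creates the [[ElasticClientWrapper]] for a connector from its typed settings; construction errors are returned in a Left. */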
+trait ElasticClientCreator[C <: config.ElasticSettings] {
+ def create(config: C): Either[Throwable, ElasticClientWrapper]
+}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticJsonWriter.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
similarity index 63%
rename from kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticJsonWriter.scala
rename to kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
index bec0129837..f6556067df 100644
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticJsonWriter.scala
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticJsonWriter.scala
@@ -13,53 +13,44 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic.common.writers
-import java.util
+import cats.effect.IO
+import cats.effect.unsafe.implicits.global
+import cats.implicits._
import com.datamountaineer.kcql.Kcql
import com.datamountaineer.kcql.WriteModeEnum
import com.datamountaineer.streamreactor.common.converters.FieldConverter
-import com.datamountaineer.streamreactor.common.errors.ErrorHandler
import com.datamountaineer.streamreactor.common.schemas.ConverterUtil
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticSettings
-import com.datamountaineer.streamreactor.connect.elastic8.indexname.CreateIndex
-import com.fasterxml.jackson.databind.JsonNode
import com.landoop.sql.Field
-import com.sksamuel.elastic4s.Index
-import com.sksamuel.elastic4s.Indexable
-import com.sksamuel.elastic4s.ElasticDsl._
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.indexname.CreateIndex
+import io.lenses.streamreactor.connect.elastic.common.transform.Transform
+import io.lenses.streamreactor.connect.elastic.common.transform.TransformAndExtractPK
import org.apache.kafka.connect.sink.SinkRecord
+import java.util
import scala.annotation.nowarn
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-import scala.concurrent.Await
-import scala.concurrent.Future
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
-import scala.util.Try
-
-object ElasticJsonWriter {
- def apply(client: KElasticClient, props: Map[String, String]): ElasticJsonWriter =
- new ElasticJsonWriter(client, ElasticSettings(ElasticConfig(props.asJava)))
-}
@nowarn
-class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
- extends ErrorHandler
- with StrictLogging
+class ElasticJsonWriter(client: ElasticClientWrapper, settings: ElasticCommonSettings)
+ extends ElasticWriter
with ConverterUtil
- with AutoCloseable {
+ with AutoCloseable
+ with StrictLogging {
logger.info("Initialising Elastic Json writer")
- //initialize error tracker
- initialize(settings.taskRetries, settings.errorPolicy)
-
//create the index automatically if it was set to do so
- settings.kcqls.filter(_.isAutoCreate).foreach(client.index)
+ settings.kcqls.filter(_.isAutoCreate).toList.map(CreateIndex.createIndex(_, client)).traverse(_.attempt).onError(t =>
+ throw t,
+ ).unsafeRunSync()
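+  // Group the KCQL statements by source topic so incoming records can be matched to their statements.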
private val topicKcqlMap = settings.kcqls.groupBy(_.getSource)
@@ -79,23 +70,20 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
}
- implicit object SinkRecordIndexable extends Indexable[SinkRecord] {
- override def json(t: SinkRecord): String = convertValueToJson(t).toString
- }
-
/**
* Close elastic4s client
*/
- def close(): Unit = client.close()
+ override def close(): Unit = client.close()
/**
* Write SinkRecords to Elastic Search if list is not empty
*
* @param records A list of SinkRecords
*/
- def write(records: Vector[SinkRecord]): Unit =
+ override def write(records: Vector[SinkRecord]): IO[Unit] =
if (records.isEmpty) {
logger.debug("No records received.")
+ IO.unit
} else {
logger.debug(s"Received ${records.size} records.")
val grouped = records.groupBy(_.topic())
@@ -107,8 +95,8 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
*
* @param records A list of SinkRecords
*/
- def insert(records: Map[String, Vector[SinkRecord]]): Unit = {
- val fut = records.flatMap {
+ private def insert(records: Map[String, Vector[SinkRecord]]): IO[Unit] =
+ records.flatMap {
case (topic, sinkRecords) =>
val kcqls = topicKcqlMap.getOrElse(
topic,
@@ -123,7 +111,7 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
val kcqlValue = kcqlMap.get(kcql)
sinkRecords.grouped(settings.batchSize)
.map { batch =>
- val indexes = batch.map { r =>
+ val indexes: Seq[Request] = batch.map { r =>
val (json, pks) = if (kcqlValue.primaryKeysPath.isEmpty) {
(Transform(
kcqlValue.fields,
@@ -146,37 +134,25 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
kcql.getWriteMode match {
case WriteModeEnum.INSERT =>
- indexInto(new Index(i))
- .id(if (idFromPk.isEmpty) autoGenId(r) else idFromPk)
- .pipeline(kcql.getPipeline)
- .source(json.toString)
+ val id = if (idFromPk.isEmpty) autoGenId(r) else idFromPk
+ InsertRequest(i, id, json, kcql.getPipeline)
case WriteModeEnum.UPSERT =>
- require(pks.nonEmpty, "Error extracting primary keys")
- updateById(new Index(i), idFromPk)
- .docAsUpsert(json)(IndexableJsonNode)
+ UpsertRequest(i, idFromPk, json)
}
}
- client.execute(bulk(indexes).refreshImmediately)
+ client.execute(indexes)
}
}
- }
-
- handleTry(
- Try(
- Await.result(Future.sequence(fut), settings.writeTimeout.seconds),
- ),
- )
- ()
- }
+ }.toList.traverse(identity).void
/**
* Create id from record infos
*
* @param record One SinkRecord
*/
- def autoGenId(record: SinkRecord): String = {
+ private def autoGenId(record: SinkRecord): String = {
val pks: Seq[Any] = Seq(record.topic(), record.kafkaPartition(), record.kafkaOffset())
pks.mkString(settings.pkJoinerSeparator)
}
@@ -184,7 +160,3 @@ class ElasticJsonWriter(client: KElasticClient, settings: ElasticSettings)
private case class KcqlValues(fields: Seq[Field], ignoredFields: Seq[Field], primaryKeysPath: Seq[Vector[String]])
}
-
-case object IndexableJsonNode extends Indexable[JsonNode] {
- override def json(t: JsonNode): String = t.toString
-}
diff --git a/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala
new file mode 100644
index 0000000000..47b5e360da
--- /dev/null
+++ b/kafka-connect-elastic-common/src/main/scala/io/lenses/streamreactor/connect/elastic/common/writers/ElasticWriter.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic.common.writers
+
+import cats.effect.IO
+import org.apache.kafka.connect.sink.SinkRecord
+
+trait ElasticWriter {
+
+ /**
+    * Close the underlying Elastic client
+ */
+ def close(): Unit
+
+ /**
+    * Write SinkRecords to Elasticsearch if the list is not empty
+ *
+ * @param records A list of SinkRecords
+ */
+ def write(records: Vector[SinkRecord]): IO[Unit]
+
+}
diff --git a/kafka-connect-elastic8/src/fun/scala/com/datamountaineer/streamreactor/connect/Elastic8Test.scala b/kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
similarity index 76%
rename from kafka-connect-elastic8/src/fun/scala/com/datamountaineer/streamreactor/connect/Elastic8Test.scala
rename to kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
index 2d58fdf7c7..babd9e1695 100644
--- a/kafka-connect-elastic8/src/fun/scala/com/datamountaineer/streamreactor/connect/Elastic8Test.scala
+++ b/kafka-connect-elastic8/src/fun/scala/io/lenses/streamreactor/connect/Elastic8Test.scala
@@ -1,15 +1,16 @@
-package com.datamountaineer.streamreactor.connect
+package io.lenses.streamreactor.connect
+
import cats.effect.IO
import cats.effect.testing.scalatest.AsyncIOSpec
import com.jayway.jsonpath.JsonPath
-import io.confluent.kafka.serializers.KafkaJsonSerializer
-import io.lenses.streamreactor.connect.model.Order
-import io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer
-import io.lenses.streamreactor.connect.testcontainers.SchemaRegistryContainer
-import io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
-import io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
-import io.lenses.streamreactor.connect.testcontainers.connect.KafkaConnectClient.createConnector
-import io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import _root_.io.confluent.kafka.serializers.KafkaJsonSerializer
+import _root_.io.lenses.streamreactor.connect.model.Order
+import _root_.io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer
+import _root_.io.lenses.streamreactor.connect.testcontainers.SchemaRegistryContainer
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.KafkaConnectClient.createConnector
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.StringSerializer
import org.scalatest.flatspec.AsyncFlatSpec
@@ -22,7 +23,7 @@ import java.net.http.HttpResponse.BodyHandlers
class Elastic8Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorContainerPerSuite with Matchers {
- lazy val container: ElasticsearchContainer = ElasticsearchContainer(dockerTag = "8.10.1").withNetwork(network)
+ lazy val container: ElasticsearchContainer = ElasticsearchContainer("elastic8").withNetwork(network)
override val schemaRegistryContainer: Option[SchemaRegistryContainer] = None
@@ -84,13 +85,13 @@ class Elastic8Test extends AsyncFlatSpec with AsyncIOSpec with StreamReactorCont
ConnectorConfiguration(
"elastic-sink",
Map(
- "connector.class" -> ConfigValue("com.datamountaineer.streamreactor.connect.elastic8.ElasticSinkConnector"),
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.elastic8.Elastic8SinkConnector"),
"tasks.max" -> ConfigValue(1),
"topics" -> ConfigValue("orders"),
"connect.elastic.protocol" -> ConfigValue("http"),
- "connect.elastic.hosts" -> ConfigValue(container.networkAlias),
+ "connect.elastic.hosts" -> ConfigValue(container.setup.key),
"connect.elastic.port" -> ConfigValue(Integer.valueOf(container.port)),
- "connect.elastic.cluster.name" -> ConfigValue("elasticsearch"),
+ "connect.elastic.cluster.name" -> ConfigValue(container.setup.key),
"connect.elastic.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders"),
"connect.progress.enabled" -> ConfigValue(true),
),
diff --git a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterSelectionTest.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala
similarity index 90%
rename from kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterSelectionTest.scala
rename to kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala
index 91f27c904f..3a71602982 100644
--- a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterSelectionTest.scala
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterSelectionTest.scala
@@ -14,13 +14,15 @@
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar.mock
import org.mockito.MockitoSugar.when
-class ElasticWriterSelectionTest extends ITBase {
+import scala.jdk.CollectionConverters.SetHasAsJava
+
+class ElasticJsonWriterSelectionTest extends ITBase {
"A ElasticWriter should insert into Elastic Search a number of records" in {
@@ -61,7 +63,7 @@ class ElasticWriterSelectionTest extends ITBase {
private def mockContextForAssignment(): Unit = {
//mock the context to return our assignment when called
val context = mock[SinkTaskContext]
- when(context.assignment()).thenReturn(getAssignment)
+ when(context.assignment()).thenReturn(ASSIGNMENT.asJava)
()
}
diff --git a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterTest.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala
similarity index 90%
rename from kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterTest.scala
rename to kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala
index 2d1ebc8356..c9611b9dec 100644
--- a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterTest.scala
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterTest.scala
@@ -14,9 +14,10 @@
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
import com.sksamuel.elastic4s.ElasticDsl._
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterEach
@@ -25,7 +26,7 @@ import org.scalatest.concurrent.Eventually.eventually
import java.util.UUID
import scala.util.Using
-class ElasticWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach {
+class ElasticJsonWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach {
class TestContext {
@@ -56,13 +57,13 @@ class ElasticWriterTest extends ITBase with MockitoSugar with BeforeAndAfterEach
}
- "It should fail writing to a non-existent index when auto creation is disabled" ignore new TestContext {
+ "It should fail writing to a non-existent index when auto creation is disabled" in new TestContext {
val props = getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(autoCreate = false, RandomClusterName)
Using.resource(LocalNode()) {
case LocalNode(_, client) =>
- Using.resource(ElasticJsonWriter(new HttpKElasticClient(client), props)) {
+ Using.resource(createElasticJsonWriter(new Elastic8ClientWrapper(client), props)) {
writer =>
writer.write(TestRecords)
eventually {
diff --git a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ITBase.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
similarity index 67%
rename from kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ITBase.scala
rename to kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
index 4c8be707f9..9705b548d1 100644
--- a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/ITBase.scala
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/ITBase.scala
@@ -14,11 +14,16 @@
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
+import cats.effect.unsafe.implicits.global
import com.sksamuel.elastic4s.ElasticClient
import com.sksamuel.elastic4s.ElasticDsl._
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticJsonWriter
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.connect.data.Schema
@@ -33,12 +38,13 @@ import org.scalatest.BeforeAndAfter
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter._
-import java.util
-import scala.collection.mutable
-import scala.jdk.CollectionConverters.SetHasAsScala
import scala.util.Using
trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
+
+ val configDef = new Elastic8ConfigDef
+ import configDef._
+
val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
val BASIC_AUTH_USERNAME = "usertest"
val BASIC_AUTH_PASSWORD = "userpassword"
@@ -51,18 +57,11 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
-
- //get the assignment of topic partitions for the sinkTask
- def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
+ protected val PARTITION: Int = 12
+ protected val PARTITION2: Int = 13
+ protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
+ protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
+ protected val ASSIGNMENT: Set[TopicPartition] = Set(TOPIC_PARTITION, TOPIC_PARTITION2)
//build a test record schema
def createSchema: Schema =
@@ -109,9 +108,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
//generate some test records
def getTestRecords: Vector[SinkRecord] = {
val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 7).map { i =>
val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -130,9 +128,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getTestRecordsNested: Vector[SinkRecord] = {
val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 7).map { i =>
val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -151,9 +148,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getUpdateTestRecord: Vector[SinkRecord] = {
val schema = createSchema
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 2).map { i =>
val record: Struct = createRecord(schema, a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -172,9 +168,8 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
def getUpdateTestRecordNested: Vector[SinkRecord] = {
val schema = createSchemaNested
- val assignment: mutable.Set[TopicPartition] = getAssignment.asScala
- assignment.flatMap { a =>
+ ASSIGNMENT.flatMap { a =>
(1 to 2).map { i =>
val record: Struct = createRecordNested(a.topic() + "-" + a.partition() + "-" + i)
new SinkRecord(a.topic(),
@@ -192,71 +187,76 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
}
def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY, clusterName)
def getElasticSinkConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY_SELECTION, clusterName)
def getElasticSinkConfigPropsPk(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(QUERY_PK, clusterName)
def getElasticSinkUpdateConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(UPDATE_QUERY, clusterName)
def getElasticSinkUpdateConfigPropsSelection(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
getBaseElasticSinkConfigProps(UPDATE_QUERY_SELECTION, clusterName)
def getBaseElasticSinkConfigProps(
query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
+ "topics" -> TOPIC,
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> query,
)
def getElasticSinkConfigPropsWithDateSuffixAndIndexAutoCreation(
autoCreate: Boolean,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> (QUERY + (if (autoCreate) " AUTOCREATE "
- else "") + " WITHINDEXSUFFIX=_{YYYY-MM-dd}"),
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> (QUERY + (if (autoCreate) " AUTOCREATE "
+ else "") + " WITHINDEXSUFFIX=_{YYYY-MM-dd}"),
)
def getElasticSinkConfigPropsHTTPClient(
auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
): Map[String, String] =
Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> QUERY,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
+ else
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
+ else
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
)
+ def writeRecords(writer: ElasticJsonWriter, records: Vector[SinkRecord]): Unit =
+ writer.write(records).attempt.map {
+ case Left(value) => fail(value)
+ case Right(_) => ()
+ }.unsafeRunSync()
protected def writeAndVerifyTestRecords(
props: Map[String, String],
testRecords: Vector[SinkRecord],
@@ -264,15 +264,16 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
index: String = INDEX,
): Any =
Using.resource(LocalNode()) {
case LocalNode(_, client) =>
- Using.resource(ElasticJsonWriter(new HttpKElasticClient(client), props)) {
+ Using.resource(createElasticJsonWriter(new Elastic8ClientWrapper(client), props)) {
writer =>
//write records to elastic
- writer.write(testRecords)
+ writeRecords(writer, testRecords)
checkCounts(testRecords, client, index)
if (updateRecords.nonEmpty) {
- writer.write(updateRecords)
+ writeRecords(writer, updateRecords)
Thread.sleep(2000)
checkCounts(testRecords, client, index)
}
@@ -287,4 +288,9 @@ trait ITBase extends AnyWordSpec with Matchers with BeforeAndAfter {
res.result.totalHits shouldBe testRecords.size
}
+ protected def createElasticJsonWriter(client: ElasticClientWrapper, props: Map[String, String]): ElasticJsonWriter =
+ ElasticCommonSettingsReader.read(new Elastic8ConfigDef, props).map(new ElasticJsonWriter(client, _)).getOrElse(fail(
+ "Unable to construct writer",
+ ))
+
}
diff --git a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/LocalNode.scala b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala
similarity index 94%
rename from kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/LocalNode.scala
rename to kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala
index fc59d6be3d..83dcc0ae7d 100644
--- a/kafka-connect-elastic8/src/it/scala/com/datamountaineer/streamreactor/connect/elastic8/LocalNode.scala
+++ b/kafka-connect-elastic8/src/it/scala/io/lenses/streamreactor/connect/elastic8/LocalNode.scala
@@ -1,4 +1,4 @@
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
import com.sksamuel.elastic4s.ElasticClient
import com.sksamuel.elastic4s.ElasticProperties
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTask.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTask.scala
deleted file mode 100644
index fb4291ddeb..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTask.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.streamreactor.common.errors.RetryErrorPolicy
-import com.datamountaineer.streamreactor.common.utils.AsciiArtPrinter.printAsciiHeader
-import com.datamountaineer.streamreactor.common.utils.JarManifest
-import com.datamountaineer.streamreactor.common.utils.ProgressCounter
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticSettings
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.kafka.clients.consumer.OffsetAndMetadata
-import org.apache.kafka.common.TopicPartition
-import org.apache.kafka.connect.sink.SinkRecord
-import org.apache.kafka.connect.sink.SinkTask
-
-import java.util
-import scala.jdk.CollectionConverters.IterableHasAsScala
-
-class ElasticSinkTask extends SinkTask with StrictLogging {
- private var writer: Option[ElasticJsonWriter] = None
- private val progressCounter = new ProgressCounter
- private var enableProgress: Boolean = false
- private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
-
- /**
- * Parse the configurations and setup the writer
- */
- override def start(props: util.Map[String, String]): Unit = {
- printAsciiHeader(manifest, "/elastic-ascii.txt")
-
- val conf = if (context.configs().isEmpty) props else context.configs()
-
- ElasticConfig.config.parse(conf)
- val sinkConfig = ElasticConfig(conf)
- enableProgress = sinkConfig.getBoolean(ElasticConfigConstants.PROGRESS_COUNTER_ENABLED)
-
- //if error policy is retry set retry interval
- val settings = ElasticSettings(sinkConfig)
- settings.errorPolicy match {
- case RetryErrorPolicy() => context.timeout(sinkConfig.getInt(ElasticConfigConstants.ERROR_RETRY_INTERVAL).toLong)
- case _ =>
- }
-
- writer = Some(ElasticWriter(sinkConfig))
- }
-
- /**
- * Pass the SinkRecords to the writer for Writing
- */
- override def put(records: util.Collection[SinkRecord]): Unit = {
- require(writer.nonEmpty, "Writer is not set!")
- val seq = records.asScala.toVector
- writer.foreach(_.write(seq))
-
- if (enableProgress) {
- progressCounter.update(seq)
- }
- }
-
- /**
- * Clean up writer
- */
- override def stop(): Unit = {
- logger.info("Stopping Elastic sink.")
- writer.foreach(w => w.close())
- progressCounter.empty()
- }
-
- override def flush(map: util.Map[TopicPartition, OffsetAndMetadata]): Unit =
- logger.info("Flushing Elastic Sink")
-
- override def version: String = manifest.version()
-}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriter.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriter.scala
deleted file mode 100644
index ebe05022a9..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriter.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticSettings
-import com.sksamuel.elastic4s.ElasticNodeEndpoint
-
-import scala.util.Failure
-import scala.util.Success
-import scala.util.Try
-
-object ElasticWriter {
-
- /**
- * Construct a JSONWriter.
- *
- * @param config An elasticSinkConfig to extract settings from.
- * @return An ElasticJsonWriter to write records from Kafka to ElasticSearch.
- */
- def apply(config: ElasticConfig): ElasticJsonWriter = {
-
- val hostNames = config.getString(ElasticConfigConstants.HOSTS).split(",")
- val protocol = config.getString(ElasticConfigConstants.PROTOCOL)
- val port = config.getInt(ElasticConfigConstants.ES_PORT)
- val prefix = Try(config.getString(ElasticConfigConstants.ES_PREFIX)) match {
- case Success("") => None
- case Success(configString) => Some(configString)
- case Failure(_) => None
- }
-
- val settings = ElasticSettings(config)
-
- new ElasticJsonWriter(
- KElasticClient.createHttpClient(settings, endpoints(hostNames, protocol, port, prefix).toIndexedSeq),
- settings,
- )
- }
-
- private def endpoints(hostNames: Array[String], protocol: String, port: Integer, prefix: Option[String]) =
- hostNames
- .map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
-}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/KElasticClient.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/KElasticClient.scala
deleted file mode 100644
index 07c4273161..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/KElasticClient.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.kcql.Kcql
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticSettings
-import com.datamountaineer.streamreactor.connect.elastic8.indexname.CreateIndex.getIndexName
-import com.sksamuel.elastic4s.requests.bulk.BulkRequest
-import com.sksamuel.elastic4s.requests.bulk.BulkResponse
-import com.sksamuel.elastic4s.ElasticClient
-import com.sksamuel.elastic4s.ElasticNodeEndpoint
-import com.sksamuel.elastic4s.ElasticProperties
-import com.sksamuel.elastic4s.Response
-import com.sksamuel.elastic4s.http.JavaClient
-import com.typesafe.scalalogging.StrictLogging
-import org.apache.http.auth.AuthScope
-import org.apache.http.auth.UsernamePasswordCredentials
-import org.apache.http.client.config.RequestConfig.Builder
-import org.apache.http.impl.client.BasicCredentialsProvider
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
-
-import scala.concurrent.Future
-
-trait KElasticClient extends AutoCloseable {
- def index(kcql: Kcql): Unit
-
- def execute(definition: BulkRequest): Future[Any]
-}
-
-object KElasticClient extends StrictLogging {
-
- def createHttpClient(settings: ElasticSettings, endpoints: Seq[ElasticNodeEndpoint]): KElasticClient =
- if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
- lazy val provider = {
- val provider = new BasicCredentialsProvider
- val credentials =
- new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
- provider.setCredentials(AuthScope.ANY, credentials)
- provider
- }
-
- val javaClient = JavaClient(
- ElasticProperties(endpoints),
- (requestConfigBuilder: Builder) => requestConfigBuilder,
- (httpClientBuilder: HttpAsyncClientBuilder) => httpClientBuilder.setDefaultCredentialsProvider(provider),
- )
-
- val client: ElasticClient = ElasticClient(javaClient)
- new HttpKElasticClient(client)
- } else {
- val client: ElasticClient = ElasticClient(JavaClient(ElasticProperties(endpoints)))
- new HttpKElasticClient(client)
- }
-}
-
-class HttpKElasticClient(client: ElasticClient) extends KElasticClient {
-
- import com.sksamuel.elastic4s.ElasticDsl._
-
- override def index(kcql: Kcql): Unit = {
- require(kcql.isAutoCreate, s"Auto-creating indexes hasn't been enabled for target:${kcql.getTarget}")
-
- val indexName = getIndexName(kcql)
- client.execute {
- createIndex(indexName)
- }
- ()
- }
-
- override def execute(definition: BulkRequest): Future[Response[BulkResponse]] = client.execute(definition)
-
- override def close(): Unit = client.close()
-}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfig.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfig.scala
deleted file mode 100644
index 24ee8858f2..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfig.scala
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8.config
-
-import java.util
-import com.datamountaineer.kcql.Kcql
-import com.datamountaineer.streamreactor.common.config.base.traits.BaseConfig
-import com.datamountaineer.streamreactor.common.config.base.traits.ErrorPolicySettings
-import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
-import com.datamountaineer.streamreactor.common.config.base.traits.WriteTimeoutSettings
-import org.apache.kafka.common.config.ConfigDef
-import org.apache.kafka.common.config.ConfigDef.Importance
-import org.apache.kafka.common.config.ConfigDef.Type
-
-object ElasticConfig {
-
- val config: ConfigDef = new ConfigDef()
- .define(
- ElasticConfigConstants.PROTOCOL,
- Type.STRING,
- ElasticConfigConstants.PROTOCOL_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PROTOCOL_DOC,
- "Connection",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROTOCOL,
- )
- .define(
- ElasticConfigConstants.HOSTS,
- Type.STRING,
- ElasticConfigConstants.HOSTS_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.HOSTS_DOC,
- "Connection",
- 2,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PORT,
- Type.INT,
- ElasticConfigConstants.ES_PORT_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PORT_DOC,
- "Connection",
- 3,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_PREFIX,
- Type.STRING,
- ElasticConfigConstants.ES_PREFIX_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_PREFIX_DOC,
- "Connection",
- 4,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.HOSTS,
- )
- .define(
- ElasticConfigConstants.ES_CLUSTER_NAME,
- Type.STRING,
- ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ES_CLUSTER_NAME_DOC,
- "Connection",
- 5,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ES_CLUSTER_NAME,
- )
- .define(
- ElasticConfigConstants.WRITE_TIMEOUT_CONFIG,
- Type.INT,
- ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DOC,
- "Connection",
- 6,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.WRITE_TIMEOUT_DISPLAY,
- )
- .define(
- ElasticConfigConstants.BATCH_SIZE_CONFIG,
- Type.INT,
- ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DOC,
- "Connection",
- 7,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.BATCH_SIZE_DISPLAY,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
- "Connection",
- 8,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME,
- )
- .define(
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- Type.STRING,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
- "Connection",
- 9,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD,
- )
- .define(
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- Type.STRING,
- ElasticConfigConstants.ERROR_POLICY_DEFAULT,
- Importance.HIGH,
- ElasticConfigConstants.ERROR_POLICY_DOC,
- "Error",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.ERROR_POLICY_CONFIG,
- )
- .define(
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- Type.INT,
- ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.NBR_OF_RETRIES_DOC,
- "Error",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.NBR_OF_RETRIES_CONFIG,
- )
- .define(
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- Type.INT,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL_DOC,
- "Error",
- 3,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.ERROR_RETRY_INTERVAL,
- )
- .define(
- ElasticConfigConstants.KCQL,
- Type.STRING,
- Importance.HIGH,
- ElasticConfigConstants.KCQL_DOC,
- "KCQL",
- 1,
- ConfigDef.Width.LONG,
- ElasticConfigConstants.KCQL,
- )
- .define(
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- Type.STRING,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- Importance.LOW,
- ElasticConfigConstants.PK_JOINER_SEPARATOR_DOC,
- "KCQL",
- 2,
- ConfigDef.Width.SHORT,
- ElasticConfigConstants.PK_JOINER_SEPARATOR,
- )
- .define(
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED,
- Type.BOOLEAN,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DEFAULT,
- Importance.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DOC,
- "Metrics",
- 1,
- ConfigDef.Width.MEDIUM,
- ElasticConfigConstants.PROGRESS_COUNTER_ENABLED_DISPLAY,
- )
-}
-
-/**
- * ElasticSinkConfig
- *
- * Holds config, extends AbstractConfig.
- */
-case class ElasticConfig(props: util.Map[String, String])
- extends BaseConfig(ElasticConfigConstants.CONNECTOR_PREFIX, ElasticConfig.config, props)
- with WriteTimeoutSettings
- with ErrorPolicySettings
- with NumberRetriesSettings {
- val kcqlConstant: String = ElasticConfigConstants.KCQL
-
- def getKcql(): Seq[Kcql] =
- getString(kcqlConstant).split(";").filter(_.trim.nonEmpty).map(Kcql.parse).toIndexedSeq
-}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfigConstants.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfigConstants.scala
deleted file mode 100644
index 0d984d9da8..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticConfigConstants.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8.config
-
-import com.datamountaineer.streamreactor.common.config.base.const.TraitConfigConst._
-
-object ElasticConfigConstants {
-
- val CONNECTOR_PREFIX = "connect.elastic"
-
- val PROTOCOL = s"${CONNECTOR_PREFIX}.protocol"
- val PROTOCOL_DOC = "URL protocol (http, https)"
- val PROTOCOL_DEFAULT = "http"
-
- val HOSTS = s"${CONNECTOR_PREFIX}.${CONNECTION_HOSTS_SUFFIX}"
- val HOSTS_DOC = "List of hostnames for Elastic Search cluster node, not including protocol or port."
- val HOSTS_DEFAULT = "localhost"
-
- val ES_PORT = s"${CONNECTOR_PREFIX}.${CONNECTION_PORT_SUFFIX}"
- val ES_PORT_DOC = "Port on which Elastic Search node listens on"
- val ES_PORT_DEFAULT = 9300
-
- val ES_PREFIX = s"${CONNECTOR_PREFIX}.tableprefix"
- val ES_PREFIX_DOC = "Table prefix (optional)"
- val ES_PREFIX_DEFAULT = ""
-
- val ES_CLUSTER_NAME = s"${CONNECTOR_PREFIX}.${CLUSTER_NAME_SUFFIX}"
- val ES_CLUSTER_NAME_DOC = "Name of the elastic search cluster, used in local mode for setting the connection"
- val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
-
- val KCQL = s"${CONNECTOR_PREFIX}.${KCQL_PROP_SUFFIX}"
- val KCQL_DOC = "KCQL expression describing field selection and routes."
-
- val WRITE_TIMEOUT_CONFIG = s"${CONNECTOR_PREFIX}.${WRITE_TIMEOUT_SUFFIX}"
- val WRITE_TIMEOUT_DOC = "The time to wait in millis. Default is 5 minutes."
- val WRITE_TIMEOUT_DISPLAY = "Write timeout"
- val WRITE_TIMEOUT_DEFAULT = 300000
-
- val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$CONNECTOR_PREFIX.use.http.username"
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC = "Username if HTTP Basic Auth required default is null."
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$CONNECTOR_PREFIX.use.http.password"
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
- val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password if HTTP Basic Auth required default is null."
-
- val NBR_OF_RETRIES_CONFIG = s"${CONNECTOR_PREFIX}.${MAX_RETRIES_PROP_SUFFIX}"
- val NBR_OF_RETRIES_DOC = "The maximum number of times to try the write again."
- val NBR_OF_RETIRES_DEFAULT = 20
-
- val ERROR_POLICY_CONFIG = s"${CONNECTOR_PREFIX}.${ERROR_POLICY_PROP_SUFFIX}"
- val ERROR_POLICY_DOC: String =
- """Specifies the action to be taken if an error occurs while inserting the data
- |There are two available options:
- |NOOP - the error is swallowed
- |THROW - the error is allowed to propagate.
- |RETRY - The exception causes the Connect framework to retry the message. The number of retries is based on
- |The error will be logged automatically""".stripMargin
- val ERROR_POLICY_DEFAULT = "THROW"
-
- val BATCH_SIZE_CONFIG = s"$CONNECTOR_PREFIX.$BATCH_SIZE_PROP_SUFFIX"
- val BATCH_SIZE_DOC =
- "How many records to process at one time. As records are pulled from Kafka it can be 100k+ which will not be feasible to throw at Elastic search at once"
- val BATCH_SIZE_DISPLAY = "Batch size"
- val BATCH_SIZE_DEFAULT = 4000
-
- val ERROR_RETRY_INTERVAL = s"${CONNECTOR_PREFIX}.${RETRY_INTERVAL_PROP_SUFFIX}"
- val ERROR_RETRY_INTERVAL_DOC = "The time in milliseconds between retries."
- val ERROR_RETRY_INTERVAL_DEFAULT = "60000"
-
- /*
- val INDEX_NAME_SUFFIX = s"${CONNECTOR_PREFIX}.index.suffix"
- val INDEX_NAME_SUFFIX_DOC = "Suffix to append to the index name. Supports date time notation inside curly brackets. E.g. 'abc_{YYYY-MM-dd}_def'"
- val INDEX_NAME_SUFFIX_DEFAULT: String = null
-
- val AUTO_CREATE_INDEX = s"${CONNECTOR_PREFIX}.index.auto.create"
- val AUTO_CREATE_INDEX_DOC = "The flag enables/disables auto creating the ElasticSearch index. Boolean value required. Defaults to TRUE."
- val AUTO_CREATE_INDEX_DEFAULT = true
-
- val DOCUMENT_TYPE = s"${CONNECTOR_PREFIX}.document.type"
- val DOCUMENT_TYPE_DOC = "Sets the ElasticSearch document type. See https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-type-field.html for more info."
- val DOCUMENT_TYPE_DEFAULT: String = null
- */
-
- val PROGRESS_COUNTER_ENABLED = PROGRESS_ENABLED_CONST
- val PROGRESS_COUNTER_ENABLED_DOC = "Enables the output for how many records have been processed"
- val PROGRESS_COUNTER_ENABLED_DEFAULT = false
- val PROGRESS_COUNTER_ENABLED_DISPLAY = "Enable progress counter"
-
- val PK_JOINER_SEPARATOR = s"$CONNECTOR_PREFIX.pk.separator"
- val PK_JOINER_SEPARATOR_DOC = "Separator used when have more that one field in PK"
- val PK_JOINER_SEPARATOR_DEFAULT = "-"
-}
diff --git a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticSettings.scala b/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticSettings.scala
deleted file mode 100644
index ebf0893ce8..0000000000
--- a/kafka-connect-elastic8/src/main/scala/com/datamountaineer/streamreactor/connect/elastic8/config/ElasticSettings.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8.config
-
-import com.datamountaineer.kcql.Kcql
-import com.datamountaineer.streamreactor.common.errors.ErrorPolicy
-
-/**
- * Created by andrew@datamountaineer.com on 13/05/16.
- * stream-reactor-maven
- */
-case class ElasticSettings(
- kcqls: Seq[Kcql],
- errorPolicy: ErrorPolicy,
- taskRetries: Int = ElasticConfigConstants.NBR_OF_RETIRES_DEFAULT,
- writeTimeout: Int = ElasticConfigConstants.WRITE_TIMEOUT_DEFAULT,
- batchSize: Int = ElasticConfigConstants.BATCH_SIZE_DEFAULT,
- pkJoinerSeparator: String = ElasticConfigConstants.PK_JOINER_SEPARATOR_DEFAULT,
- httpBasicAuthUsername: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
- httpBasicAuthPassword: String = ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
-)
-
-object ElasticSettings {
-
- def apply(config: ElasticConfig): ElasticSettings = {
- val kcql = config.getKcql()
- val pkJoinerSeparator = config.getString(ElasticConfigConstants.PK_JOINER_SEPARATOR)
- val writeTimeout = config.getWriteTimeout
- val errorPolicy = config.getErrorPolicy
- val retries = config.getNumberRetries
- val httpBasicAuthUsername = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME)
- val httpBasicAuthPassword = config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD)
-
- val batchSize = config.getInt(ElasticConfigConstants.BATCH_SIZE_CONFIG)
-
- ElasticSettings(kcql,
- errorPolicy,
- retries,
- writeTimeout,
- batchSize,
- pkJoinerSeparator,
- httpBasicAuthUsername,
- httpBasicAuthPassword,
- )
- }
-}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala
new file mode 100644
index 0000000000..e5c1eab069
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkConnector.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkConnector
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+
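+/**
+ * Elasticsearch 8 sink connector entry point: wires the shared ElasticSinkConnector
+ * plumbing to the Elastic8-specific task class and configuration definition.
+ */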
+class Elastic8SinkConnector
+ extends ElasticSinkConnector[
+ Elastic8Settings,
+ Elastic8ConfigDef,
+ Elastic8SinkTask,
+ ](classOf[Elastic8SinkTask], new Elastic8ConfigDef) {}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala
new file mode 100644
index 0000000000..8dc07a4ad2
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/Elastic8SinkTask.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkTask
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8SettingsReader
+import io.lenses.streamreactor.connect.elastic8.writers.Elastic8ClientCreator
+
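+/**
+ * Elasticsearch 8 sink task: delegates to the shared ElasticSinkTask, supplying the
+ * Elastic8 settings reader, client creator, configuration definition and ASCII banner resource.
+ */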
+class Elastic8SinkTask
+ extends ElasticSinkTask[Elastic8Settings, Elastic8ConfigDef](
+ Elastic8SettingsReader,
+ Elastic8ClientCreator,
+ new Elastic8ConfigDef(),
+ "/elastic-ascii.txt",
+ ) {}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala
new file mode 100644
index 0000000000..f5cce7425b
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/client/Elastic8ClientWrapper.scala
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.client
+
+import cats.effect.IO
+import com.fasterxml.jackson.databind.JsonNode
+import com.sksamuel.elastic4s.ElasticDsl.{ createIndex => indexCreate, _ }
+import com.sksamuel.elastic4s.Index
+import com.sksamuel.elastic4s.Indexable
+import com.sksamuel.elastic4s.{ ElasticClient => UnderlyingElasticClient }
+import com.typesafe.scalalogging.LazyLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+
+class Elastic8ClientWrapper(client: UnderlyingElasticClient) extends ElasticClientWrapper with LazyLogging {
+
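+ // Renders a Jackson JsonNode as its raw JSON string when elastic4s needs an Indexable document source.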
+ private case object IndexableJsonNode extends Indexable[JsonNode] {
+ override def json(t: JsonNode): String = t.toString
+ }
+
+ override def createIndex(indexName: String): IO[Unit] =
+ IO.fromFuture {
+ IO {
+ client.execute {
+ indexCreate(indexName)
+ }
+ }
+ } *> IO.unit
+
+ override def close(): IO[Unit] = IO {
+ client.close()
+ ()
+ }.recover { t: Throwable =>
+ logger.error("Error during Elasticsearch client shutdown", t)
+ ()
+ }
+
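+ // Translates the connector's generic insert/upsert requests into elastic4s index/update
+ // operations and submits them as a single, immediately refreshed bulk request.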
+ override def execute(reqs: Seq[Request]): IO[Unit] =
+ IO.fromFuture {
+ IO {
+ val indexes = reqs.map {
+ case InsertRequest(index, id, json, pipeline) =>
+ indexInto(new Index(index))
+ .id(id)
+ .pipeline(pipeline)
+ .source(json.toString)
+ case UpsertRequest(index, id, json) =>
+ updateById(new Index(index), id)
+ .docAsUpsert(json)(IndexableJsonNode)
+ }
+ val bulkRequest = bulk(indexes).refreshImmediately
+ client.execute(bulkRequest)
+ }
+ } *> IO.unit
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala
new file mode 100644
index 0000000000..8476446b63
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8ConfigDef.scala
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import com.datamountaineer.streamreactor.common.config.base.const.TraitConfigConst._
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
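+/**
+ * Extends the shared ElasticConfigDef with the Elasticsearch 8 specific connection keys:
+ * protocol, hosts, port, table prefix, cluster name and HTTP basic auth credentials.
+ */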
+class Elastic8ConfigDef extends ElasticConfigDef("connect.elastic") {
+
+ val PROTOCOL = s"$connectorPrefix.protocol"
+ val PROTOCOL_DOC = "URL protocol (http, https)"
+ val PROTOCOL_DEFAULT = "http"
+
+ val HOSTS = s"$connectorPrefix.$CONNECTION_HOSTS_SUFFIX"
+ val HOSTS_DOC = "List of hostnames for the Elasticsearch cluster nodes, not including protocol or port."
+ val HOSTS_DEFAULT = "localhost"
+
+ val ES_PORT = s"$connectorPrefix.$CONNECTION_PORT_SUFFIX"
+ val ES_PORT_DOC = "Port on which the Elasticsearch node listens"
+ val ES_PORT_DEFAULT = 9300
+
+ val ES_PREFIX = s"$connectorPrefix.tableprefix"
+ val ES_PREFIX_DOC = "Table prefix (optional)"
+ val ES_PREFIX_DEFAULT = ""
+
+ val ES_CLUSTER_NAME = s"$connectorPrefix.$CLUSTER_NAME_SUFFIX"
+ val ES_CLUSTER_NAME_DOC = "Name of the elastic search cluster, used in local mode for setting the connection"
+ val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
+
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$connectorPrefix.use.http.username"
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC = "Username if HTTP Basic Auth is required; default is null."
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$connectorPrefix.use.http.password"
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password if HTTP Basic Auth is required; default is null."
+
+ override def configDef: ConfigDef = super.configDef
+ .define(
+ PROTOCOL,
+ Type.STRING,
+ PROTOCOL_DEFAULT,
+ Importance.LOW,
+ PROTOCOL_DOC,
+ "Connection",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROTOCOL,
+ )
+ .define(
+ HOSTS,
+ Type.STRING,
+ HOSTS_DEFAULT,
+ Importance.HIGH,
+ HOSTS_DOC,
+ "Connection",
+ 2,
+ ConfigDef.Width.MEDIUM,
+ HOSTS,
+ )
+ .define(
+ ES_PORT,
+ Type.INT,
+ ES_PORT_DEFAULT,
+ Importance.HIGH,
+ ES_PORT_DOC,
+ "Connection",
+ 3,
+ ConfigDef.Width.MEDIUM,
+ ES_PORT,
+ )
+ .define(
+ ES_PREFIX,
+ Type.STRING,
+ ES_PREFIX_DEFAULT,
+ Importance.HIGH,
+ ES_PREFIX_DOC,
+ "Connection",
+ 4,
+ ConfigDef.Width.MEDIUM,
+ ES_PREFIX,
+ )
+ .define(
+ ES_CLUSTER_NAME,
+ Type.STRING,
+ ES_CLUSTER_NAME_DEFAULT,
+ Importance.HIGH,
+ ES_CLUSTER_NAME_DOC,
+ "Connection",
+ 5,
+ ConfigDef.Width.MEDIUM,
+ ES_CLUSTER_NAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
+ "Connection",
+ 8,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
+ "Connection",
+ 9,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ )
+
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala
new file mode 100644
index 0000000000..616e9ec9d6
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8Settings.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+
+/**
+ * Created by andrew@datamountaineer.com on 13/05/16.
+ * stream-reactor-maven
+ */
+case class Elastic8Settings(
+ elasticCommonSettings: ElasticCommonSettings,
+ httpBasicAuthUsername: String,
+ httpBasicAuthPassword: String,
+ hostnames: Seq[String],
+ protocol: String,
+ port: Int,
+ prefix: Option[String] = Option.empty,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = elasticCommonSettings
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala
new file mode 100644
index 0000000000..d33168a657
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/config/Elastic8SettingsReader.scala
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+
+import scala.util.Failure
+import scala.util.Success
+import scala.util.Try
+
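+/**
+ * Builds Elastic8Settings from the connector properties: the common settings are read first,
+ * then the hosts, protocol, port, optional table prefix and HTTP basic auth credentials.
+ */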
+object Elastic8SettingsReader extends ElasticSettingsReader[Elastic8Settings, Elastic8ConfigDef] {
+ override def read(configDef: Elastic8ConfigDef, props: Map[String, String]): Either[Throwable, Elastic8Settings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+ commonSettings <- ElasticCommonSettingsReader.read(config.configDef, props)
+ httpBasicAuthUsername = config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_USERNAME)
+ httpBasicAuthPassword = config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_PASSWORD)
+ hostNames = config.getString(configDef.HOSTS).split(",").toSeq
+ protocol = config.getString(configDef.PROTOCOL)
+ port = config.getInt(configDef.ES_PORT)
+ prefix = Try(config.getString(configDef.ES_PREFIX)) match {
+ case Success("") => None
+ case Success(configString) => Some(configString)
+ case Failure(_) => None
+ }
+ } yield {
+ Elastic8Settings(
+ commonSettings,
+ httpBasicAuthUsername,
+ httpBasicAuthPassword,
+ hostNames,
+ protocol,
+ port,
+ prefix,
+ )
+ }
+
+}
diff --git a/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala
new file mode 100644
index 0000000000..e6760ddfd6
--- /dev/null
+++ b/kafka-connect-elastic8/src/main/scala/io/lenses/streamreactor/connect/elastic8/writers/Elastic8ClientCreator.scala
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8.writers
+
+import com.sksamuel.elastic4s.http.JavaClient
+import com.sksamuel.elastic4s.ElasticClient
+import com.sksamuel.elastic4s.ElasticNodeEndpoint
+import com.sksamuel.elastic4s.ElasticProperties
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.elastic8.client.Elastic8ClientWrapper
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8Settings
+import org.apache.http.auth.AuthScope
+import org.apache.http.auth.UsernamePasswordCredentials
+import org.apache.http.client.config.RequestConfig.Builder
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+
+import scala.util.Try
+
+object Elastic8ClientCreator extends ElasticClientCreator[Elastic8Settings] {
+
+ /**
+ * Constructs an Elasticsearch client wrapper from the sink settings.
+ *
+ * @param settings The Elastic8Settings to extract connection and authentication details from.
+ * @return An ElasticClientWrapper used to write records from Kafka to Elasticsearch.
+ */
+ override def create(settings: Elastic8Settings): Either[Throwable, ElasticClientWrapper] = {
+ Try {
+
+ def endpoints(
+ hostNames: Seq[String],
+ protocol: String,
+ port: Integer,
+ prefix: Option[String],
+ ): Seq[ElasticNodeEndpoint] =
+ hostNames
+ .map(hostname => ElasticNodeEndpoint(protocol, hostname, port, prefix))
+
+ val elasticProperties =
+ ElasticProperties(endpoints(settings.hostnames, settings.protocol, settings.port, settings.prefix).toIndexedSeq)
+ val javaClient = if (settings.httpBasicAuthUsername.nonEmpty && settings.httpBasicAuthPassword.nonEmpty) {
+ lazy val provider = {
+ val provider = new BasicCredentialsProvider
+ val credentials =
+ new UsernamePasswordCredentials(settings.httpBasicAuthUsername, settings.httpBasicAuthPassword)
+ provider.setCredentials(AuthScope.ANY, credentials)
+ provider
+ }
+
+ JavaClient(
+ elasticProperties,
+ (requestConfigBuilder: Builder) => requestConfigBuilder,
+ (httpClientBuilder: HttpAsyncClientBuilder) => httpClientBuilder.setDefaultCredentialsProvider(provider),
+ )
+
+ } else {
+ JavaClient(
+ elasticProperties,
+ )
+ }
+ new Elastic8ClientWrapper(ElasticClient(javaClient))
+ }
+ }.toEither
+}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticConfigTest.scala b/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticConfigTest.scala
deleted file mode 100644
index 130c6cf5ea..0000000000
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticConfigTest.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
-
-class ElasticConfigTest extends TestBase {
- "A ElasticConfig should return the client mode and hostnames" in {
- val config = new ElasticConfig(getElasticSinkConfigProps())
- config.getString(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
- config.getString(ElasticConfigConstants.ES_CLUSTER_NAME) shouldBe ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT
- config.getString(ElasticConfigConstants.KCQL) shouldBe QUERY
- }
-
- "A ElasticConfig should return the http basic auth username and password when set" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME) shouldBe BASIC_AUTH_USERNAME
- config.getString(ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD) shouldBe BASIC_AUTH_PASSWORD
- }
-}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/TestBase.scala b/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/TestBase.scala
deleted file mode 100644
index 5117fd63cd..0000000000
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/TestBase.scala
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2017-2023 Lenses.io Ltd
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
-import org.apache.kafka.common.TopicPartition
-import org.scalatest.BeforeAndAfter
-import org.scalatest.matchers.should.Matchers
-import org.scalatest.wordspec.AnyWordSpec
-
-import java.time.LocalDateTime
-import java.time.format.DateTimeFormatter._
-import java.util
-import scala.jdk.CollectionConverters.MapHasAsJava
-
-trait TestBase extends AnyWordSpec with Matchers with BeforeAndAfter {
- val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
- val BASIC_AUTH_USERNAME = "usertest"
- val BASIC_AUTH_PASSWORD = "userpassword"
- val TOPIC = "sink_test"
- val INDEX = "index_andrew"
- val INDEX_WITH_DATE = s"${INDEX}_${LocalDateTime.now.format(ofPattern("YYYY-MM-dd"))}"
- val QUERY = s"INSERT INTO $INDEX SELECT * FROM $TOPIC"
- val QUERY_PK = s"INSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val QUERY_SELECTION = s"INSERT INTO $INDEX SELECT id, string_field FROM $TOPIC"
- val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
- val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
-
- protected val PARTITION: Int = 12
- protected val PARTITION2: Int = 13
- protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
- protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
- protected val ASSIGNMENT: util.Set[TopicPartition] = new util.HashSet[TopicPartition]
- //Set topic assignments
- ASSIGNMENT.add(TOPIC_PARTITION)
- ASSIGNMENT.add(TOPIC_PARTITION2)
-
- //get the assignment of topic partitions for the sinkTask
- def getAssignment: util.Set[TopicPartition] =
- ASSIGNMENT
-
- def getElasticSinkConfigProps(
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): util.Map[String, String] =
- getBaseElasticSinkConfigProps(QUERY, clusterName)
-
- def getBaseElasticSinkConfigProps(
- query: String,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): util.Map[String, String] =
- Map(
- "topics" -> TOPIC,
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> query,
- ).asJava
-
- def getElasticSinkConfigPropsHTTPClient(
- auth: Boolean = false,
- clusterName: String = ElasticConfigConstants.ES_CLUSTER_NAME_DEFAULT,
- ): util.Map[String, String] =
- Map(
- ElasticConfigConstants.HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
- ElasticConfigConstants.ES_CLUSTER_NAME -> clusterName,
- ElasticConfigConstants.PROTOCOL -> ElasticConfigConstants.PROTOCOL_DEFAULT,
- ElasticConfigConstants.KCQL -> QUERY,
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
- else
- ElasticConfigConstants.CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
- ).asJava
-}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/CreateIndexTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
similarity index 91%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/CreateIndexTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
index f9db238d62..1c2df5e3ca 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/CreateIndexTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/CreateIndexTest.scala
@@ -13,10 +13,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
import com.datamountaineer.kcql.Kcql
-import com.datamountaineer.streamreactor.connect.elastic8.indexname.CreateIndex
+import io.lenses.streamreactor.connect.elastic.common.indexname.CreateIndex
import org.joda.time.DateTime
import org.joda.time.DateTimeZone
import org.scalatest.matchers.should.Matchers
diff --git a/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala
new file mode 100644
index 0000000000..82c651a68c
--- /dev/null
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticConfigTest.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+
+class ElasticConfigTest extends TestBase {
+ import configDef._
+
+ "A ElasticConfig should return the client mode and hostnames" in {
+ val config = ElasticConfig(configDef, configDef.connectorPrefix, getElasticSinkConfigProps())
+ config.getString(HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
+ config.getString(ES_CLUSTER_NAME) shouldBe ES_CLUSTER_NAME_DEFAULT
+ config.getString(KCQL) shouldBe QUERY
+ }
+
+ "A ElasticConfig should return the http basic auth username and password when set" in {
+ val config = ElasticConfig(configDef, configDef.connectorPrefix, getElasticSinkConfigPropsHTTPClient(auth = true))
+ config.getString(CLIENT_HTTP_BASIC_AUTH_USERNAME) shouldBe BASIC_AUTH_USERNAME
+ config.getString(CLIENT_HTTP_BASIC_AUTH_PASSWORD) shouldBe BASIC_AUTH_PASSWORD
+ }
+}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterCredentialsTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
similarity index 53%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterCredentialsTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
index 851ba21011..efa200f27f 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticWriterCredentialsTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticJsonWriterCredentialsTest.scala
@@ -13,17 +13,19 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfig
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticSettings
+import io.lenses.streamreactor.connect.elastic8
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import org.scalatest.EitherValues
-class ElasticWriterCredentialsTest extends TestBase {
+class ElasticJsonWriterCredentialsTest extends TestBase with EitherValues {
"A writer should be using HTTP is set with HTTP Basic Auth Credentials" in {
- val config = new ElasticConfig(getElasticSinkConfigPropsHTTPClient(auth = true))
- val settings = ElasticSettings(config)
- settings.httpBasicAuthUsername shouldBe BASIC_AUTH_USERNAME
- settings.httpBasicAuthPassword shouldBe BASIC_AUTH_PASSWORD
+ val configDef = new Elastic8ConfigDef()
+ val settings =
+ elastic8.config.Elastic8SettingsReader.read(configDef, getElasticSinkConfigPropsHTTPClient(auth = true))
+ settings.value.httpBasicAuthUsername shouldBe BASIC_AUTH_USERNAME
+ settings.value.httpBasicAuthPassword shouldBe BASIC_AUTH_PASSWORD
}
}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
similarity index 68%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
index af0c7511da..cab8068619 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkConnectorTest.scala
@@ -13,26 +13,30 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
-
-import com.datamountaineer.streamreactor.connect.elastic8.config.ElasticConfigConstants
+package io.lenses.streamreactor.connect.elastic8
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsJava
class ElasticSinkConnectorTest extends TestBase {
+
+ override val configDef = new Elastic8ConfigDef()
+ import configDef._
+
"Should start a Elastic Search Connector" in {
//get config
val config = getElasticSinkConfigProps()
//get connector
- val connector = new ElasticSinkConnector()
+ val connector = new Elastic8SinkConnector()
//start with config
- connector.start(config)
+ connector.start(config.asJava)
//check config
val taskConfigs = connector.taskConfigs(10)
- taskConfigs.asScala.head.get(ElasticConfigConstants.HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
+ taskConfigs.asScala.head.get(HOSTS) shouldBe ELASTIC_SEARCH_HOSTNAMES
taskConfigs.size() shouldBe 10
//check connector
- connector.taskClass() shouldBe classOf[ElasticSinkTask]
+ connector.taskClass() shouldBe classOf[Elastic8SinkTask]
connector.stop()
}
}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
similarity index 88%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
index fb2ac5abb4..aa790c2739 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/ElasticSinkTaskTest.scala
@@ -13,11 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8
+package io.lenses.streamreactor.connect.elastic8
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar
+import scala.jdk.CollectionConverters.MapHasAsJava
+
class ElasticSinkTaskTest extends TestBase with MockitoSugar {
"A ElasticSinkTask should start and write to Elastic Search" in {
//mock the context to return our assignment when called
@@ -26,13 +28,13 @@ class ElasticSinkTaskTest extends TestBase with MockitoSugar {
//get config
val config = getElasticSinkConfigProps()
//get task
- val task = new ElasticSinkTask()
+ val task = new Elastic8SinkTask()
//initialise the tasks context
task.initialize(context)
//check version
task.version() shouldBe ""
//start task
- task.start(config)
+ task.start(config.asJava)
//simulate the call from Connect
//task.put(testRecords.asJava)
//stop task
diff --git a/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala
new file mode 100644
index 0000000000..342942ef6f
--- /dev/null
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/TestBase.scala
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.elastic8
+
+import io.lenses.streamreactor.connect.elastic8.config.Elastic8ConfigDef
+import org.apache.kafka.common.TopicPartition
+import org.scalatest.BeforeAndAfter
+import org.scalatest.matchers.should.Matchers
+import org.scalatest.wordspec.AnyWordSpec
+
+import java.time.LocalDateTime
+import java.time.format.DateTimeFormatter._
+import java.util
+import scala.jdk.CollectionConverters.SetHasAsJava
+
+trait TestBase extends AnyWordSpec with Matchers with BeforeAndAfter {
+
+ val configDef = new Elastic8ConfigDef()
+ import configDef._
+
+ val ELASTIC_SEARCH_HOSTNAMES = "localhost:9300"
+ val BASIC_AUTH_USERNAME = "usertest"
+ val BASIC_AUTH_PASSWORD = "userpassword"
+ val TOPIC = "sink_test"
+ val INDEX = "index_andrew"
+ val INDEX_WITH_DATE = s"${INDEX}_${LocalDateTime.now.format(ofPattern("YYYY-MM-dd"))}"
+ val QUERY = s"INSERT INTO $INDEX SELECT * FROM $TOPIC"
+ val QUERY_PK = s"INSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
+ val QUERY_SELECTION = s"INSERT INTO $INDEX SELECT id, string_field FROM $TOPIC"
+ val UPDATE_QUERY = s"UPSERT INTO $INDEX SELECT * FROM $TOPIC PK id"
+ val UPDATE_QUERY_SELECTION = s"UPSERT INTO $INDEX SELECT id, string_field FROM $TOPIC PK id"
+
+ protected val PARTITION: Int = 12
+ protected val PARTITION2: Int = 13
+ protected val TOPIC_PARTITION: TopicPartition = new TopicPartition(TOPIC, PARTITION)
+ protected val TOPIC_PARTITION2: TopicPartition = new TopicPartition(TOPIC, PARTITION2)
+ protected val ASSIGNMENT: Set[TopicPartition] = Set(TOPIC_PARTITION, TOPIC_PARTITION2)
+
+ //get the assignment of topic partitions for the sinkTask
+ def getAssignment: util.Set[TopicPartition] =
+ ASSIGNMENT.asJava
+
+ def getElasticSinkConfigProps(
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
+ ): Map[String, String] =
+ getBaseElasticSinkConfigProps(QUERY, clusterName)
+
+ def getBaseElasticSinkConfigProps(
+ query: String,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
+ ): Map[String, String] =
+ Map(
+ "topics" -> TOPIC,
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> query,
+ )
+
+ def getElasticSinkConfigPropsHTTPClient(
+ auth: Boolean = false,
+ clusterName: String = ES_CLUSTER_NAME_DEFAULT,
+ ): Map[String, String] =
+ Map(
+ HOSTS -> ELASTIC_SEARCH_HOSTNAMES,
+ ES_CLUSTER_NAME -> clusterName,
+ PROTOCOL -> PROTOCOL_DEFAULT,
+ KCQL -> QUERY,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME -> (if (auth) BASIC_AUTH_USERNAME
+ else
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT),
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD -> (if (auth) BASIC_AUTH_PASSWORD
+ else
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT),
+ )
+}
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/ClockFixture.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
similarity index 91%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/ClockFixture.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
index f42c7146de..10ce47d638 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/ClockFixture.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/ClockFixture.scala
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
import java.time.Clock
import java.time.Instant
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
similarity index 85%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
index 81ab5fdc03..0dedd0ee47 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/CustomIndexNameTest.scala
@@ -13,8 +13,12 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
+import io.lenses.streamreactor.connect.elastic.common.indexname.CustomIndexName
+import io.lenses.streamreactor.connect.elastic.common.indexname.TextFragment
+import io.lenses.streamreactor.connect.elastic.common.indexname.DateTimeFragment
+import io.lenses.streamreactor.connect.elastic.common.indexname.InvalidCustomIndexNameException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.prop.TableDrivenPropertyChecks
diff --git a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
similarity index 89%
rename from kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
rename to kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
index cbd8137be2..0416cf82bd 100644
--- a/kafka-connect-elastic8/src/test/scala/com/datamountaineer/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
+++ b/kafka-connect-elastic8/src/test/scala/io/lenses/streamreactor/connect/elastic8/indexname/IndexNameFragmentTest.scala
@@ -13,8 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package com.datamountaineer.streamreactor.connect.elastic8.indexname
+package io.lenses.streamreactor.connect.elastic8.indexname
+import io.lenses.streamreactor.connect.elastic.common.indexname.DateTimeFragment
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
diff --git a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/HazelCastConnection.scala b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/HazelCastConnection.scala
index 822a36e565..5e30e6b7a0 100644
--- a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/HazelCastConnection.scala
+++ b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/HazelCastConnection.scala
@@ -29,6 +29,7 @@ import com.hazelcast.client.config.ClientNetworkConfig
import com.hazelcast.client.config.SocketOptions
import com.hazelcast.config.SSLConfig
import com.hazelcast.core.HazelcastInstance
+import io.lenses.streamreactor.connect.security.StoreInfo
import javax.cache.CacheManager
import javax.cache.Caching
@@ -81,31 +82,23 @@ object HazelCastConnection {
cacheManager
}
- def setSSLOptions(config: HazelCastConnectionConfig) = {
- config.keyStoreLocation match {
- case Some(path) =>
- if (!new File(path).exists) {
- throw new FileNotFoundException(s"Keystore not found in: $path")
- }
-
- System.setProperty("javax.net.ssl.keyStorePassword", config.keyStorePassword.getOrElse(""))
- System.setProperty("javax.net.ssl.keyStore", path)
- System.setProperty("javax.net.ssl.keyStoreType", config.keyStoreType.getOrElse("jks"))
-
- case None =>
- }
-
- config.trustStoreLocation match {
- case Some(path) =>
- if (!new File(path).exists) {
- throw new FileNotFoundException(s"Truststore not found in: $path")
- }
-
- System.setProperty("javax.net.ssl.trustStorePassword", config.trustStorePassword.getOrElse(""))
- System.setProperty("javax.net.ssl.trustStore", path)
- System.setProperty("javax.net.ssl.trustStoreType", config.trustStoreType.getOrElse("jks"))
-
- case None =>
+ def setSSLOptions(config: HazelCastConnectionConfig): Unit =
+ if (config.sslEnabled) {
+ config.storesInfo.keyStore.foreach {
+ case StoreInfo(path: String, _, _) if !new File(path).exists =>
+ throw new FileNotFoundException(s"Keystore not found in: [$path]")
+ case StoreInfo(path: String, storeType: Option[String], storePassword: Option[String]) =>
+ System.setProperty("javax.net.ssl.keyStorePassword", storePassword.getOrElse(""))
+ System.setProperty("javax.net.ssl.keyStore", path)
+ System.setProperty("javax.net.ssl.keyStoreType", storeType.getOrElse("jks"))
+ }
+ config.storesInfo.trustStore.foreach {
+ case StoreInfo(path: String, _, _) if !new File(path).exists =>
+        throw new FileNotFoundException(s"Truststore not found in: [$path]")
+ case StoreInfo(path: String, storeType: Option[String], storePassword: Option[String]) =>
+ System.setProperty("javax.net.ssl.trustStorePassword", storePassword.getOrElse(""))
+ System.setProperty("javax.net.ssl.trustStore", path)
+ System.setProperty("javax.net.ssl.trustStoreType", storeType.getOrElse("jks"))
+ }
}
- }
}
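
The rewritten setSSLOptions delegates store details to the StoreInfo/StoresInfo types from the new shared security module. Those definitions sit outside this diff; a minimal shape consistent with the pattern matches above and the storePath/storePassword assertions in TestHazelCastWriter further down would be:

    // Assumed shape, inferred from the pattern matches in setSSLOptions and the
    // accessors used in the tests; not part of this diff.
    final case class StoreInfo(
      storePath:     String,         // location of the JKS/PKCS12 file
      storeType:     Option[String], // falls back to "jks" when absent
      storePassword: Option[String],
    )

    final case class StoresInfo(
      keyStore:   Option[StoreInfo] = None,
      trustStore: Option[StoreInfo] = None,
    )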
diff --git a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastConnectionConfig.scala b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastConnectionConfig.scala
index d51134fc3e..de7f2070bc 100644
--- a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastConnectionConfig.scala
+++ b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastConnectionConfig.scala
@@ -15,7 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.hazelcast.config
-import org.apache.kafka.common.config.SslConfigs
+import io.lenses.streamreactor.connect.security.StoresInfo
import scala.jdk.CollectionConverters.ListHasAsScala
@@ -26,17 +26,12 @@ import scala.jdk.CollectionConverters.ListHasAsScala
case class HazelCastConnectionConfig(
clusterName: String,
members: Set[String],
- redo: Boolean = true,
+ redo: Boolean = true,
connectionAttempts: Int,
connectionTimeouts: Long,
socketConfig: HazelCastSocketConfig,
- sslEnabled: Boolean = false,
- trustStoreType: Option[String] = None,
- trustStorePassword: Option[String] = None,
- trustStoreLocation: Option[String] = None,
- keyStoreType: Option[String] = None,
- keyStorePassword: Option[String] = None,
- keyStoreLocation: Option[String] = None,
+ sslEnabled: Boolean = false,
+ storesInfo: StoresInfo,
)
case class HazelCastSocketConfig(
@@ -62,19 +57,7 @@ object HazelCastConnectionConfig {
val clusterName = config.getString(HazelCastSinkConfigConstants.CLUSTER_NAME)
val ssl = config.getBoolean(HazelCastSinkConfigConstants.SSL_ENABLED)
- val trustStoreType = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG))
- val trustStorePath = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
- val trustStorePassword = Option(config.getPassword(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)) match {
- case Some(p) => Some(p.value())
- case None => None
- }
-
- val keyStoreType = Option(config.getString(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG))
- val keyStorePath = Option(config.getString(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
- val keyStorePassword = Option(config.getPassword(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)) match {
- case Some(p) => Some(p.value())
- case None => None
- }
+ val storesInfo = StoresInfo(config)
new HazelCastConnectionConfig(
clusterName,
@@ -84,12 +67,7 @@ object HazelCastConnectionConfig {
connectionTimeouts,
socketConfig,
ssl,
- trustStoreType,
- trustStorePassword,
- trustStorePath,
- keyStoreType,
- keyStorePassword,
- keyStorePath,
+ storesInfo,
)
}
}
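
StoresInfo(config) replaces the hand-rolled extraction of the standard Kafka SSL settings deleted above. Reading the removed lines back, the factory plausibly does something along these lines (the real implementation lives in io.lenses.streamreactor.connect.security and may differ in detail):

    import org.apache.kafka.common.config.AbstractConfig
    import org.apache.kafka.common.config.SslConfigs

    // Sketch of the assumed factory, mirroring the deleted per-store extraction.
    def storesInfoFrom(config: AbstractConfig): StoresInfo = {
      def store(locKey: String, typeKey: String, passKey: String): Option[StoreInfo] =
        Option(config.getString(locKey)).map { path =>
          StoreInfo(
            storePath     = path,
            storeType     = Option(config.getString(typeKey)),
            storePassword = Option(config.getPassword(passKey)).map(_.value()),
          )
        }

      StoresInfo(
        keyStore = store(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,
                         SslConfigs.SSL_KEYSTORE_TYPE_CONFIG,
                         SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG),
        trustStore = store(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
                           SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG,
                           SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG),
      )
    }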
diff --git a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastSinkConfig.scala b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastSinkConfig.scala
index 5d29c9ec67..db57ff753a 100644
--- a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastSinkConfig.scala
+++ b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/config/HazelCastSinkConfig.scala
@@ -22,7 +22,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
import com.datamountaineer.streamreactor.common.config.base.traits.ThreadPoolSettings
-import java.util
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
@@ -221,7 +220,7 @@ object HazelCastSinkConfig {
.withClientSslSupport()
}
-class HazelCastSinkConfig(props: util.Map[String, String])
+class HazelCastSinkConfig(props: Map[String, String])
extends BaseConfig(HazelCastSinkConfigConstants.HAZELCAST_CONNECTOR_PREFIX, HazelCastSinkConfig.config, props)
with ErrorPolicySettings
with KcqlSettings
diff --git a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkConnector.scala b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkConnector.scala
index 2bc85bab26..5ddd83a3a6 100644
--- a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkConnector.scala
+++ b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.hazelcast.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -61,7 +62,7 @@ class HazelCastSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Hazelcast sink task.")
- Helpers.checkInputTopics(HazelCastSinkConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(HazelCastSinkConfigConstants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
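
Helpers.checkInputTopics now reports problems as an Either instead of throwing, and each connector restores the old fail-fast behaviour with leftMap(throw _). A hypothetical, reduced version of the pattern (the real helper cross-checks the KCQL source topics against the connector's topics list):

    import cats.implicits.toBifunctorOps

    // Simplified stand-in for Helpers.checkInputTopics; illustration only.
    def checkInputTopics(kcqlKey: String, props: Map[String, String]): Either[Throwable, Unit] =
      if (props.get(kcqlKey).exists(_.trim.nonEmpty)) Right(())
      else Left(new IllegalArgumentException(s"Missing [$kcqlKey] configuration"))

    // At connector start-up: rethrow on the left, carry on on the right
    // (key name shown for illustration).
    checkInputTopics("connect.hazelcast.kcql", Map.empty).leftMap(throw _)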
diff --git a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkTask.scala b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkTask.scala
index de5252dbf6..56070a2cc1 100644
--- a/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkTask.scala
+++ b/kafka-connect-hazelcast/src/main/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/HazelCastSinkTask.scala
@@ -31,6 +31,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.CollectionHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* Created by andrew@datamountaineer.com on 10/08/16.
@@ -55,7 +56,7 @@ class HazelCastSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
HazelCastSinkConfig.config.parse(conf)
- val sinkConfig = new HazelCastSinkConfig(conf)
+ val sinkConfig = new HazelCastSinkConfig(conf.asScala.toMap)
enableProgress = sinkConfig.getBoolean(HazelCastSinkConfigConstants.PROGRESS_COUNTER_ENABLED)
val settings = HazelCastSinkSettings(sinkConfig)
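
Because the *Config classes now take a Scala Map, each task converts the java.util.Map handed over by Connect before building its config. The same conversion appears again in the HBase, Influx and JMS tasks below; the common shape is roughly:

    import java.util
    import scala.jdk.CollectionConverters.MapHasAsScala

    // Generic form of the start-up conversion used by the sink tasks in this diff.
    def effectiveProps(
      props:          util.Map[String, String],
      contextConfigs: util.Map[String, String],
    ): Map[String, String] = {
      val conf = if (contextConfigs.isEmpty) props else contextConfigs
      conf.asScala.toMap
    }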
diff --git a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/TestBase.scala b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/TestBase.scala
index ab9f06d37a..df063882d7 100644
--- a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/TestBase.scala
+++ b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/TestBase.scala
@@ -15,9 +15,6 @@
*/
package com.datamountaineer.streamreactor.connect.hazelcast
-import java.io.ByteArrayInputStream
-import java.nio.ByteBuffer
-import java.util
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkConfigConstants
import com.hazelcast.topic.Message
import com.hazelcast.topic.MessageListener
@@ -34,8 +31,10 @@ import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
+import java.io.ByteArrayInputStream
+import java.nio.ByteBuffer
+import java.util
import scala.collection.mutable
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.SetHasAsScala
/**
@@ -74,77 +73,77 @@ trait TestBase extends AnyWordSpec with BeforeAndAfter with Matchers {
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsRB =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_RB,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonQueue =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_JSON_QUEUE,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonSet =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_JSON_SET,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonList =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_JSON_LIST,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonMultiMapDefaultPKS =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_MULTIMAP_DEFAULT_PK,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonICache =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_JSON_ICACHE,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJsonMapDefaultPKS =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_IMAP_DEFAULT_PK,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsJson =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_JSON,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsSelection =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_SELECTION,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
def getPropsIgnored =
Map(
HazelCastSinkConfigConstants.KCQL -> KCQL_MAP_IGNORED,
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
- ).asJava
+ )
//get the assignment of topic partitions for the sinkTask
def getAssignment: util.Set[TopicPartition] =
diff --git a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkConnector.scala b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkConnector.scala
index 3d551ea4ce..cfbb901994 100644
--- a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkConnector.scala
+++ b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkConnector.scala
@@ -19,6 +19,7 @@ import com.datamountaineer.streamreactor.connect.hazelcast.TestBase
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkConfigConstants
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsJava
/**
* Created by andrew@datamountaineer.com on 10/08/16.
@@ -28,7 +29,7 @@ class TestHazelCastSinkConnector extends TestBase {
"should start a Hazelcast sink connector" in {
val props = getProps
val connector = new HazelCastSinkConnector
- connector.start(props)
+ connector.start(props.asJava)
val taskConfigs = connector.taskConfigs(1)
taskConfigs.asScala.head.get(HazelCastSinkConfigConstants.KCQL) shouldBe KCQL_MAP
taskConfigs.asScala.head.get(HazelCastSinkConfigConstants.CLUSTER_NAME) shouldBe TESTS_CLUSTER_NAME
diff --git a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkTask.scala b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkTask.scala
index 7bb1dd7b86..60895b2ed9 100644
--- a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkTask.scala
+++ b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastSinkTask.scala
@@ -27,6 +27,7 @@ import com.hazelcast.topic.ITopic
import org.apache.kafka.connect.sink.SinkTaskContext
import org.mockito.MockitoSugar
+import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.SeqHasAsJava
/**
@@ -45,13 +46,13 @@ class TestHazelCastSinkTask extends TestBase with MockitoSugar {
val context = mock[SinkTaskContext]
val assignment = getAssignment
when(context.assignment()).thenReturn(assignment)
- when(context.configs()).thenReturn(props)
+ when(context.configs()).thenReturn(props.asJava)
val records = getTestRecords()
val task = new HazelCastSinkTask
//initialise the tasks context
task.initialize(context)
//start task
- task.start(props)
+ task.start(props.asJava)
//get client and check hazelcast
val config = new HazelCastSinkConfig(props)
diff --git a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastWriter.scala b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastWriter.scala
index 21930b6547..e15de3db9c 100644
--- a/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastWriter.scala
+++ b/kafka-connect-hazelcast/src/test/scala/com/datamountaineer/streamreactor/connect/hazelcast/sink/TestHazelCastWriter.scala
@@ -15,32 +15,27 @@
*/
package com.datamountaineer.streamreactor.connect.hazelcast.sink
+import com.datamountaineer.streamreactor.connect.hazelcast._
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastConnectionConfig
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkConfig
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkConfigConstants
import com.datamountaineer.streamreactor.connect.hazelcast.config.HazelCastSinkSettings
import com.datamountaineer.streamreactor.connect.hazelcast.writers.HazelCastWriter
-import com.datamountaineer.streamreactor.connect.hazelcast.HazelCastConnection
-import com.datamountaineer.streamreactor.connect.hazelcast.MessageListenerImplAvro
-import com.datamountaineer.streamreactor.connect.hazelcast.MessageListenerImplJson
-import com.datamountaineer.streamreactor.connect.hazelcast.SlowTest
-import com.datamountaineer.streamreactor.connect.hazelcast.TestBase
+import com.hazelcast.collection._
import com.hazelcast.core._
import com.hazelcast.map.IMap
import com.hazelcast.multimap.MultiMap
-import com.hazelcast.collection._
-import com.hazelcast.topic.ITopic
import com.hazelcast.ringbuffer.Ringbuffer
+import com.hazelcast.topic.ITopic
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.common.config.SslConfigs
-
-import scala.jdk.CollectionConverters.MapHasAsJava
+import org.scalatest.OptionValues
/**
* Created by andrew@datamountaineer.com on 11/08/16.
* stream-reactor
*/
-class TestHazelCastWriter extends TestBase {
+class TestHazelCastWriter extends TestBase with OptionValues {
var instance: HazelcastInstance = _
before {
@@ -215,7 +210,7 @@ class TestHazelCastWriter extends TestBase {
HazelCastSinkConfigConstants.CLUSTER_NAME -> TESTS_CLUSTER_NAME,
HazelCastSinkConfigConstants.CLUSTER_MEMBERS -> "localhost",
)
- val config = new HazelCastSinkConfig(props.asJava)
+ val config = new HazelCastSinkConfig(props)
val settings = HazelCastSinkSettings(config)
val writer = HazelCastWriter(settings)
val records = getTestRecords()
@@ -269,14 +264,14 @@ class TestHazelCastWriter extends TestBase {
SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> "keystore-password",
)
- val config = new HazelCastSinkConfig(ssl.asJava)
+ val config = new HazelCastSinkConfig(ssl)
val conConfig = HazelCastConnectionConfig(config)
conConfig.sslEnabled shouldBe true
- conConfig.trustStoreLocation shouldBe Some(truststoreFilePath)
- conConfig.keyStoreLocation shouldBe Some(keystoreFilePath)
- conConfig.trustStorePassword shouldBe Some("truststore-password")
- conConfig.keyStorePassword shouldBe Some("keystore-password")
+ conConfig.storesInfo.trustStore.value.storePath shouldBe truststoreFilePath
+ conConfig.storesInfo.keyStore.value.storePath shouldBe keystoreFilePath
+ conConfig.storesInfo.trustStore.value.storePassword shouldBe Some("truststore-password")
+ conConfig.storesInfo.keyStore.value.storePassword shouldBe Some("keystore-password")
HazelCastConnection.setSSLOptions(conConfig)
val sslProps = System.getProperties
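
The store assertions above use .value from ScalaTest's OptionValues (mixed in at the top of the class), which unwraps an Option and fails the test with a descriptive message when it is None instead of blowing up with a bare NoSuchElementException:

    import org.scalatest.OptionValues._
    import org.scalatest.matchers.should.Matchers._

    // Minimal illustration of the OptionValues idiom used in the assertions above.
    val maybePath: Option[String] = Some("/tmp/truststore.jks")
    maybePath.value shouldBe "/tmp/truststore.jks" // fails cleanly if the Option is empty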
diff --git a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkConnector.scala b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkConnector.scala
index abde672487..40ed6d46b8 100644
--- a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkConnector.scala
+++ b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.hbase
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -63,7 +64,7 @@ class HbaseSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Hbase sink task with ${props.toString}.")
- Helpers.checkInputTopics(HBaseConfigConstants.KCQL_QUERY, props.asScala.toMap)
+ Helpers.checkInputTopics(HBaseConfigConstants.KCQL_QUERY, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkTask.scala b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkTask.scala
index a894213d67..29d6c331f9 100644
--- a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkTask.scala
+++ b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/HbaseSinkTask.scala
@@ -35,6 +35,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.IterableHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* HbaseSinkTask
@@ -60,7 +61,7 @@ class HbaseSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
HBaseConfig.config.parse(conf)
- val sinkConfig = HBaseConfig(conf)
+ val sinkConfig = HBaseConfig(conf.asScala.toMap)
enableProgress = sinkConfig.getBoolean(HBaseConfigConstants.PROGRESS_COUNTER_ENABLED)
val hbaseSettings = HBaseSettings(sinkConfig)
diff --git a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseConfig.scala b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseConfig.scala
index 98fcadcaac..ada49f157a 100644
--- a/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseConfig.scala
+++ b/kafka-connect-hbase/src/main/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseConfig.scala
@@ -19,8 +19,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.BaseConfig
import com.datamountaineer.streamreactor.common.config.base.traits.ErrorPolicySettings
import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
-
-import java.util
import com.datamountaineer.streamreactor.connect.hbase.config.HBaseConfigConstants.CONNECTOR_PREFIX
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
@@ -239,7 +237,7 @@ object HBaseConfig {
*
* Holds config, extends AbstractConfig.
*/
-case class HBaseConfig(props: util.Map[String, String])
+case class HBaseConfig(props: Map[String, String])
extends BaseConfig(CONNECTOR_PREFIX, HBaseConfig.config, props)
with KcqlSettings
with ErrorPolicySettings
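
Accepting a plain Map[String, String] here implies that the shared BaseConfig (in the common module, not shown in this section) now adapts the Scala map for Kafka's AbstractConfig, presumably along these lines:

    import org.apache.kafka.common.config.AbstractConfig
    import org.apache.kafka.common.config.ConfigDef
    import scala.jdk.CollectionConverters.MapHasAsJava

    // Assumption about the common BaseConfig signature; it is not part of this diff.
    abstract class BaseConfig(prefix: String, definition: ConfigDef, props: Map[String, String])
        extends AbstractConfig(definition, props.asJava)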
diff --git a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseSettingsTest.scala b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseSettingsTest.scala
index 2626ec4a53..82a4a6249e 100644
--- a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseSettingsTest.scala
+++ b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/config/HBaseSettingsTest.scala
@@ -23,7 +23,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class HBaseSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
@@ -50,7 +49,7 @@ class HBaseSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
val props = Map(
HBaseConfigConstants.KCQL_QUERY -> QUERY_ALL_KEYS,
HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily",
- ).asJava
+ )
val config = HBaseConfig(props)
val settings = HBaseSettings(config)
@@ -66,7 +65,7 @@ class HBaseSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
val props = Map(
HBaseConfigConstants.KCQL_QUERY -> QUERY_ALL,
HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily",
- ).asJava
+ )
val config = HBaseConfig(props)
val settings = HBaseSettings(config)
@@ -83,7 +82,7 @@ class HBaseSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
val props = Map(
HBaseConfigConstants.KCQL_QUERY -> QUERY_SELECT,
HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily",
- ).asJava
+ )
val config = HBaseConfig(props)
val settings = HBaseSettings(config)
@@ -105,7 +104,7 @@ class HBaseSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
val props = Map(
HBaseConfigConstants.KCQL_QUERY -> QUERY_SELECT_KEYS,
HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily",
- ).asJava
+ )
val config = HBaseConfig(props)
val settings = HBaseSettings(config)
diff --git a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/KeytabSettingsTest.scala b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/KeytabSettingsTest.scala
index 8fb4e3e662..81468d5804 100644
--- a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/KeytabSettingsTest.scala
+++ b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/KeytabSettingsTest.scala
@@ -21,8 +21,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class KeytabSettingsTest extends AnyFunSuite with Matchers with FileCreation {
test("validate a keytab setting") {
val file = createFile("keytab1.keytab")
@@ -35,7 +33,7 @@ class KeytabSettingsTest extends AnyFunSuite with Matchers with FileCreation {
HBaseConfigConstants.KerberosKey -> "true",
HBaseConfigConstants.PrincipalKey -> principal,
HBaseConfigConstants.KerberosKeyTabKey -> file.getAbsolutePath,
- ).asJava,
+ ),
)
val actualSettings = KeytabSettings.from(config, HBaseConfigConstants)
@@ -55,7 +53,7 @@ class KeytabSettingsTest extends AnyFunSuite with Matchers with FileCreation {
HBaseConfigConstants.COLUMN_FAMILY -> "someColumnFamily",
HBaseConfigConstants.KerberosKey -> "true",
HBaseConfigConstants.KerberosKeyTabKey -> file.getAbsolutePath,
- ).asJava,
+ ),
)
intercept[ConfigException] {
@@ -76,7 +74,7 @@ class KeytabSettingsTest extends AnyFunSuite with Matchers with FileCreation {
HBaseConfigConstants.KerberosKey -> "true",
HBaseConfigConstants.PrincipalKey -> principal,
HBaseConfigConstants.KerberosKeyTabKey -> "does_not_exists.keytab",
- ).asJava,
+ ),
)
intercept[ConfigException] {
diff --git a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/UserPasswordSettingsTest.scala b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/UserPasswordSettingsTest.scala
index 127a73126b..30350f733a 100644
--- a/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/UserPasswordSettingsTest.scala
+++ b/kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/kerberos/UserPasswordSettingsTest.scala
@@ -21,8 +21,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreation {
test("validate a user-password setting") {
val fileKrb5 = createFile(s"krb1.krb5")
@@ -41,7 +39,7 @@ class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreati
HBaseConfigConstants.KerberosKrb5Key -> fileKrb5.getAbsolutePath,
HBaseConfigConstants.KerberosJaasKey -> fileJaas.getAbsolutePath,
HBaseConfigConstants.JaasEntryNameKey -> "abc",
- ).asJava,
+ ),
)
val actualSettings = UserPasswordSettings.from(config, HBaseConfigConstants)
@@ -75,7 +73,7 @@ class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreati
HBaseConfigConstants.KerberosPasswordKey -> password,
HBaseConfigConstants.KerberosKrb5Key -> fileKrb5.getAbsolutePath,
HBaseConfigConstants.KerberosJaasKey -> fileJaas.getAbsolutePath,
- ).asJava,
+ ),
)
intercept[ConfigException] {
@@ -104,7 +102,7 @@ class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreati
HBaseConfigConstants.KerberosPasswordKey -> password,
HBaseConfigConstants.KerberosKrb5Key -> fileKrb5.getAbsolutePath,
HBaseConfigConstants.KerberosJaasKey -> fileJaas.getAbsolutePath,
- ).asJava,
+ ),
)
intercept[ConfigException] {
@@ -131,7 +129,7 @@ class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreati
HBaseConfigConstants.KerberosUserKey -> user,
HBaseConfigConstants.KerberosPasswordKey -> password,
HBaseConfigConstants.KerberosJaasKey -> fileJaas.getAbsolutePath,
- ).asJava,
+ ),
)
intercept[ConfigException] {
@@ -157,7 +155,7 @@ class UserPasswordSettingsTest extends AnyFunSuite with Matchers with FileCreati
HBaseConfigConstants.KerberosUserKey -> user,
HBaseConfigConstants.KerberosPasswordKey -> password,
HBaseConfigConstants.KerberosKrb5Key -> fileKrb5.getAbsolutePath,
- ).asJava,
+ ),
)
intercept[ConfigException] {
diff --git a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkConnector.scala b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkConnector.scala
index b696e29054..0522deb0f2 100644
--- a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkConnector.scala
+++ b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.influx
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -63,7 +64,7 @@ class InfluxSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting InfluxDb sink task with ${props.toString}.")
- Helpers.checkInputTopics(InfluxConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(InfluxConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkTask.scala b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkTask.scala
index 0b126e552b..27e779ac1c 100644
--- a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkTask.scala
+++ b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/InfluxSinkTask.scala
@@ -32,6 +32,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.CollectionHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* InfluxSinkTask
@@ -54,7 +55,7 @@ class InfluxSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
InfluxConfig.config.parse(conf)
- val sinkConfig = InfluxConfig(conf)
+ val sinkConfig = InfluxConfig(conf.asScala.toMap)
enableProgress = sinkConfig.getBoolean(InfluxConfigConstants.PROGRESS_COUNTER_ENABLED)
val influxSettings = InfluxSettings(sinkConfig)
diff --git a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxConfig.scala b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxConfig.scala
index 149a6fe4a8..43be51800e 100644
--- a/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxConfig.scala
+++ b/kafka-connect-influxdb/src/main/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxConfig.scala
@@ -15,20 +15,11 @@
*/
package com.datamountaineer.streamreactor.connect.influx.config
-import com.datamountaineer.streamreactor.common.config.base.traits.BaseConfig
-import com.datamountaineer.streamreactor.common.config.base.traits.ConsistencyLevelSettings
-import com.datamountaineer.streamreactor.common.config.base.traits.DatabaseSettings
-import com.datamountaineer.streamreactor.common.config.base.traits.ErrorPolicySettings
-import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
-import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
-import com.datamountaineer.streamreactor.common.config.base.traits.UserSettings
-
-import java.util
-
+import com.datamountaineer.streamreactor.common.config.base.traits._
+import com.influxdb.client.domain.WriteConsistency
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
-import com.influxdb.client.domain.WriteConsistency
object InfluxConfig {
@@ -157,7 +148,7 @@ object InfluxConfig {
*
* Holds config, extends AbstractConfig.
*/
-case class InfluxConfig(props: util.Map[String, String])
+case class InfluxConfig(props: Map[String, String])
extends BaseConfig(InfluxConfigConstants.CONNECTOR_PREFIX, InfluxConfig.config, props)
with KcqlSettings
with ErrorPolicySettings
diff --git a/kafka-connect-influxdb/src/test/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxSettingsTest.scala b/kafka-connect-influxdb/src/test/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxSettingsTest.scala
index 138edf180e..06a9567b70 100644
--- a/kafka-connect-influxdb/src/test/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxSettingsTest.scala
+++ b/kafka-connect-influxdb/src/test/scala/com/datamountaineer/streamreactor/connect/influx/config/InfluxSettingsTest.scala
@@ -24,7 +24,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
@@ -43,7 +42,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_USER_CONFIG -> "myuser",
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> "apass",
InfluxConfigConstants.KCQL_CONFIG -> QUERY_ALL,
- ).asJava
+ )
val config = InfluxConfig(props)
InfluxSettings(config)
@@ -58,7 +57,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_USER_CONFIG -> "myuser",
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> "apass",
InfluxConfigConstants.KCQL_CONFIG -> QUERY_ALL,
- ).asJava
+ )
val config = InfluxConfig(props)
InfluxSettings(config)
@@ -77,7 +76,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_USER_CONFIG -> user,
InfluxConfigConstants.KCQL_CONFIG -> QUERY_ALL,
InfluxConfigConstants.CONSISTENCY_CONFIG -> "SOMELEVEL",
- ).asJava
+ )
val config = InfluxConfig(props)
InfluxSettings(config)
@@ -95,7 +94,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_USER_CONFIG -> "",
InfluxConfigConstants.KCQL_CONFIG -> QUERY_ALL,
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> "apass",
- ).asJava
+ )
val config = InfluxConfig(props)
InfluxSettings(config)
@@ -113,7 +112,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_USER_CONFIG -> user,
InfluxConfigConstants.KCQL_CONFIG -> QUERY_ALL,
InfluxConfigConstants.CONSISTENCY_CONFIG -> WriteConsistency.QUORUM.toString,
- ).asJava
+ )
val config = InfluxConfig(props)
@@ -140,7 +139,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> pass,
InfluxConfigConstants.KCQL_CONFIG -> QUERY_SELECT,
InfluxConfigConstants.CONSISTENCY_CONFIG -> WriteConsistency.ANY.toString,
- ).asJava
+ )
val config = InfluxConfig(props)
@@ -168,7 +167,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> pass,
InfluxConfigConstants.KCQL_CONFIG -> QUERY_SELECT_AND_TIMESTAMP,
InfluxConfigConstants.CONSISTENCY_CONFIG -> WriteConsistency.ONE.toString,
- ).asJava
+ )
val config = InfluxConfig(props)
@@ -197,7 +196,7 @@ class InfluxSettingsTest extends AnyWordSpec with Matchers with MockitoSugar {
InfluxConfigConstants.INFLUX_CONNECTION_PASSWORD_CONFIG -> pass,
InfluxConfigConstants.KCQL_CONFIG -> QUERY_SELECT_AND_TIMESTAMP_SYSTEM,
InfluxConfigConstants.CONSISTENCY_CONFIG -> WriteConsistency.ONE.toString,
- ).asJava
+ )
val config = InfluxConfig(props)
diff --git a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/ItTestBase.scala b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/ItTestBase.scala
index a5d05b67c8..2b650aff1f 100644
--- a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/ItTestBase.scala
+++ b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/ItTestBase.scala
@@ -35,7 +35,6 @@ import java.io.BufferedWriter
import java.io.ByteArrayOutputStream
import java.io.FileWriter
import java.nio.file.Paths
-import java.util
import java.util.UUID
import javax.jms.BytesMessage
import javax.jms.Session
@@ -121,8 +120,8 @@ trait ItTestBase extends AnyWordSpec with Matchers with MockitoSugar {
topics: String,
url: String,
customProperties: Map[String, String] = Map(),
- ): util.Map[String, String] =
- (Map("topics" -> topics) ++ getProps(kcql, url) ++ customProperties).asJava
+ ): Map[String, String] =
+ Map("topics" -> topics) ++ getProps(kcql, url) ++ customProperties
def getProps(kcql: String, url: String): Map[String, String] =
Map(
diff --git a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTaskTest.scala b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTaskTest.scala
index 8fd75015ed..64a76326e7 100644
--- a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTaskTest.scala
+++ b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTaskTest.scala
@@ -37,6 +37,7 @@ import javax.jms.Message
import javax.jms.MessageListener
import javax.jms.Session
import javax.jms.TextMessage
+import scala.jdk.CollectionConverters.MapHasAsJava
import scala.language.reflectiveCalls
import scala.reflect.io.Path
import scala.util.Using.{ resource => using }
@@ -110,11 +111,11 @@ class JMSSinkTaskTest extends ItTestBase with BeforeAndAfterAll with MockitoSuga
topicsSet.add(new TopicPartition(kafkaTopic1, 0))
topicsSet.add(new TopicPartition(kafkaTopic2, 0))
when(context.assignment()).thenReturn(topicsSet)
- when(context.configs()).thenReturn(props)
+ when(context.configs()).thenReturn(props.asJava)
val task = new JMSSinkTask
task.initialize(context)
- task.start(props)
+ task.start(props.asJava)
val records = new java.util.ArrayList[SinkRecord]
records.add(record1)
diff --git a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/MessageConverterTest.scala b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/MessageConverterTest.scala
index e498373d45..ec2e6a6b85 100644
--- a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/MessageConverterTest.scala
+++ b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/MessageConverterTest.scala
@@ -39,7 +39,6 @@ import javax.jms.MapMessage
import javax.jms.ObjectMessage
import javax.jms.TextMessage
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.reflect.io.Path
import scala.util.Try
@@ -63,7 +62,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
@@ -109,7 +108,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val kcqlT = getKCQL(topicName, kafkaTopic1, "TOPIC")
val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
@@ -159,7 +158,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(kcql, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
@@ -195,7 +194,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val kcqlQ = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
using(connectionFactory.createConnection()) { connection =>
@@ -253,7 +252,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(kcql, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")
@@ -287,7 +286,7 @@ class MessageConverterTest extends AnyWordSpec with Matchers with ItTestBase wit
val props = getProps(kcql, JMS_URL)
val schema = getProtobufSchema
val struct = getProtobufStruct(schema, "addrressed-person", 103, "addressed-person@gmail.com")
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val connectionFactory = new ActiveMQConnectionFactory("vm://localhost?broker.persistent=false")
using(connectionFactory.createConnection()) { connection =>
diff --git a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSReaderTest.scala b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSReaderTest.scala
index 6b72ae8566..fc7a14ee78 100644
--- a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSReaderTest.scala
+++ b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSReaderTest.scala
@@ -35,7 +35,6 @@ import org.scalatest.time.Span
import java.util.UUID
import javax.jms.Message
import javax.jms.Session
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.MapHasAsScala
import scala.reflect.io.Path
@@ -64,7 +63,7 @@ class JMSReaderTest extends ItTestBase with BeforeAndAfterAll with Eventually {
val kcql = getKCQL(kafkaTopic, queueName, "QUEUE")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val reader = JMSReader(settings)
@@ -89,7 +88,7 @@ class JMSReaderTest extends ItTestBase with BeforeAndAfterAll with Eventually {
val avroMessages = getBytesMessage(messageCount, session)
avroMessages.foreach(m => avroProducer.send(m))
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, sink = false)
val reader = JMSReader(settings)
@@ -125,7 +124,7 @@ class JMSReaderTest extends ItTestBase with BeforeAndAfterAll with Eventually {
val messageSelector = "Fruit='apples'"
val kcql = kcqlWithMessageSelector(kafkaTopic, topicName, messageSelector)
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val reader = JMSReader(settings)
diff --git a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSessionProviderTest.scala b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSessionProviderTest.scala
index 4daa379757..9eb9dc9ffc 100644
--- a/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSessionProviderTest.scala
+++ b/kafka-connect-jms/src/it/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSessionProviderTest.scala
@@ -32,7 +32,6 @@ import org.scalatest.concurrent.Eventually
import java.util.UUID
import javax.jms.Session
import javax.naming.NameNotFoundException
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.util.Try
class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Eventually {
@@ -45,7 +44,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, queueName, "QUEUE")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, forAJmsConsumer)
val provider = JMSSessionProvider(settings, forAJmsConsumer)
provider.queueConsumers.size shouldBe 1
@@ -60,7 +59,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val topicName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, topicName, "TOPIC")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, forAJmsConsumer)
val provider = JMSSessionProvider(settings, forAJmsConsumer)
provider.queueConsumers.size shouldBe 0
@@ -75,7 +74,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, queueName, "QUEUE")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, forAJmsProducer)
val provider = JMSSessionProvider(settings, forAJmsProducer)
provider.queueConsumers.size shouldBe 0
@@ -90,7 +89,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val topicName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, topicName, "TOPIC")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, forAJmsProducer)
val provider = JMSSessionProvider(settings, forAJmsProducer)
provider.queueConsumers.size shouldBe 0
@@ -105,7 +104,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val topicName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, topicName, "TOPIC")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, forAJmsProducer)
val provider = JMSSessionProvider(settings, forAJmsProducer)
provider.close().isSuccess shouldBe true
@@ -119,7 +118,7 @@ class JMSSessionProviderTest extends ItTestBase with BeforeAndAfterAll with Even
val topicName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic, topicName, "TOPIC")
val props = getProps(kcql, brokerUrl)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val validSettings = JMSSettings(config, forAJmsConsumer)
val invalidSettings = validSettings.copy(destinationSelector = DestinationSelector.JNDI)
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSConfig.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSConfig.scala
index 7242431063..42ba1edf2a 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSConfig.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSConfig.scala
@@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
-import java.util
-
object JMSConfig {
val config: ConfigDef = new ConfigDef()
@@ -283,7 +281,7 @@ object JMSConfig {
*
* Holds config, extends AbstractConfig.
*/
-case class JMSConfig(props: util.Map[String, String])
+case class JMSConfig(props: Map[String, String])
extends BaseConfig(JMSConfigConstants.CONNECTOR_PREFIX, JMSConfig.config, props)
with KcqlSettings
with ErrorPolicySettings
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettings.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettings.scala
index a6ff97d373..987c0c7daa 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettings.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettings.scala
@@ -31,7 +31,6 @@ import org.apache.kafka.common.config.ConfigException
import org.apache.kafka.common.config.types.Password
import scala.jdk.CollectionConverters.IterableHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsScala
case class JMSSetting(
source: String,
@@ -121,7 +120,7 @@ object JMSSettings extends StrictLogging {
val settings = kcql.map { r =>
val jmsName = if (sink) r.getTarget else r.getSource
- val converters = JMSConnectorConverters(sink)(r, config.props.asScala.toMap) match {
+ val converters = JMSConnectorConverters(sink)(r, config.props) match {
case None => throw new ConfigException("Converters should not be empty")
case Some(Left(exception)) => throw exception
case Some(Right(converters)) => converters
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkConnector.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkConnector.scala
index ed745b612c..a292548be9 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkConnector.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.jms.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
import com.datamountaineer.streamreactor.connect.jms.config.JMSConfig
@@ -62,7 +63,7 @@ class JMSSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Jms sink task.")
- Helpers.checkInputTopics(JMSConfigConstants.KCQL, props.asScala.toMap)
+ Helpers.checkInputTopics(JMSConfigConstants.KCQL, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTask.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTask.scala
index 61d52166dc..c138095487 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTask.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/sink/JMSSinkTask.scala
@@ -31,6 +31,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.IterableHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* JMSSinkTask
@@ -52,7 +53,7 @@ class JMSSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
JMSConfig.config.parse(conf)
- val sinkConfig = new JMSConfig(conf)
+ val sinkConfig = new JMSConfig(conf.asScala.toMap)
val settings = JMSSettings(sinkConfig, sink = true)
enableProgress = sinkConfig.getBoolean(JMSConfigConstants.PROGRESS_COUNTER_ENABLED)
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceConnector.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceConnector.scala
index 2430a8f864..ff4dab27c4 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceConnector.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceConnector.scala
@@ -35,7 +35,7 @@ import scala.jdk.CollectionConverters.SeqHasAsJava
* stream-reactor
*/
class JMSSourceConnector extends SourceConnector with StrictLogging {
- private var configProps: util.Map[String, String] = _
+ private var configProps: Map[String, String] = _
private val configDef = JMSConfig.config
private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)
@@ -46,17 +46,14 @@ class JMSSourceConnector extends SourceConnector with StrictLogging {
require(raw != null && raw.nonEmpty, s"No ${JMSConfigConstants.KCQL} provided!")
//sql1, sql2
- val kcqls = raw.split(";")
+ val kcqls: Seq[String] = raw.map(_.split(";").toSeq).getOrElse(Seq.empty)
val groups = ConnectorUtils.groupPartitions(kcqls.toList.asJava, maxTasks).asScala
//split up the kcql statement based on the number of tasks.
groups
.filterNot(_.isEmpty)
.map { g =>
- val taskConfigs = new java.util.HashMap[String, String]
- taskConfigs.putAll(configProps)
- taskConfigs.put(JMSConfigConstants.KCQL, g.asScala.mkString(";")) //overwrite
- taskConfigs.asScala.toMap.asJava
+ (configProps + (JMSConfigConstants.KCQL -> g.asScala.mkString(";"))).asJava
}
}.asJava
@@ -64,9 +61,7 @@ class JMSSourceConnector extends SourceConnector with StrictLogging {
val raw = configProps.get(JMSConfigConstants.KCQL)
require(raw != null && raw.nonEmpty, s"No ${JMSConfigConstants.KCQL} provided!")
(1 to maxTasks).map { _ =>
- val taskConfigs: util.Map[String, String] = new java.util.HashMap[String, String]
- taskConfigs.putAll(configProps)
- taskConfigs
+ configProps.asJava
}.toList.asJava
}
@@ -81,7 +76,7 @@ class JMSSourceConnector extends SourceConnector with StrictLogging {
override def config(): ConfigDef = configDef
override def start(props: util.Map[String, String]): Unit = {
- val config = new JMSConfig(props)
+ val config = new JMSConfig(props.asScala.toMap)
configProps = config.props
}
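
With configProps held as an immutable Map, per-task configs are just the base map with the KCQL entry overridden per group. A toy example of the distribution (ConnectorUtils.groupPartitions performs the real partitioning, and the key name is shown for illustration):

    // Three KCQL statements spread over two tasks.
    val configProps = Map(
      "connect.jms.kcql" -> "sql1;sql2;sql3",
      "connect.jms.url"  -> "tcp://localhost:61616",
    )
    val groups = Seq(Seq("sql1", "sql2"), Seq("sql3")) // a possible grouping for maxTasks = 2
    val taskConfigs = groups.map(g => configProps + ("connect.jms.kcql" -> g.mkString(";")))
    // taskConfigs.head("connect.jms.kcql") == "sql1;sql2"
    // taskConfigs(1)("connect.jms.kcql")  == "sql3"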
diff --git a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceTask.scala b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceTask.scala
index 9cf2daad70..84d2fd3f40 100644
--- a/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceTask.scala
+++ b/kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceTask.scala
@@ -34,6 +34,7 @@ import java.util.function.BiConsumer
import javax.jms.Message
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.duration._
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
import scala.util.Failure
import scala.util.Success
@@ -61,7 +62,7 @@ class JMSSourceTask extends SourceTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
JMSConfig.config.parse(conf)
- val config = new JMSConfig(conf)
+ val config = new JMSConfig(conf.asScala.toMap)
val settings = JMSSettings(config, sink = false)
reader = JMSReader(settings)
enableProgress = config.getBoolean(JMSConfigConstants.PROGRESS_COUNTER_ENABLED)
diff --git a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettingsTest.scala b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettingsTest.scala
index 025c777183..7dcb21c2ea 100644
--- a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettingsTest.scala
+++ b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/config/JMSSettingsTest.scala
@@ -30,7 +30,6 @@ import org.scalatest.EitherValues
import java.util.UUID
import javax.naming.NameNotFoundException
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.reflect.io.Path
class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues {
@@ -44,7 +43,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic1, queueName, "QUEUE")
val props = getProps(kcql, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val setting = settings.settings.head
setting.source shouldBe queueName
@@ -60,7 +59,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val topicName = UUID.randomUUID().toString
val kcql = getKCQL(kafkaTopic1, topicName, "TOPIC")
val props = getProps(kcql, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val setting = settings.settings.head
setting.source shouldBe topicName
@@ -77,7 +76,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcql = getKCQL(kafkaTopic1, topicName, "TOPIC")
val props =
getProps(kcql, JMS_URL) ++ Map(JMSConfigConstants.DESTINATION_SELECTOR -> DestinationSelector.JNDI.toString)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val setting = settings.settings.head
setting.source shouldBe topicName
@@ -100,7 +99,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL) ++
Map(JMSConfigConstants.DESTINATION_SELECTOR -> DestinationSelector.JNDI.toString) ++
Map(JMSConfigConstants.TOPIC_SUBSCRIPTION_NAME -> "subscriptionName")
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val queue = settings.settings.head
@@ -136,7 +135,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
Map(
JMSConfigConstants.DEFAULT_SOURCE_CONVERTER_CONFIG -> "com.datamountaineer.streamreactor.connect.converters.source.AvroConverter",
)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val queue = settings.settings.head
@@ -168,7 +167,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcqlT = getKCQL(kafkaTopic1, topicName, "TOPIC")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val queue = settings.settings.head
queue.source shouldBe queueName
@@ -195,7 +194,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcqlT = getKCQLAvroSinkConverter(kafkaTopic1, topicName, "TOPIC")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val queue = settings.settings.head
queue.source shouldBe queueName
@@ -222,7 +221,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcqlQ = getKCQLFormat(kafkaTopic1, queueName, "QUEUE", "PROTOBUF")
val kcqlT = getKCQLFormat(kafkaTopic1, topicName, "TOPIC", "PROTOBUF")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val queue = settings.settings.head
queue.source shouldBe queueName
@@ -250,7 +249,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcqlQ = getKCQLStoreAsAddressedPerson(kafkaTopic1, queueName, "QUEUE")
val kcqlT = getKCQLEmptyStoredAsNonAddressedPerson(kafkaTopic1, topicName, "TOPIC")
val props = getProps(s"$kcqlQ;$kcqlT", JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val queue = settings.settings.head
queue.source shouldBe queueName
@@ -284,7 +283,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
JMSConfigConstants.DEFAULT_SINK_CONVERTER_CONFIG -> "com.datamountaineer.streamreactor.connect.jms.sink.converters.AvroMessageConverter",
)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val queue = settings.settings.head
queue.source shouldBe queueName
@@ -309,7 +308,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
val kcqlT = kcqlWithMessageSelector(kafkaTopic1, topicName, MESSAGE_SELECTOR)
val props = getProps(kcqlT, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, false)
val topic = settings.settings.head
@@ -334,7 +333,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
JMSConfigConstants.CONNECTION_FACTORY -> CONNECTION_FACTORY,
JMSConfigConstants.JMS_URL -> JMS_URL,
JMSConfigConstants.DESTINATION_SELECTOR -> DestinationSelector.JNDI.toString,
- ).asJava
+ )
val config = jms.config.JMSConfig(props)
intercept[ConfigException] {
JMSSettings(config, false)
@@ -351,7 +350,7 @@ class JMSSettingsTest extends TestBase with BeforeAndAfterAll with EitherValues
JMSConfigConstants.INITIAL_CONTEXT_FACTORY -> INITIAL_CONTEXT_FACTORY,
JMSConfigConstants.CONNECTION_FACTORY -> "plop",
JMSConfigConstants.JMS_URL -> JMS_URL,
- ).asJava
+ )
val config = jms.config.JMSConfig(props)
val settings = JMSSettings(config, true)
intercept[NameNotFoundException] {
diff --git a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoDynamicConverterTest.scala b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoDynamicConverterTest.scala
index cf92a878d2..01fee8dcba 100644
--- a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoDynamicConverterTest.scala
+++ b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoDynamicConverterTest.scala
@@ -25,7 +25,6 @@ import org.scalatest.EitherValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
import java.util.UUID
class ProtoDynamicConverterTest
@@ -43,7 +42,7 @@ class ProtoDynamicConverterTest
val queueName = UUID.randomUUID().toString
val kcql = getKCQL(queueName, kafkaTopic1, "QUEUE")
val props = getProps(kcql, JMS_URL)
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
val schema = getProtobufSchema
diff --git a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoStoredAsConverterTest.scala b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoStoredAsConverterTest.scala
index ad04504b6e..9cafa2565b 100644
--- a/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoStoredAsConverterTest.scala
+++ b/kafka-connect-jms/src/test/scala/com/datamountaineer/streamreactor/connect/jms/sink/converters/ProtoStoredAsConverterTest.scala
@@ -31,7 +31,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.util.UUID
-import scala.jdk.CollectionConverters.MapHasAsJava
class ProtoStoredAsConverterTest
extends AnyWordSpec
@@ -164,7 +163,7 @@ class ProtoStoredAsConverterTest
schema: Schema,
struct: Struct,
) = {
- val config = JMSConfig(props.asJava)
+ val config = JMSConfig(props)
val settings = JMSSettings(config, true)
val setting = settings.settings.head
diff --git a/kafka-connect-mongodb/src/it/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoWriterTest.scala b/kafka-connect-mongodb/src/it/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoWriterTest.scala
index 0cc27fc3ab..2cfd8f54b0 100644
--- a/kafka-connect-mongodb/src/it/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoWriterTest.scala
+++ b/kafka-connect-mongodb/src/it/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoWriterTest.scala
@@ -47,7 +47,6 @@ import org.scalatest.wordspec.AnyWordSpecLike
import java.util.UUID
import scala.collection.immutable.ListMap
import scala.collection.immutable.ListSet
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.SeqHasAsJava
class MongoWriterTest extends MongoDBContainer with AnyWordSpecLike with Matchers with BeforeAndAfterAll {
@@ -414,7 +413,7 @@ class MongoWriterTest extends MongoDBContainer with AnyWordSpecLike with Matcher
SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG -> "truststore-password",
SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG -> keystoreFilePath,
SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> "keystore-password",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -448,7 +447,7 @@ class MongoWriterTest extends MongoDBContainer with AnyWordSpecLike with Matcher
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"INSERT INTO $collectionName SELECT vehicle, vehicle.fullVIN, header.applicationId FROM topicA",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -476,7 +475,7 @@ class MongoWriterTest extends MongoDBContainer with AnyWordSpecLike with Matcher
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"UPSERT INTO $collectionName SELECT vehicle.fullVIN, header.applicationId FROM topicA pk vehicle.fullVIN",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -504,7 +503,7 @@ class MongoWriterTest extends MongoDBContainer with AnyWordSpecLike with Matcher
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017/?ssl=true",
MongoConfigConstants.KCQL_CONFIG -> s"UPSERT INTO $collectionName SELECT sensorID, location.lon as lon, location.lat as lat FROM topicA pk location.lon",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
diff --git a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoConfig.scala b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoConfig.scala
index b9aab3cfe8..0c24409a3c 100644
--- a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoConfig.scala
+++ b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoConfig.scala
@@ -22,8 +22,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.KcqlSettings
import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetriesSettings
import com.datamountaineer.streamreactor.common.config.base.traits.UserSettings
-import java.util
-
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
@@ -152,7 +150,7 @@ object MongoConfig {
.withClientSslSupport()
}
-case class MongoConfig(props: util.Map[String, String])
+case class MongoConfig(props: Map[String, String])
extends BaseConfig(MongoConfigConstants.CONNECTOR_PREFIX, MongoConfig.config, props)
with KcqlSettings
with DatabaseSettings
diff --git a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkConnector.scala b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkConnector.scala
index c30189c80c..f8202d36c1 100644
--- a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkConnector.scala
+++ b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.mongodb.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -65,8 +66,8 @@ class MongoSinkConnector extends SinkConnector with StrictLogging {
* @param props A map of properties for the connector and worker
*/
override def start(props: util.Map[String, String]): Unit = {
- Helpers.checkInputTopics(MongoConfigConstants.KCQL_CONFIG, props.asScala.toMap)
- Try(MongoConfig(props)) match {
+ Helpers.checkInputTopics(MongoConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
+ Try(MongoConfig(props.asScala.toMap)) match {
case Failure(f) =>
throw new ConnectException(s"Couldn't start Mongo sink due to configuration error: ${f.getMessage}", f)
case _ =>
diff --git a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkTask.scala b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkTask.scala
index ca9049ff2b..10244c6aac 100644
--- a/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkTask.scala
+++ b/kafka-connect-mongodb/src/main/scala/com/datamountaineer/streamreactor/connect/mongodb/sink/MongoSinkTask.scala
@@ -29,6 +29,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.IterableHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.util.Failure
import scala.util.Success
import scala.util.Try
@@ -57,7 +58,7 @@ class MongoSinkTask extends SinkTask with StrictLogging {
printAsciiHeader(manifest, "/mongo-ascii.txt")
- val taskConfig = Try(MongoConfig(conf)) match {
+ val taskConfig = Try(MongoConfig(conf.asScala.toMap)) match {
case Failure(f) => throw new ConnectException("Couldn't start Mongo Sink due to configuration error.", f)
case Success(s) => s
}
diff --git a/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoSettingsTest.scala b/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoSettingsTest.scala
index 6233706f94..bee5c01f40 100644
--- a/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoSettingsTest.scala
+++ b/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/config/MongoSettingsTest.scala
@@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class MongoSettingsTest extends AnyWordSpec with Matchers {
"MongoSinkSettings" should {
"default the host if the hosts settings not provided" in {
@@ -29,7 +27,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT cola as cold, colc FROM topic1",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -46,7 +44,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1;INSERT INTO coll2 SELECT a as F1, b as F2 FROM topic2",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -63,7 +61,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1 IGNORE a,b,c",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -81,7 +79,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1 PK a,b",
- ).asJava
+ )
val config = MongoConfig(map)
val settings = MongoSettings(config)
@@ -99,7 +97,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO SELECT * FROM topic1",
- ).asJava
+ )
val config = MongoConfig(map)
intercept[IllegalArgumentException] {
@@ -111,7 +109,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
val map = Map(
MongoConfigConstants.DATABASE_CONFIG -> "database1",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
intercept[ConfigException] {
val config = MongoConfig(map)
@@ -124,7 +122,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
val config = MongoConfig(map)
intercept[IllegalArgumentException] {
@@ -140,7 +138,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.DATABASE_CONFIG -> "db",
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
- ).asJava
+ )
val settings = MongoSettings(MongoConfig(map))
settings.jsonDateTimeFields shouldBe Set.empty[List[String]]
}
@@ -151,7 +149,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG -> "",
- ).asJava
+ )
val settings = MongoSettings(MongoConfig(map))
settings.jsonDateTimeFields shouldBe Set.empty[Seq[String]]
}
@@ -162,7 +160,7 @@ class MongoSettingsTest extends AnyWordSpec with Matchers {
MongoConfigConstants.CONNECTION_CONFIG -> "mongodb://localhost:27017",
MongoConfigConstants.KCQL_CONFIG -> "INSERT INTO collection1 SELECT * FROM topic1",
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG -> "a, b, c.m, d, e.n.y, f",
- ).asJava
+ )
val settings = MongoSettings(MongoConfig(map))
settings.jsonDateTimeFields shouldBe Set(
List("a"),
diff --git a/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/converters/SinkRecordConverterTest.scala b/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/converters/SinkRecordConverterTest.scala
index 251d5b18fc..2787c196bb 100644
--- a/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/converters/SinkRecordConverterTest.scala
+++ b/kafka-connect-mongodb/src/test/scala/com/datamountaineer/streamreactor/connect/mongodb/converters/SinkRecordConverterTest.scala
@@ -102,7 +102,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
"fromJson()" should {
"not modify any values for date fields when jsonDateTimeFields is NOT specified" in {
- implicit val settings = MongoSettings(MongoConfig(baseConfig.asJava))
+ implicit val settings = MongoSettings(MongoConfig(baseConfig))
val doc: Document = SinkRecordConverter.fromJson(parse(jsonStr))
val map: Set[JavaMap.Entry[String, AnyRef]] = doc.entrySet().asScala.toSet
@@ -133,7 +133,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
expectedDates.keySet.mkString(","),
- )).asJava),
+ ))),
)
val doc: Document = SinkRecordConverter.fromJson(parse(jsonStr))
val map: Set[JavaMap.Entry[String, AnyRef]] = doc.entrySet().asScala.toSet
@@ -163,7 +163,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
expectedDates.keySet.mkString(","),
- )).asJava),
+ ))),
)
val doc: Document = SinkRecordConverter.fromJson(parse(jsonInt))
val map: Set[JavaMap.Entry[String, AnyRef]] = doc.entrySet().asScala.toSet
@@ -200,7 +200,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
"a, b, c, d, e, f",
- )).asJava))
+ ))))
val doc: Document = filterNonParsableDates(SinkRecordConverter.fromJson(parse(jsonStr)))
@@ -243,7 +243,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
"c.ts, e.n.x.ts",
- )).asJava))
+ ))))
val doc: Document = SinkRecordConverter.fromJson(parse(json))
val map: Set[JavaMap.Entry[String, AnyRef]] = doc.entrySet().asScala.toSet
@@ -307,7 +307,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
"A, B.N",
- )).asJava))
+ ))))
val doc = SinkRecordConverter.fromMap(map)
@@ -329,7 +329,7 @@ class SinkRecordConverterTest extends AnyWordSpec with Matchers {
Map(
MongoConfigConstants.JSON_DATETIME_FIELDS_CONFIG ->
"subDoc.N, timestamp, subList.Y",
- )).asJava),
+ ))),
)
//map is {A=0, subList=[Document{{X=100, Y=101}}, Document{{Y=102}}],
diff --git a/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/TestMqttWriter.scala b/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/TestMqttWriter.scala
index 9178b02707..5c83701a04 100644
--- a/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/TestMqttWriter.scala
+++ b/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/TestMqttWriter.scala
@@ -45,8 +45,6 @@ import java.nio.file.Paths
import java.util.UUID
import scala.collection.mutable
import scala.io.Source
-import scala.jdk.CollectionConverters.MapHasAsJava
-import scala.jdk.CollectionConverters.MapHasAsScala
import scala.util.Failure
import scala.util.Success
import scala.util.Try
@@ -207,7 +205,7 @@ class TestMqttWriter extends AnyWordSpec with MqttCallback with ForEachTestConta
MqttConfigConstants.USER_CONFIG -> mqttUser,
)
- val config = MqttSinkConfig(props.asJava)
+ val config = MqttSinkConfig(props)
val settings = MqttSinkSettings(config)
val convertersMap = settings.sinksToConverters.map {
@@ -267,7 +265,7 @@ class TestMqttWriter extends AnyWordSpec with MqttCallback with ForEachTestConta
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> mqttPassword,
MqttConfigConstants.USER_CONFIG -> mqttUser,
- ).asJava
+ )
val config = MqttSinkConfig(props)
val settings = MqttSinkSettings(config)
@@ -283,7 +281,7 @@ class TestMqttWriter extends AnyWordSpec with MqttCallback with ForEachTestConta
)
}
- converter.initialize(props.asScala.toMap)
+ converter.initialize(props)
topic -> converter
}
diff --git a/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttManagerTest.scala b/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttManagerTest.scala
index 0abb24a32c..d9534dcd9c 100644
--- a/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttManagerTest.scala
+++ b/kafka-connect-mqtt/src/it/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttManagerTest.scala
@@ -46,7 +46,6 @@ import java.nio.file.Paths
import java.util
import java.util.UUID
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class MqttManagerTest extends AnyWordSpec with ForAllTestContainer with Matchers with StrictLogging {
@@ -452,10 +451,8 @@ class MqttManagerTest extends AnyWordSpec with ForAllTestContainer with Matchers
MqttConfigConstants.HOSTS_CONFIG -> getMqttConnectionUrl,
MqttConfigConstants.QS_CONFIG -> qs.toString,
)
- val mqttManager = new MqttManager(MqttClientConnectionFn.apply,
- sourcesToConvMap,
- MqttSourceSettings(MqttSourceConfig(props.asJava)),
- )
+ val mqttManager =
+ new MqttManager(MqttClientConnectionFn.apply, sourcesToConvMap, MqttSourceSettings(MqttSourceConfig(props)))
Thread.sleep(2000)
val message = "message"
diff --git a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttConfig.scala b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttConfig.scala
index 94f3203e43..6fafb53ea1 100644
--- a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttConfig.scala
+++ b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttConfig.scala
@@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
-import java.util
-
/**
* Created by andrew@datamountaineer.com on 27/08/2017.
* stream-reactor
@@ -244,7 +242,7 @@ object MqttSourceConfig {
)
}
-case class MqttSourceConfig(props: util.Map[String, String])
+case class MqttSourceConfig(props: Map[String, String])
extends BaseConfig(MqttConfigConstants.CONNECTOR_PREFIX, MqttSourceConfig.config, props)
with MqttConfigBase
@@ -296,7 +294,7 @@ object MqttSinkConfig {
)
}
-case class MqttSinkConfig(props: util.Map[String, String])
+case class MqttSinkConfig(props: Map[String, String])
extends BaseConfig(MqttConfigConstants.CONNECTOR_PREFIX, MqttSinkConfig.config, props)
with MqttConfigBase
diff --git a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
index dcf5ac107e..80739c8b52 100644
--- a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
+++ b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.mqtt.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
import com.datamountaineer.streamreactor.connect.mqtt.config.MqttConfigConstants
@@ -40,7 +41,7 @@ class MqttSinkConnector extends SinkConnector with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Mqtt sink connector.")
- Helpers.checkInputTopics(MqttConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(MqttConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkTask.scala b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkTask.scala
index 74ffdafdee..af540200d0 100644
--- a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkTask.scala
+++ b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/sink/MqttSinkTask.scala
@@ -53,7 +53,7 @@ class MqttSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
MqttSinkConfig.config.parse(conf)
- val sinkConfig = new MqttSinkConfig(conf)
+ val sinkConfig = new MqttSinkConfig(conf.asScala.toMap)
enableProgress = sinkConfig.getBoolean(MqttConfigConstants.PROGRESS_COUNTER_ENABLED)
val settings = MqttSinkSettings(sinkConfig)
diff --git a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnector.scala b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnector.scala
index 22fb9281c3..8bf8e182b5 100644
--- a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnector.scala
+++ b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnector.scala
@@ -47,7 +47,7 @@ class MqttSourceConnector extends SourceConnector with StrictLogging {
*/
override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
- val settings = MqttSourceSettings(MqttSourceConfig(configProps))
+ val settings = MqttSourceSettings(MqttSourceConfig(configProps.asScala.toMap))
val kcql = settings.kcql
if (maxTasks == 1 || kcql.length == 1) {
Collections.singletonList(configProps)
diff --git a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceTask.scala b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceTask.scala
index 46f13fb4fd..b544d42d33 100644
--- a/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceTask.scala
+++ b/kafka-connect-mqtt/src/main/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceTask.scala
@@ -45,7 +45,7 @@ class MqttSourceTask extends SourceTask with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
printAsciiHeader(manifest, "/mqtt-source-ascii.txt")
- val conf = if (context.configs().isEmpty) props else context.configs()
+ val conf = (if (context.configs().isEmpty) props else context.configs()).asScala.toMap
val settings = MqttSourceSettings(MqttSourceConfig(conf))
@@ -77,7 +77,7 @@ class MqttSourceTask extends SourceTask with StrictLogging {
s"Invalid ${MqttConfigConstants.KCQL_CONFIG} is invalid. $clazz should have an empty ctor!",
)
}
- converter.initialize(conf.asScala.toMap)
+ converter.initialize(conf)
topic -> converter
}
diff --git a/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttSourceSettingsTest.scala b/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttSourceSettingsTest.scala
index 92f345e51b..90974423bd 100644
--- a/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttSourceSettingsTest.scala
+++ b/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/config/MqttSourceSettingsTest.scala
@@ -21,8 +21,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
"MqttSourceSetting" should {
@@ -41,7 +39,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
settings.mqttQualityOfService shouldBe 1
@@ -72,7 +70,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
@@ -93,7 +91,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
}
@@ -114,7 +112,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
),
)
}
@@ -136,7 +134,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
),
)
}
@@ -157,7 +155,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
),
)
}
@@ -177,7 +175,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
}
@@ -196,7 +194,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
}
@@ -215,7 +213,7 @@ class MqttSourceSettingsTest extends AnyWordSpec with Matchers {
MqttConfigConstants.KEEP_ALIVE_INTERVAL_CONFIG -> "1000",
MqttConfigConstants.PASSWORD_CONFIG -> "somepassw",
MqttConfigConstants.USER_CONFIG -> "user",
- ).asJava,
+ ),
)
}
}
diff --git a/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnectorTest.scala b/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnectorTest.scala
index 4371874589..4ce3c13326 100644
--- a/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnectorTest.scala
+++ b/kafka-connect-mqtt/src/test/scala/com/datamountaineer/streamreactor/connect/mqtt/source/MqttSourceConnectorTest.scala
@@ -24,6 +24,7 @@ import org.scalatest.wordspec.AnyWordSpec
import scala.jdk.CollectionConverters.ListHasAsScala
import scala.jdk.CollectionConverters.MapHasAsJava
+import scala.jdk.CollectionConverters.MapHasAsScala
class MqttSourceConnectorTest extends AnyWordSpec with Matchers {
val baseProps: Map[String, String] = Map(
@@ -113,5 +114,5 @@ class MqttSourceConnectorTest extends AnyWordSpec with Matchers {
}
def extractKcqls(configs: util.List[util.Map[String, String]]): Array[Array[String]] =
- configs.asScala.map(t => MqttSourceSettings(MqttSourceConfig(t)).kcql).toArray
+ configs.asScala.map(t => MqttSourceSettings(MqttSourceConfig(t.asScala.toMap)).kcql).toArray
}
diff --git a/kafka-connect-opensearch/src/fun/resources/keystore.jks b/kafka-connect-opensearch/src/fun/resources/keystore.jks
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/kafka-connect-opensearch/src/fun/resources/truststore.jceks b/kafka-connect-opensearch/src/fun/resources/truststore.jceks
new file mode 100644
index 0000000000..9db5b5f077
Binary files /dev/null and b/kafka-connect-opensearch/src/fun/resources/truststore.jceks differ
diff --git a/kafka-connect-opensearch/src/fun/resources/truststore.jks b/kafka-connect-opensearch/src/fun/resources/truststore.jks
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala
new file mode 100644
index 0000000000..553b00f457
--- /dev/null
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchSslTest.scala
@@ -0,0 +1,49 @@
+package io.lenses.streamreactor.connect
+
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import cats.effect.testing.scalatest.AsyncIOSpec
+import org.apache.kafka.common.config.SslConfigs
+import org.scalatest.flatspec.AsyncFlatSpecLike
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchSslTest
+ extends OpenSearchTestBase("open-ssl")
+ with AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
+
+ behavior of "OpenSearch connector with SSL"
+
+ it should "sink records with ssl enabled" ignore {
+
+ runTest(
+ "https",
+ ConnectorConfiguration(
+ "opensearch-sink-ssl",
+ Map(
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.opensearch.OpenSearchSinkConnector"),
+ "tasks.max" -> ConfigValue(1),
+ "topics" -> ConfigValue("orders"),
+ "connect.opensearch.use.http.username" -> ConfigValue("admin"),
+ "connect.opensearch.use.http.password" -> ConfigValue("admin"),
+ "connect.opensearch.protocol" -> ConfigValue("https"),
+ "connect.opensearch.hosts" -> ConfigValue(container.setup.key),
+ "connect.opensearch.port" -> ConfigValue(Integer.valueOf(container.port)),
+ "connect.opensearch.cluster.name" -> ConfigValue(container.setup.key),
+ "connect.opensearch.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders AUTOCREATE"),
+ "connect.progress.enabled" -> ConfigValue(true),
+ SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG -> ConfigValue("JKS"),
+ SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG -> ConfigValue("/security/truststore.jks"),
+ SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG -> ConfigValue("changeIt"),
+ SslConfigs.SSL_KEYSTORE_TYPE_CONFIG -> ConfigValue("JKS"),
+ SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG -> ConfigValue("/security/keystore.jks"),
+ SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> ConfigValue("changeIt"),
+ ),
+ ),
+ )
+ }
+
+}
diff --git a/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala
new file mode 100644
index 0000000000..d5701e7c41
--- /dev/null
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTest.scala
@@ -0,0 +1,39 @@
+package io.lenses.streamreactor.connect
+
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConfigValue
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import cats.effect.testing.scalatest.AsyncIOSpec
+import org.scalatest.flatspec.AsyncFlatSpecLike
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchTest
+ extends OpenSearchTestBase("open")
+ with AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
+
+ behavior of "OpenSearch connector"
+
+ it should "sink records" in {
+ runTest(
+ "http",
+ ConnectorConfiguration(
+ "opensearch-sink",
+ Map(
+ "connector.class" -> ConfigValue("io.lenses.streamreactor.connect.opensearch.OpenSearchSinkConnector"),
+ "tasks.max" -> ConfigValue(1),
+ "topics" -> ConfigValue("orders"),
+ "connect.opensearch.protocol" -> ConfigValue("http"),
+ "connect.opensearch.hosts" -> ConfigValue(container.setup.key),
+ "connect.opensearch.port" -> ConfigValue(Integer.valueOf(container.port)),
+ "connect.opensearch.cluster.name" -> ConfigValue(container.setup.key),
+ "connect.opensearch.kcql" -> ConfigValue("INSERT INTO orders SELECT * FROM orders AUTOCREATE"),
+ "connect.progress.enabled" -> ConfigValue(true),
+ ),
+ ),
+ )
+ }
+
+}
diff --git a/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala
new file mode 100644
index 0000000000..ca35679475
--- /dev/null
+++ b/kafka-connect-opensearch/src/fun/scala/io/lenses/streamreactor/connect/OpenSearchTestBase.scala
@@ -0,0 +1,110 @@
+package io.lenses.streamreactor.connect
+
+import _root_.io.confluent.kafka.serializers.KafkaJsonSerializer
+import _root_.io.lenses.streamreactor.connect.model.Order
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.ConnectorConfiguration
+import _root_.io.lenses.streamreactor.connect.testcontainers.connect.KafkaConnectClient.createConnector
+import _root_.io.lenses.streamreactor.connect.testcontainers.scalatest.StreamReactorContainerPerSuite
+import _root_.io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer
+import _root_.io.lenses.streamreactor.connect.testcontainers.SchemaRegistryContainer
+import cats.effect.IO
+import cats.effect.testing.scalatest.AsyncIOSpec
+import cats.implicits.catsSyntaxOptionId
+import com.jayway.jsonpath.JsonPath
+import org.apache.kafka.clients.producer.ProducerRecord
+import org.apache.kafka.common.serialization.StringSerializer
+import org.scalatest.Assertion
+import org.scalatest.flatspec.AsyncFlatSpecLike
+import org.scalatest.matchers.should.Matchers
+
+import java.net.URI
+import java.net.http.HttpResponse.BodyHandlers
+import java.net.http.HttpClient
+import java.net.http.HttpRequest
+import java.security.SecureRandom
+import java.security.cert.X509Certificate
+import javax.net.ssl.SSLContext
+import javax.net.ssl.X509TrustManager
+import scala.concurrent.Future
+
+abstract class OpenSearchTestBase(containerKey: String)
+ extends AsyncFlatSpecLike
+ with AsyncIOSpec
+ with StreamReactorContainerPerSuite
+ with Matchers {
+
+ override val schemaRegistryContainer: Option[SchemaRegistryContainer] = None
+
+ override def connectorModule: String = "opensearch"
+
+ override def useKeyStore: Boolean = true
+
+ val container: ElasticsearchContainer = ElasticsearchContainer(containerKey).withNetwork(network)
+
+ override val commonName: Option[String] = container.setup.key.some
+
+ override def beforeAll(): Unit = {
+ copyBinds(container.container, "/usr/share/opensearch/config/security/")
+ container.start()
+
+ super.beforeAll()
+ }
+
+ override def afterAll(): Unit = {
+ super.afterAll()
+ container.stop()
+ }
+
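+  // Produces a single Order to the "orders" topic, polls the search endpoint until the
+  // connector has indexed the document, then asserts on the returned fields.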
+ protected def runTest(protocol: String, configuration: ConnectorConfiguration): Future[Assertion] = {
+ val resources = for {
+ producer <- createProducer[String, Order](classOf[StringSerializer], classOf[KafkaJsonSerializer[Order]])
+ connector <- createConnector(configuration, 60L)
+ } yield (producer, connector)
+
+ resources.use {
+ case (producer, _) =>
+ IO {
+ // Write records to topic
+ val order = Order(1, "OP-DAX-P-20150201-95.7", 94.2, 100)
+ producer.send(new ProducerRecord[String, Order]("orders", order)).get()
+ producer.flush()
+
+ val client = HttpClient.newBuilder().sslContext(createTrustAllCertsSslContext).build()
+ val request = HttpRequest.newBuilder()
+ .GET().uri(
+ new URI(
+ s"$protocol://${container.hostNetwork.httpHostAddress}/orders/_search/?q=OP-DAX-P-20150201",
+ ),
+ ).build()
+
+ eventually {
+ val response = client.send(request, BodyHandlers.ofString())
+ val body = response.body
+ assert(JsonPath.read[Int](body, "$.hits.total.value") == 1)
+ }
+
+ client.send(request, BodyHandlers.ofString())
+ }.asserting {
+ response =>
+ val body = response.body
+ JsonPath.read[Int](body, "$.hits.hits[0]._source.id") should be(1)
+ JsonPath.read[String](body, "$.hits.hits[0]._source.product") should be("OP-DAX-P-20150201-95.7")
+ JsonPath.read[Double](body, "$.hits.hits[0]._source.price") should be(94.2)
+ JsonPath.read[Int](body, "$.hits.hits[0]._source.qty") should be(100)
+ }
+ }
+ }
+
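+  // Test-only helper: builds an SSLContext whose trust manager accepts any certificate,
+  // so the HTTP client can query the container's HTTPS endpoint without a configured truststore.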
+ private def createTrustAllCertsSslContext = {
+ val trustAllCerts = new X509TrustManager {
+ override def checkClientTrusted(chain: Array[X509Certificate], authType: String): Unit = ()
+
+ override def checkServerTrusted(chain: Array[X509Certificate], authType: String): Unit = ()
+
+ override def getAcceptedIssuers: Array[X509Certificate] = null
+ }
+ val sslContext = SSLContext.getInstance("TLS")
+ sslContext.init(null, Array(trustAllCerts), new SecureRandom())
+ sslContext
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt b/kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt
new file mode 100644
index 0000000000..73b4edfc49
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/resources/opensearch-ascii.txt
@@ -0,0 +1,24 @@
+
+ ████████▀▀▀▀▀███████████████████████████████████████████████████████████████████
+ █████▀ ▀████████████████████████████████████████████████████████████████
+ ███▀ ▄█████▄ ▀██████████████████████████████████████████████████████████████
+ ███ ▄███████▄ ██████ █████▌ █▌ ████ ███ ▄▄ ██ ███ ▄▄ ███
+ ███ █████████ ██████ █████▌ ██████▌ ▀██ ██ ██████ ██████ ███████
+ ███ ▀███████▀ ██████ █████▌ ██▌ █▄ █ ███▄▄ ██ ███▄▄ ███
+ ████▄ ▄███████ █████▌ ██████▌ ███ ███████ █ ███████████ ██
+ █████████ ████████████ ▌ █▌ ████▄ ██▄ ▄██ █▄ ▄███
+ █████████ ████████████████████████████████████████████████████████████████████
+ █████████ ▄████████████████████████████████████████████████████████████████████
+ ████████████████████████████████████████████████████████████████████████████████
+ ________ _________ .__
+ \_____ \ ______ ____ ____ / _____/ ____ _____ _______ ____ | |__
+ / | \\____ \_/ __ \ / \ \_____ \_/ __ \\__ \\_ __ \_/ ___\| | \
+ / | \ |_> > ___/| | \/ \ ___/ / __ \| | \/\ \___| Y \
+ \_______ / __/ \___ >___| /_______ /\___ >____ /__| \___ >___| /
+ \/|__| \/ \/ \/ \/ \/ \/ \/
+ _________.__ __
+ / _____/|__| ____ | | __
+ \_____ \ | |/ \| |/ /
+ / \| | | \ <
+ /_______ /|__|___| /__|_ \
+ \/ \/ \/
\ No newline at end of file
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala
new file mode 100644
index 0000000000..a206998729
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkConnector.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkConnector
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchConfigDef
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+
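+// Thin subclass that parameterises the shared ElasticSinkConnector with the OpenSearch
+// sink task class and configuration definitions.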
+class OpenSearchSinkConnector
+ extends ElasticSinkConnector[
+ OpenSearchSettings,
+ OpenSearchConfigDef,
+ OpenSearchSinkTask,
+ ](classOf[OpenSearchSinkTask], new OpenSearchConfigDef) {}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala
new file mode 100644
index 0000000000..087d3a7238
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/OpenSearchSinkTask.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch
+
+import io.lenses.streamreactor.connect.elastic.common.ElasticSinkTask
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchConfigDef
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettingsReader
+import io.lenses.streamreactor.connect.opensearch.writers.OpenSearchClientCreator
+
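+// Wires the shared ElasticSinkTask together with the OpenSearch-specific settings reader,
+// client creator, configuration definitions and ASCII banner resource.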
+class OpenSearchSinkTask
+ extends ElasticSinkTask[OpenSearchSettings, OpenSearchConfigDef](
+ OpenSearchSettingsReader,
+ OpenSearchClientCreator,
+ new OpenSearchConfigDef(),
+ "/opensearch-ascii.txt",
+ ) {}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala
new file mode 100644
index 0000000000..029eaf344a
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapper.scala
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.client
+
+import cats.effect.IO
+import com.typesafe.scalalogging.LazyLogging
+import io.lenses.streamreactor.connect.elastic.common.client.ElasticClientWrapper
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.Request
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import org.opensearch.client.opensearch.{ OpenSearchClient => UnderlyingOpenSearchClient }
+import org.opensearch.client.opensearch._types.Refresh
+import org.opensearch.client.opensearch.core.BulkRequest
+import org.opensearch.client.opensearch.core.bulk.BulkOperation
+import org.opensearch.client.opensearch.core.bulk.IndexOperation
+import org.opensearch.client.opensearch.core.bulk.UpdateOperation
+import org.opensearch.client.opensearch.indices.CreateIndexRequest
+import org.opensearch.client.transport.OpenSearchTransport
+
+import scala.jdk.CollectionConverters.SeqHasAsJava
+
+class OpenSearchClientWrapper(transport: OpenSearchTransport, client: UnderlyingOpenSearchClient)
+ extends ElasticClientWrapper
+ with LazyLogging {
+
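+  // Creates the index with default settings; the response is discarded so the IO
+  // only signals success or failure.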
+ override def createIndex(indexName: String): IO[Unit] =
+ IO {
+ val createIndexRequest = new CreateIndexRequest.Builder()
+ .index(indexName)
+ .build()
+ client.indices().create(createIndexRequest)
+ } *> IO.unit
+
+ override def close(): IO[Unit] = IO {
+ transport.close()
+ ()
+ }.recover { t: Throwable =>
+ logger.error("Error during OpenSearch client shutdown", t)
+ ()
+ }
+
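+  // Translates each InsertRequest into an index bulk operation (optionally routed through an
+  // ingest pipeline) and each UpsertRequest into a docAsUpsert update, then submits them as a
+  // single bulk request with Refresh.True so the documents become searchable immediately.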
+ override def execute(reqs: Seq[Request]): IO[Unit] =
+ IO {
+ val bulkOps: List[BulkOperation] = reqs.map {
+ case InsertRequest(index, id, json, pipeline) =>
+ new BulkOperation.Builder().index(
+ new IndexOperation.Builder().index(index).id(id).document(json).pipeline(pipeline).build(),
+ ).build()
+ case UpsertRequest(index, id, json) =>
+ new BulkOperation.Builder().update(
+ new UpdateOperation.Builder().index(index).id(id).document(json).docAsUpsert(true).build(),
+ ).build()
+ }.toList
+
+ val bulkReq = new BulkRequest.Builder().refresh(Refresh.True).operations(bulkOps.asJava).build()
+ client.bulk(bulkReq)
+ ()
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala
new file mode 100644
index 0000000000..041d7ca3f6
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/AuthMode.scala
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import enumeratum.Enum
+import enumeratum.EnumEntry
+
+import scala.collection.immutable
+
+object AuthMode extends Enum[AuthMode] {
+
+ override val values: immutable.IndexedSeq[AuthMode] = findValues
+
+ case object Credentials extends AuthMode
+
+ case object Default extends AuthMode
+
+}
+
+sealed trait AuthMode extends EnumEntry
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala
new file mode 100644
index 0000000000..bdddb70b68
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/CredentialPair.scala
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+case class CredentialPair(username: String, password: String)
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala
new file mode 100644
index 0000000000..b6d8c56d66
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchConfigDef.scala
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import com.datamountaineer.streamreactor.common.config.base.const.TraitConfigConst._
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfigDef
+import org.apache.kafka.common.config.ConfigDef
+import org.apache.kafka.common.config.ConfigDef.Importance
+import org.apache.kafka.common.config.ConfigDef.Type
+
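+// Extends the shared ElasticConfigDef with OpenSearch-specific keys, all scoped under the
+// "connect.opensearch" prefix; configDef appends these to the base definitions.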
+class OpenSearchConfigDef extends ElasticConfigDef("connect.opensearch") {
+
+ val TRANSPORT = s"$connectorPrefix.type"
+ val TRANSPORT_DOC = "OpenSearch Type (AWS, AWS_SERVERLESS, HTTPS)"
+ val TRANSPORT_DEFAULT = "https"
+
+ val AWS_REGION: String = s"$connectorPrefix.aws.region"
+ val AWS_ACCESS_KEY: String = s"$connectorPrefix.aws.access.key"
+ val AWS_SECRET_KEY: String = s"$connectorPrefix.aws.secret.key"
+ val AUTH_MODE: String = s"$connectorPrefix.aws.auth.mode"
+
+ val PROTOCOL = s"$connectorPrefix.protocol"
+  val PROTOCOL_DOC     = "URL protocol (http, https); only required for non-AWS deployments"
+ val PROTOCOL_DEFAULT = "http"
+
+ val HOSTS = s"$connectorPrefix.$CONNECTION_HOSTS_SUFFIX"
+  val HOSTS_DOC     = "List of hostnames for the OpenSearch cluster nodes, not including protocol or port."
+ val HOSTS_DEFAULT = "localhost"
+
+ val ES_PORT = s"$connectorPrefix.$CONNECTION_PORT_SUFFIX"
+  val ES_PORT_DOC     = "Port on which the OpenSearch node listens"
+ val ES_PORT_DEFAULT = 9300
+
+ val ES_PREFIX = s"$connectorPrefix.tableprefix"
+ val ES_PREFIX_DOC = "Table prefix (optional)"
+ val ES_PREFIX_DEFAULT = ""
+
+ val ES_CLUSTER_NAME = s"$connectorPrefix.$CLUSTER_NAME_SUFFIX"
+  val ES_CLUSTER_NAME_DOC     = "Name of the OpenSearch cluster, used in local mode for setting the connection"
+ val ES_CLUSTER_NAME_DEFAULT = "elasticsearch"
+
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME = s"$connectorPrefix.use.http.username"
+ val CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC     = "Username if HTTP Basic Auth is required; default is null."
+
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD = s"$connectorPrefix.use.http.password"
+ val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT = ""
+  val CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC = "Password if HTTP Basic Auth is required; default is empty (disabled)."
+
+ override def configDef: ConfigDef = super.configDef
+ .define(
+ TRANSPORT,
+ Type.STRING,
+ TRANSPORT_DEFAULT,
+ Importance.HIGH,
+ TRANSPORT_DOC,
+ )
+ .define(
+ PROTOCOL,
+ Type.STRING,
+ PROTOCOL_DEFAULT,
+ Importance.LOW,
+ PROTOCOL_DOC,
+ "Connection",
+ 1,
+ ConfigDef.Width.MEDIUM,
+ PROTOCOL,
+ )
+ .define(
+ HOSTS,
+ Type.STRING,
+ HOSTS_DEFAULT,
+ Importance.HIGH,
+ HOSTS_DOC,
+ "Connection",
+ 2,
+ ConfigDef.Width.MEDIUM,
+ HOSTS,
+ )
+ .define(
+ ES_PORT,
+ Type.INT,
+ ES_PORT_DEFAULT,
+ Importance.HIGH,
+ ES_PORT_DOC,
+ "Connection",
+ 3,
+ ConfigDef.Width.MEDIUM,
+      ES_PORT,
+ )
+ .define(
+ ES_PREFIX,
+ Type.STRING,
+ ES_PREFIX_DEFAULT,
+ Importance.HIGH,
+ ES_PREFIX_DOC,
+ "Connection",
+ 4,
+ ConfigDef.Width.MEDIUM,
+      ES_PREFIX,
+ )
+ .define(
+ ES_CLUSTER_NAME,
+ Type.STRING,
+ ES_CLUSTER_NAME_DEFAULT,
+ Importance.HIGH,
+ ES_CLUSTER_NAME_DOC,
+ "Connection",
+ 5,
+ ConfigDef.Width.MEDIUM,
+ ES_CLUSTER_NAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME_DOC,
+ "Connection",
+ 8,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_USERNAME,
+ )
+ .define(
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ Type.STRING,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DEFAULT,
+ Importance.LOW,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD_DOC,
+ "Connection",
+ 9,
+ ConfigDef.Width.MEDIUM,
+ CLIENT_HTTP_BASIC_AUTH_PASSWORD,
+ )
+ .define(
+ AWS_REGION,
+ Type.STRING,
+ "",
+ Importance.HIGH,
+ "AWS region",
+ )
+ .define(
+ AWS_ACCESS_KEY,
+ Type.PASSWORD,
+ "",
+ Importance.HIGH,
+ "AWS access key",
+ )
+ .define(
+ AWS_SECRET_KEY,
+ Type.PASSWORD,
+ "",
+      Importance.HIGH,
+      "AWS secret key",
+ )
+ .define(
+ AUTH_MODE,
+ Type.STRING,
+ "",
+      Importance.HIGH,
+      "Authentication mode: 'credentials' or 'default'",
+ )
+
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala
new file mode 100644
index 0000000000..407d18c7f3
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettings.scala
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettings
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettings
+import io.lenses.streamreactor.connect.opensearch.config.connection.OpenSeearchConnectionSettings
+
+case class OpenSearchSettings(
+ elasticCommonSettings: ElasticCommonSettings,
+ connection: OpenSeearchConnectionSettings,
+) extends ElasticSettings {
+ override def common: ElasticCommonSettings = elasticCommonSettings
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala
new file mode 100644
index 0000000000..4611695ef5
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchSettingsReader.scala
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import cats.implicits.catsSyntaxEitherId
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticCommonSettingsReader
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticConfig
+import io.lenses.streamreactor.connect.elastic.common.config.ElasticSettingsReader
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.Aws
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.AwsServerless
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchTransport.Http
+import io.lenses.streamreactor.connect.opensearch.config.connection.AWSConnectionSettings
+import io.lenses.streamreactor.connect.opensearch.config.connection.RestConnectionSettings
+import io.lenses.streamreactor.connect.security.StoresInfo
+import org.apache.kafka.connect.errors.ConnectException
+
+import scala.util.Try
+
+object OpenSearchSettingsReader extends ElasticSettingsReader[OpenSearchSettings, OpenSearchConfigDef] {
+ override def read(configDef: OpenSearchConfigDef, props: Map[String, String]): Either[Throwable, OpenSearchSettings] =
+ for {
+ config <- Try(ElasticConfig(configDef, configDef.connectorPrefix, props)).toEither
+ commonSettings <- ElasticCommonSettingsReader.read(config.configDef, props)
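+      // Resolve the transport type; blank or unrecognised values fall back to plain HTTP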
+ transportType = Option(config.getString(configDef.TRANSPORT)).map(_.trim).filterNot(_.isEmpty).flatMap(
+ OpenSearchTransport.withNameInsensitiveOption,
+ ).getOrElse(OpenSearchTransport.Http)
+ hostNames = config.getString(configDef.HOSTS).split(",").toSeq
+
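+      // AWS transports (managed or serverless) are built against a single endpoint, so multiple hosts are rejected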
+ connectionSettings <- transportType match {
+ case Http =>
+ createHttpConnectionSettings(configDef, config, hostNames).asRight
+ case Aws | AwsServerless if hostNames.size == 1 =>
+ createAwsConnectionSettings(configDef, config, transportType, hostNames).asRight
+ case _ => new ConnectException("Multiple hosts not supported for AWS").asLeft
+ }
+
+ } yield {
+ OpenSearchSettings(
+ commonSettings,
+ connectionSettings,
+ )
+ }
+
+ private def createHttpConnectionSettings(
+ configDef: OpenSearchConfigDef,
+ config: ElasticConfig,
+ hostNames: Seq[String],
+ ) = {
+ val credentialPair = for {
+ httpBasicAuthUsername <- Option(config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_USERNAME)).filterNot(
+ _.trim.isEmpty,
+ )
+ httpBasicAuthPassword <- Option(config.getString(configDef.CLIENT_HTTP_BASIC_AUTH_PASSWORD)).filterNot(
+ _.trim.isEmpty,
+ )
+ } yield {
+ CredentialPair(httpBasicAuthUsername, httpBasicAuthPassword)
+ }
+
+ val protocol = config.getString(configDef.PROTOCOL)
+ val port = config.getInt(configDef.ES_PORT)
+ val prefix = Option(config.getString(configDef.ES_PREFIX)).filterNot(_ == "")
+ val storesInfo = StoresInfo(config)
+ RestConnectionSettings(
+ hostNames,
+ protocol,
+ port,
+ prefix,
+ credentialPair,
+ storesInfo,
+ )
+ }
+
+ private def createAwsConnectionSettings(
+ configDef: OpenSearchConfigDef,
+ config: ElasticConfig,
+ transportType: OpenSearchTransport,
+ hostNames: Seq[String],
+ ) =
+ AWSConnectionSettings(
+ hostNames.head,
+ config.getString(configDef.AWS_REGION).trim,
+ Option(config.getString(configDef.AWS_ACCESS_KEY)).map(_.trim),
+ Option(config.getString(configDef.AWS_SECRET_KEY)).map(_.trim),
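+      // An unset or unrecognised auth mode defaults to AuthMode.Default (the AWS default credentials chain)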
+ Option(config.getString(configDef.AUTH_MODE)).map(_.trim).flatMap(
+ AuthMode.withNameInsensitiveOption,
+ ).getOrElse(AuthMode.Default),
+ serverless = transportType == OpenSearchTransport.AwsServerless,
+ )
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala
new file mode 100644
index 0000000000..941e680452
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/OpenSearchTransport.scala
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config
+
+import enumeratum.Enum
+import enumeratum.EnumEntry
+
+import scala.collection.immutable
+
+object OpenSearchTransport extends Enum[OpenSearchTransport] {
+
+ override val values: immutable.IndexedSeq[OpenSearchTransport] = findValues
+
+ case object Aws extends OpenSearchTransport
+
+ case object AwsServerless extends OpenSearchTransport
+
+ case object Http extends OpenSearchTransport
+
+}
+
+sealed trait OpenSearchTransport extends EnumEntry
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala
new file mode 100644
index 0000000000..3c39fe414d
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettings.scala
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import cats.implicits.catsSyntaxEitherId
+import io.lenses.streamreactor.connect.opensearch.config.AuthMode
+import io.lenses.streamreactor.connect.opensearch.config.connection.OpenSeearchConnectionSettings.defaultCredentialsProvider
+import org.apache.kafka.connect.errors.ConnectException
+import org.opensearch.client.transport.OpenSearchTransport
+import org.opensearch.client.transport.aws.AwsSdk2Transport
+import org.opensearch.client.transport.aws.AwsSdk2TransportOptions
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
+import software.amazon.awssdk.http.apache.ApacheHttpClient
+import software.amazon.awssdk.regions.Region
+
+import scala.util.Try
+
+case class AWSConnectionSettings(
+ endpoint: String,
+ region: String,
+ accessKey: Option[String],
+ secretKey: Option[String],
+ authMode: AuthMode,
+ serverless: Boolean,
+) extends OpenSeearchConnectionSettings {
+ override def toTransport: Either[Throwable, OpenSearchTransport] =
+ for {
+ creds <- credentialsProvider()
+      transport <- Try {
+ val transportOptions = AwsSdk2TransportOptions.builder().setCredentials(creds).build()
+
+ val httpClient = ApacheHttpClient.builder().build()
+ new AwsSdk2Transport(
+ httpClient,
+ endpoint, // OpenSearch endpoint, without https://
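+          // Service name used for SigV4 signing: "aoss" for OpenSearch Serverless, "es" for managed domains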
+ if (serverless) "aoss" else "es",
+ Region.of(region),
+ transportOptions,
+ )
+ }.toEither
+    } yield transport
+
+ private def credentialsProvider(): Either[Throwable, AwsCredentialsProvider] =
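+    // accessKey.zip(secretKey) is defined only when both keys are supplied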
+ (authMode, accessKey.zip(secretKey)) match {
+ case (AuthMode.Credentials, Some((access, secret))) =>
+ StaticCredentialsProvider.create(AwsBasicCredentials.create(access, secret)).asRight
+ case (AuthMode.Credentials, None) => new ConnectException("No credentials specified").asLeft
+ case (AuthMode.Default, _) => defaultCredentialsProvider.asRight
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala
new file mode 100644
index 0000000000..2cd4fc4f86
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/OpenSeearchConnectionSettings.scala
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import org.opensearch.client.transport.OpenSearchTransport
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider
+
+object OpenSeearchConnectionSettings {
+
+ val defaultCredentialsProvider: AwsCredentialsProvider = DefaultCredentialsProvider.create()
+
+}
+
+trait OpenSeearchConnectionSettings {
+ def toTransport: Either[Throwable, OpenSearchTransport]
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala
new file mode 100644
index 0000000000..6d2ceada94
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/config/connection/RestConnectionSettings.scala
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import io.lenses.streamreactor.connect.opensearch.config.CredentialPair
+import io.lenses.streamreactor.connect.security.StoresInfo
+import org.apache.http.HttpHost
+import org.apache.http.auth.AuthScope
+import org.apache.http.auth.UsernamePasswordCredentials
+import org.apache.http.impl.client.BasicCredentialsProvider
+import org.apache.http.impl.nio.client.HttpAsyncClientBuilder
+import org.opensearch.client.RestClient
+import org.opensearch.client.json.jackson.JacksonJsonpMapper
+import org.opensearch.client.transport.OpenSearchTransport
+import org.opensearch.client.transport.rest_client.RestClientTransport
+
+import scala.util.Try
+
+case class RestConnectionSettings(
+ hostnames: Seq[String],
+ protocol: String,
+ port: Int,
+ prefix: Option[String],
+ httpBasicCreds: Option[CredentialPair],
+ storesInfo: StoresInfo,
+) extends OpenSeearchConnectionSettings {
+ override def toTransport: Either[Throwable, OpenSearchTransport] =
+ for {
+ restClient <- Try(createAndConfigureRestClient()).toEither
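+      // Wrap the low-level REST client in an opensearch-java transport with Jackson JSON mapping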
+ transport = new RestClientTransport(restClient, new JacksonJsonpMapper())
+ } yield transport
+
+ private def hostnameToHttpHost(hostname: String): HttpHost =
+ new HttpHost(prefix.map(hostname +).getOrElse(hostname), port, protocol)
+
+ private def createAndConfigureRestClient(): RestClient = {
+
+ val builder = RestClient.builder(
+ hostnames.map(hostnameToHttpHost): _*,
+ )
+
+ val sslContext = storesInfo.toSslContext
+ val credsProvider = httpBasicCreds.map(creds => createCredsProvider(creds.username, creds.password))
+
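+    // Only register the HTTP client callback when TLS or HTTP Basic Auth is configured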
+ if (sslContext.nonEmpty || credsProvider.nonEmpty) {
+ builder.setHttpClientConfigCallback {
+ (httpClientBuilder: HttpAsyncClientBuilder) =>
+ credsProvider.foreach {
+ httpClientBuilder.setDefaultCredentialsProvider
+ }
+ sslContext.foreach {
+ httpClientBuilder.setSSLContext
+ }
+ httpClientBuilder
+ }
+ }
+ builder.build()
+ }
+
+ private def createCredsProvider(username: String, password: String) = {
+ val provider = new BasicCredentialsProvider()
+ provider.setCredentials(
+ AuthScope.ANY,
+ new UsernamePasswordCredentials(username, password),
+ )
+ provider
+ }
+}
diff --git a/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala
new file mode 100644
index 0000000000..d4ae75c47d
--- /dev/null
+++ b/kafka-connect-opensearch/src/main/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreator.scala
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.writers
+
+import io.lenses.streamreactor.connect.elastic.common.writers.ElasticClientCreator
+import io.lenses.streamreactor.connect.opensearch.client.OpenSearchClientWrapper
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import org.opensearch.client.opensearch.OpenSearchClient
+
+object OpenSearchClientCreator extends ElasticClientCreator[OpenSearchSettings] {
+
+  /**
+   * Construct an OpenSearch client wrapper.
+   *
+   * @param config An OpenSearchSettings instance to extract connection settings from.
+   * @return An OpenSearchClientWrapper used to write records from Kafka to OpenSearch.
+   */
+ override def create(config: OpenSearchSettings): Either[Throwable, OpenSearchClientWrapper] =
+ for {
+ transport <- config.connection.toTransport
+ } yield new OpenSearchClientWrapper(transport, new OpenSearchClient(transport))
+
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala
new file mode 100644
index 0000000000..fad1d69cba
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/client/OpenSearchClientWrapperTest.scala
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.client
+
+import cats.effect.testing.scalatest.AsyncIOSpec
+import com.fasterxml.jackson.databind.node.TextNode
+import io.lenses.streamreactor.connect.elastic.common.client.InsertRequest
+import io.lenses.streamreactor.connect.elastic.common.client.UpsertRequest
+import org.mockito.ArgumentMatchers.any
+import org.mockito.ArgumentMatchers.argThat
+import org.mockito.Answers
+import org.mockito.MockitoSugar
+import org.opensearch.client.opensearch.OpenSearchClient
+import org.opensearch.client.opensearch.core.bulk.BulkResponseItem
+import org.opensearch.client.opensearch.core.BulkRequest
+import org.opensearch.client.opensearch.core.BulkResponse
+import org.opensearch.client.opensearch.indices.CreateIndexRequest
+import org.opensearch.client.opensearch.indices.CreateIndexResponse
+import org.opensearch.client.transport.OpenSearchTransport
+import org.scalatest.funsuite.AsyncFunSuite
+import org.scalatest.matchers.should.Matchers
+
+import scala.jdk.CollectionConverters.SeqHasAsJava
+
+class OpenSearchClientWrapperTest extends AsyncFunSuite with AsyncIOSpec with Matchers with MockitoSugar {
+
+ test("createIndex should create an index successfully") {
+ val mockClient = mock[OpenSearchClient](Answers.RETURNS_DEEP_STUBS)
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+ val indexName = "test_index"
+ when(mockClient.indices().create(any[CreateIndexRequest])).thenReturn(
+ new CreateIndexResponse.Builder().index(indexName).shardsAcknowledged(true).build(),
+ )
+
+ clientWrapper.createIndex(indexName).asserting {
+ result =>
+ verify(mockClient.indices()).create(argThat { request: CreateIndexRequest =>
+ request.index() == indexName
+ })
+ result shouldBe ()
+ }
+ }
+
+ test("close should close the client successfully") {
+ val mockClient = mock[OpenSearchClient]
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+ clientWrapper.close().asserting {
+ result =>
+ verify(mockTransport).close()
+ result shouldBe ()
+ }
+
+ }
+
+ test("execute should execute bulk requests successfully") {
+ val mockClient = mock[OpenSearchClient]
+ val mockTransport = mock[OpenSearchTransport]
+
+ val clientWrapper = new OpenSearchClientWrapper(mockTransport, mockClient)
+
+ val requests = Seq(
+ InsertRequest("index1", "id1", new TextNode("no"), "pipe"),
+ UpsertRequest("index2", "id2", new TextNode("no")),
+ )
+
+ when(mockClient.bulk(any[BulkRequest])).thenReturn(
+ new BulkResponse.Builder().errors(false).items(List[BulkResponseItem]().asJava).took(200L).build(),
+ )
+
+ clientWrapper.execute(requests).asserting {
+ result =>
+ verify(mockClient).bulk(any[BulkRequest])
+ result shouldBe ()
+ }
+
+ }
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala
new file mode 100644
index 0000000000..d3aef41bb9
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/config/connection/AWSConnectionSettingsTest.scala
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.config.connection
+
+import io.lenses.streamreactor.connect.opensearch.config.AuthMode
+import org.apache.kafka.connect.errors.ConnectException
+import org.mockito.MockitoSugar
+import org.opensearch.client.transport.aws.AwsSdk2Transport
+import org.scalatest.EitherValues
+import org.scalatest.funsuite.AnyFunSuite
+import org.scalatest.matchers.should.Matchers
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
+import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
+
+class AWSConnectionSettingsTest extends AnyFunSuite with Matchers with MockitoSugar with EitherValues {
+
+ test("toTransport should return a valid OpenSearchTransport when using credentials") {
+ val endpoint = "test-endpoint"
+ val region = "us-east-1"
+ val accessKey = Some("access-key")
+ val secretKey = Some("secret-key")
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+    settings.toTransport.value shouldBe a[AwsSdk2Transport]
+
+ }
+
+ test("toTransport should return an error when using credentials but they are not provided") {
+ val endpoint = "test-endpoint"
+ val region = "us-east-1"
+ val accessKey = None
+ val secretKey = None
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+ val result = settings.toTransport
+
+ result shouldBe a[Left[_, _]]
+ result.left.value shouldBe a[ConnectException]
+ }
+
+ test("toTransport should return an error when an exception occurs during transport creation") {
+ val endpoint = "test-endpoint"
+ val region = ""
+ val accessKey = Some("access-key")
+ val secretKey = Some("secret-key")
+ val authMode = AuthMode.Credentials
+ val serverless = false
+
+ val settings = AWSConnectionSettings(endpoint, region, accessKey, secretKey, authMode, serverless)
+
+ val mockCredentialsProvider = mock[AwsCredentialsProvider]
+
+ when(mockCredentialsProvider.resolveCredentials()).thenReturn(AwsBasicCredentials.create("access-key",
+ "secret-key",
+ ))
+
+ val result = settings.toTransport
+
+ result shouldBe a[Left[_, _]]
+ result.left.value shouldBe a[IllegalArgumentException]
+ }
+}
diff --git a/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala
new file mode 100644
index 0000000000..116af41568
--- /dev/null
+++ b/kafka-connect-opensearch/src/test/scala/io/lenses/streamreactor/connect/opensearch/writers/OpenSearchClientCreatorTest.scala
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2017-2023 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.opensearch.writers
+
+import io.lenses.streamreactor.connect.opensearch.client.OpenSearchClientWrapper
+import io.lenses.streamreactor.connect.opensearch.config.OpenSearchSettings
+import org.mockito.Answers
+import org.mockito.MockitoSugar
+import org.opensearch.client.transport.OpenSearchTransport
+import org.scalatest.EitherValues
+import org.scalatest.funsuite.AnyFunSuite
+import org.scalatest.matchers.should.Matchers
+
+class OpenSearchClientCreatorTest extends AnyFunSuite with Matchers with MockitoSugar with EitherValues {
+
+ test("create should return an OpenSearchClientWrapper with a valid OpenSearchClient") {
+
+ val mockSettings = mock[OpenSearchSettings](Answers.RETURNS_DEEP_STUBS)
+ val mockTransport = mock[OpenSearchTransport]
+ when(mockSettings.connection.toTransport).thenReturn(Right(mockTransport))
+
+ OpenSearchClientCreator.create(mockSettings).value should be(a[OpenSearchClientWrapper])
+ verify(mockSettings.connection).toTransport
+ }
+
+ test("create should return an error if creating a transport fails") {
+ val ex = new RuntimeException("Transport error")
+ val mockSettings = mock[OpenSearchSettings](Answers.RETURNS_DEEP_STUBS)
+ when(mockSettings.connection.toTransport).thenReturn(Left(ex))
+
+ OpenSearchClientCreator.create(mockSettings).left.value should be(ex)
+ verify(mockSettings.connection).toTransport
+ }
+}
diff --git a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarConfig.scala b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarConfig.scala
index 6b945032fc..4d6f3ba964 100644
--- a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarConfig.scala
+++ b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarConfig.scala
@@ -23,8 +23,6 @@ import com.datamountaineer.streamreactor.common.config.base.traits.NumberRetries
import com.datamountaineer.streamreactor.common.config.base.traits.SSLSettings
import com.datamountaineer.streamreactor.common.config.base.traits.UserSettings
-import java.util
-
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
@@ -150,7 +148,7 @@ object PulsarSourceConfig {
}
-case class PulsarSourceConfig(props: util.Map[String, String])
+case class PulsarSourceConfig(props: Map[String, String])
extends BaseConfig(PulsarConfigConstants.CONNECTOR_PREFIX, PulsarSourceConfig.config, props)
with PulsarConfigBase
@@ -191,7 +189,7 @@ object PulsarSinkConfig {
)
}
-case class PulsarSinkConfig(props: util.Map[String, String])
+case class PulsarSinkConfig(props: Map[String, String])
extends BaseConfig(PulsarConfigConstants.CONNECTOR_PREFIX, PulsarSinkConfig.config, props)
with PulsarConfigBase
diff --git a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkConnector.scala b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkConnector.scala
index 948435cfa3..28a40a70c2 100644
--- a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkConnector.scala
+++ b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.pulsar.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
@@ -36,7 +37,7 @@ class PulsarSinkConnector extends SinkConnector with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Pulsar sink connector.")
- Helpers.checkInputTopics(PulsarConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(PulsarConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = Some(props)
}
diff --git a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkTask.scala b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkTask.scala
index c86fc21a89..f461789eaf 100644
--- a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkTask.scala
+++ b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarSinkTask.scala
@@ -31,6 +31,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import java.util.UUID
import scala.jdk.CollectionConverters.IterableHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* Created by andrew@datamountaineer.com on 27/08/2017.
@@ -50,7 +51,7 @@ class PulsarSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
PulsarSinkConfig.config.parse(conf)
- val sinkConfig = new PulsarSinkConfig(conf)
+ val sinkConfig = new PulsarSinkConfig(conf.asScala.toMap)
enableProgress = sinkConfig.getBoolean(PulsarConfigConstants.PROGRESS_COUNTER_ENABLED)
settings = Some(PulsarSinkSettings(sinkConfig))
diff --git a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceConnector.scala b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceConnector.scala
index c7d4751a59..8c6dd45b04 100644
--- a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceConnector.scala
+++ b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceConnector.scala
@@ -25,6 +25,7 @@ import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.source.SourceConnector
+import scala.jdk.CollectionConverters.MapHasAsScala
import scala.jdk.CollectionConverters.SeqHasAsJava
class PulsarSourceConnector extends SourceConnector with StrictLogging {
@@ -46,7 +47,7 @@ class PulsarSourceConnector extends SourceConnector with StrictLogging {
override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
logger.info(s"Setting task configurations for $maxTasks workers.")
// call settings here makes sure we don't have an exclusive subscription over more than one worker
- PulsarSourceSettings(PulsarSourceConfig(configProps), maxTasks)
+ PulsarSourceSettings(PulsarSourceConfig(configProps.asScala.toMap), maxTasks)
// distribute all kcqls to all workers and let the Pulsar subscription type handle the routing
(1 to maxTasks).map(_ => configProps).toList.asJava
}
diff --git a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTask.scala b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTask.scala
index b3bfce98b0..d55b8840f3 100644
--- a/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTask.scala
+++ b/kafka-connect-pulsar/src/main/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTask.scala
@@ -49,11 +49,11 @@ class PulsarSourceTask extends SourceTask with StrictLogging {
override def start(props: util.Map[String, String]): Unit = {
printAsciiHeader(manifest, "/pulsar-source-ascii.txt")
- val conf = if (context.configs().isEmpty) props else context.configs()
+ val conf = (if (context.configs().isEmpty) props else context.configs()).asScala.toMap
implicit val settings = PulsarSourceSettings(PulsarSourceConfig(conf), props.getOrDefault("tasks.max", "1").toInt)
- val name = conf.getOrDefault("name", s"kafka-connect-pulsar-source-${UUID.randomUUID().toString}")
+ val name = conf.getOrElse("name", s"kafka-connect-pulsar-source-${UUID.randomUUID().toString}")
val convertersMap = buildConvertersMap(conf, settings)
val messageConverter = PulsarMessageConverter(
@@ -81,7 +81,7 @@ class PulsarSourceTask extends SourceTask with StrictLogging {
enableProgress = settings.enableProgress
}
- def buildConvertersMap(props: util.Map[String, String], settings: PulsarSourceSettings): Map[String, Converter] =
+ def buildConvertersMap(props: Map[String, String], settings: PulsarSourceSettings): Map[String, Converter] =
settings.sourcesToConverters.map {
case (topic, clazz) =>
logger.info(s"Creating converter instance for $clazz")
@@ -92,7 +92,7 @@ class PulsarSourceTask extends SourceTask with StrictLogging {
s"Invalid ${PulsarConfigConstants.KCQL_CONFIG} is invalid. $clazz should have an empty ctor!",
)
}
- converter.initialize(props.asScala.toMap)
+ converter.initialize(props)
topic -> converter
}
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ConsumerConfigFactoryTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ConsumerConfigFactoryTest.scala
index fe438f4c57..470d6069dc 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ConsumerConfigFactoryTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ConsumerConfigFactoryTest.scala
@@ -24,8 +24,6 @@ import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
/**
* Created by andrew@datamountaineer.com on 23/01/2018.
* stream-reactor
@@ -52,7 +50,7 @@ class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 1)
@@ -72,7 +70,7 @@ class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = failOver",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 2)
@@ -93,7 +91,7 @@ class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = Exclusive",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 1)
@@ -112,7 +110,7 @@ class ConsumerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10 WITHSUBSCRIPTION = shared",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 2)
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ProducerConfigFactoryTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ProducerConfigFactoryTest.scala
index ef205dfada..d77ff535ef 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ProducerConfigFactoryTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/ProducerConfigFactoryTest.scala
@@ -29,7 +29,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.util.concurrent.TimeUnit
-import scala.jdk.CollectionConverters.MapHasAsJava
/**
* Created by andrew@datamountaineer.com on 23/01/2018.
@@ -55,7 +54,7 @@ class ProducerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -74,7 +73,7 @@ class ProducerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic WITHPARTITIONER = CustomPartition",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -90,7 +89,7 @@ class ProducerConfigFactoryTest extends AnyWordSpec with Matchers with MockitoSu
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = ROUNDROBINPARTITION",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSinkSettingsTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSinkSettingsTest.scala
index 760dcebc33..c1e39dacd3 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSinkSettingsTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSinkSettingsTest.scala
@@ -18,7 +18,6 @@ package com.datamountaineer.streamreactor.connect.pulsar.config
import com.datamountaineer.kcql.CompressionType
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
/**
* Created by andrew@datamountaineer.com on 22/01/2018.
@@ -35,7 +34,7 @@ class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -49,7 +48,7 @@ class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = singlepartition",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -64,7 +63,7 @@ class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = RoundRobinPartition",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -79,7 +78,7 @@ class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHPARTITIONER = CustomPartition",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -94,7 +93,7 @@ class PulsarSinkSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $topic SELECT * FROM kafka_topic WITHCOMPRESSION = LZ4",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSourceSettingsTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSourceSettingsTest.scala
index 7f28f660d1..67bf38f62b 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSourceSettingsTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/config/PulsarSourceSettingsTest.scala
@@ -21,8 +21,6 @@ import org.apache.kafka.common.config.ConfigException
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
"PulsarSourceSetting" should {
@@ -35,7 +33,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM $pulsarTopic WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 1)
@@ -52,7 +50,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> "INSERT INTO kTopic SELECT * FROM pulsarSource",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val settings = PulsarSourceSettings(config, 1)
settings.sourcesToConverters shouldBe Map("pulsarSource" -> classOf[BytesConverter].getCanonicalName)
@@ -65,7 +63,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
}
}
@@ -77,7 +75,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> "INSERT INTO kTopic SELECT * FROM pulsarSource",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
PulsarSourceSettings(config, 1)
}
@@ -90,7 +88,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM pulsarSource WITHCONVERTER=`com.non.existance.SomeConverter`",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
}
}
@@ -102,7 +100,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM pulsarSource WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
}
}
@@ -114,7 +112,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM pulsarSource WITHCONVERTER=`${classOf[AvroConverter].getCanonicalName}`",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
}
}
@@ -127,7 +125,7 @@ class PulsarSourceSettingsTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kTopic SELECT * FROM $pulsarTopic WITHSUBSCRIPTION = exclusive",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava,
+ ),
)
val _ = PulsarSourceSettings(config, 2)
}
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarWriterTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarWriterTest.scala
index dd1e629081..0e65cddeed 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarWriterTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/PulsarWriterTest.scala
@@ -30,8 +30,6 @@ import org.mockito.MockitoSugar
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
/**
* Created by andrew@datamountaineer.com on 23/01/2018.
* stream-reactor
@@ -68,7 +66,7 @@ class PulsarWriterTest extends AnyWordSpec with MockitoSugar with Matchers {
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000",
- ).asJava,
+ ),
)
val schema = getSchema
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/TestPulsarMessageBuilder.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/TestPulsarMessageBuilder.scala
index ae4580f9ed..354e72faf1 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/TestPulsarMessageBuilder.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/sink/TestPulsarMessageBuilder.scala
@@ -28,8 +28,6 @@ import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class TestPulsarMessageBuilder
extends AnyWordSpec
with Matchers
@@ -67,7 +65,7 @@ class TestPulsarMessageBuilder
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHPARTITIONER = SinglePartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -87,7 +85,7 @@ class TestPulsarMessageBuilder
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic WITHKEY(string) WITHPARTITIONER = CustomPartition WITHCOMPRESSION = ZLIB WITHDELAY = 1000",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
@@ -112,7 +110,7 @@ class TestPulsarMessageBuilder
Map(
PulsarConfigConstants.HOSTS_CONFIG -> "pulsar://localhost:6650",
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO $pulsarTopic SELECT * FROM kafka_topic BATCH = 10 WITHKEY(string) WITHPARTITIONER = RoundRobinPartition WITHDELAY = 1000",
- ).asJava,
+ ),
)
val settings = PulsarSinkSettings(config)
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarMessageConverterTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarMessageConverterTest.scala
index 005f05e4e2..cd91cd3442 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarMessageConverterTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarMessageConverterTest.scala
@@ -28,7 +28,6 @@ import org.scalatest.wordspec.AnyWordSpec
import java.util
import scala.annotation.nowarn
-import scala.jdk.CollectionConverters.MapHasAsJava
/**
* Created by andrew@datamountaineer.com on 24/01/2018.
@@ -47,7 +46,7 @@ class PulsarMessageConverterTest extends AnyWordSpec with Matchers with Converte
PulsarConfigConstants.KCQL_CONFIG -> s"INSERT INTO kafka_topic SELECT * FROM $pulsarTopic BATCH = 10",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava
+ )
val config = PulsarSourceConfig(props)
val settings = PulsarSourceSettings(config, 1)
diff --git a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTaskTest.scala b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTaskTest.scala
index f26eb260e6..2a6ef29abd 100644
--- a/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTaskTest.scala
+++ b/kafka-connect-pulsar/src/test/scala/com/datamountaineer/streamreactor/connect/pulsar/source/PulsarSourceTaskTest.scala
@@ -22,7 +22,6 @@ import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarSourceConfi
import com.datamountaineer.streamreactor.connect.pulsar.config.PulsarSourceSettings
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
/**
* Created by andrew@datamountaineer.com on 24/01/2018.
@@ -41,7 +40,7 @@ class PulsarSourceTaskTest extends AnyWordSpec with Matchers {
PulsarConfigConstants.KCQL_CONFIG -> s"$kcql",
PulsarConfigConstants.THROW_ON_CONVERT_ERRORS_CONFIG -> "true",
PulsarConfigConstants.POLLING_TIMEOUT_CONFIG -> "500",
- ).asJava
+ )
val config = PulsarSourceConfig(props)
val settings = PulsarSourceSettings(config, 1)
diff --git a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisCacheTest.scala b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisCacheTest.scala
index a9a43c8e33..5071e29a4d 100644
--- a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisCacheTest.scala
+++ b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisCacheTest.scala
@@ -32,7 +32,6 @@ import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.Jedis
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.MapHasAsScala
class RedisCacheTest
@@ -63,7 +62,7 @@ class RedisCacheTest
}
"write Kafka records to Redis using CACHE mode with JSON no schema" in new BasePropsContext {
val QUERY_ALL = s"SELECT * FROM $TOPIC PK firstName, child.firstName"
- val props = (baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)).asJava
+ val props = baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -96,7 +95,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode" in new BasePropsContext {
val QUERY_ALL = s"SELECT * FROM $TOPIC PK firstName, child.firstName"
- val props = (baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)).asJava
+ val props = baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -151,7 +150,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode and PK field is not in the selected fields" in new BasePropsContext {
val QUERY_ALL = s"SELECT age FROM $TOPIC PK firstName"
- val props = (baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)).asJava
+ val props = baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -186,7 +185,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode with explicit KEY (using INSERT)" in new BasePropsContext {
val TOPIC = "topic2"
val KCQL = s"INSERT INTO KEY_PREFIX_ SELECT * FROM $TOPIC PK firstName"
- val props = (baseProps + (RedisConfigConstants.KCQL_CONFIG -> KCQL)).asJava
+ val props = baseProps + (RedisConfigConstants.KCQL_CONFIG -> KCQL)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
@@ -258,7 +257,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode and PK has default delimiter" in new BasePropsContext {
- val props = base_Props.asJava
+ val props = base_Props
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -277,7 +276,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode and PK has custom delimiter" in new BasePropsContext {
val delimiter = "-"
- val props = (base_Props + (RedisConfigConstants.REDIS_PK_DELIMITER -> delimiter)).asJava
+ val props = base_Props + (RedisConfigConstants.REDIS_PK_DELIMITER -> delimiter)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -294,7 +293,7 @@ class RedisCacheTest
"write Kafka records to Redis using CACHE mode and PK has custom delimiter but not set" in new BasePropsContext {
val delimiter = "$"
- val props = base_Props.asJava
+ val props = base_Props
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
diff --git a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisGeoAddTest.scala b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisGeoAddTest.scala
index 64ff5a2fa5..883432f84e 100644
--- a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisGeoAddTest.scala
+++ b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisGeoAddTest.scala
@@ -17,7 +17,6 @@ import redis.clients.jedis.Jedis
import redis.clients.jedis.args.GeoUnit
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class RedisGeoAddTest extends AnyWordSpec with Matchers with MockitoSugar with ForAllTestContainer {
@@ -37,7 +36,7 @@ class RedisGeoAddTest extends AnyWordSpec with Matchers with MockitoSugar with F
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", container.mappedPort(6379), None)
@@ -81,7 +80,7 @@ class RedisGeoAddTest extends AnyWordSpec with Matchers with MockitoSugar with F
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", container.mappedPort(6379), None)
@@ -127,7 +126,7 @@ class RedisGeoAddTest extends AnyWordSpec with Matchers with MockitoSugar with F
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", container.mappedPort(6379), None)
diff --git a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisInsertSortedSetTest.scala b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisInsertSortedSetTest.scala
index 29c7463112..c2c02bfb10 100644
--- a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisInsertSortedSetTest.scala
+++ b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisInsertSortedSetTest.scala
@@ -32,7 +32,6 @@ import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.Jedis
import scala.jdk.CollectionConverters.ListHasAsScala
-import scala.jdk.CollectionConverters.MapHasAsJava
class RedisInsertSortedSetTest extends AnyWordSpec with Matchers with MockitoSugar with ForAllTestContainer {
@@ -52,7 +51,7 @@ class RedisInsertSortedSetTest extends AnyWordSpec with Matchers with MockitoSug
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", container.mappedPort(6379), None)
diff --git a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisMultipleSortedSetsTest.scala b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisMultipleSortedSetsTest.scala
index 94154c98c9..347df75b57 100644
--- a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisMultipleSortedSetsTest.scala
+++ b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisMultipleSortedSetsTest.scala
@@ -55,7 +55,7 @@ class RedisMultipleSortedSetsTest extends AnyWordSpec with Matchers with Mockito
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
diff --git a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisPubSubTest.scala b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisPubSubTest.scala
index 5ce85431f5..379d93504d 100644
--- a/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisPubSubTest.scala
+++ b/kafka-connect-redis/src/it/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisPubSubTest.scala
@@ -35,7 +35,6 @@ import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisPubSub
import scala.collection.mutable.ListBuffer
-import scala.jdk.CollectionConverters.MapHasAsJava
class RedisPubSubTest extends AnyWordSpec with Matchers with MockitoSugar with LazyLogging with ForAllTestContainer {
@@ -55,7 +54,7 @@ class RedisPubSubTest extends AnyWordSpec with Matchers with MockitoSugar with L
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> container.mappedPort(6379).toString,
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", container.mappedPort(6379), None)
diff --git a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkConnector.scala b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkConnector.scala
index 1018327309..66d56a35c2 100644
--- a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkConnector.scala
+++ b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkConnector.scala
@@ -15,6 +15,7 @@
*/
package com.datamountaineer.streamreactor.connect.redis.sink
+import cats.implicits.toBifunctorOps
import com.datamountaineer.streamreactor.common.config.Helpers
import com.datamountaineer.streamreactor.common.utils.JarManifest
import com.datamountaineer.streamreactor.connect.redis.sink.config.RedisConfig
@@ -62,7 +63,7 @@ class RedisSinkConnector extends SinkConnector with StrictLogging {
*/
override def start(props: util.Map[String, String]): Unit = {
logger.info(s"Starting Redis sink task with [${props.toString}].")
- Helpers.checkInputTopics(RedisConfigConstants.KCQL_CONFIG, props.asScala.toMap)
+ Helpers.checkInputTopics(RedisConfigConstants.KCQL_CONFIG, props.asScala.toMap).leftMap(throw _)
configProps = props
}
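
Note on the change above: Helpers.checkInputTopics appears to now return the validation result as an Either rather than throwing directly, and the connector re-raises the Left at the Connect API boundary. A minimal, self-contained sketch of that pattern (the names below are illustrative stand-ins, not the real Helpers API):

import cats.implicits.toBifunctorOps

// Hypothetical stand-in for the validation: reports the failure as a value instead of throwing.
def checkInputTopics(kcqlKey: String, props: Map[String, String]): Either[Throwable, Unit] =
  Either.cond(props.get(kcqlKey).exists(_.nonEmpty), (), new IllegalArgumentException(s"Missing [$kcqlKey]"))

// At the SinkConnector.start boundary the Either is collapsed back into an exception.
def start(props: Map[String, String]): Unit = {
  checkInputTopics("connect.redis.kcql", props).leftMap(ex => throw ex)
  ()
}
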
diff --git a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTask.scala b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTask.scala
index b8e901ce31..ef9d513597 100644
--- a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTask.scala
+++ b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTask.scala
@@ -32,6 +32,7 @@ import org.apache.kafka.connect.sink.SinkTask
import java.util
import scala.jdk.CollectionConverters.IterableHasAsScala
import scala.jdk.CollectionConverters.ListHasAsScala
+import scala.jdk.CollectionConverters.MapHasAsScala
/**
* RedisSinkTask
@@ -55,7 +56,7 @@ class RedisSinkTask extends SinkTask with StrictLogging {
val conf = if (context.configs().isEmpty) props else context.configs()
RedisConfig.config.parse(conf)
- val sinkConfig = new RedisConfig(conf)
+ val sinkConfig = new RedisConfig(conf.asScala.toMap)
val settings = RedisSinkSettings(sinkConfig)
enableProgress = sinkConfig.getBoolean(RedisConfigConstants.PROGRESS_COUNTER_ENABLED)
diff --git a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisConfig.scala b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisConfig.scala
index 41dfcd6088..30683c9c4c 100644
--- a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisConfig.scala
+++ b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisConfig.scala
@@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.common.config.ConfigDef.Importance
import org.apache.kafka.common.config.ConfigDef.Type
-import java.util
-
object RedisConfig {
val config: ConfigDef = new ConfigDef()
@@ -136,7 +134,7 @@ object RedisConfig {
*
* Holds config, extends AbstractConfig.
*/
-case class RedisConfig(props: util.Map[String, String])
+case class RedisConfig(props: Map[String, String])
extends BaseConfig(RedisConfigConstants.CONNECTOR_PREFIX, RedisConfig.config, props)
with KcqlSettings
with ErrorPolicySettings
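
With the constructor above taking a Scala Map instead of java.util.Map, callers throughout this diff drop their .asJava conversions. A small usage sketch, with keys taken from the tests changed in this diff:

// Constructing the config from a plain Scala Map, matching the new case-class signature.
val props: Map[String, String] = Map(
  RedisConfigConstants.REDIS_HOST  -> "localhost",
  RedisConfigConstants.REDIS_PORT  -> "6379",
  RedisConfigConstants.KCQL_CONFIG -> "SELECT * FROM topicA PK firstName",
)
val config   = RedisConfig(props)           // no .asJava conversion needed any more
val settings = RedisSinkSettings(config)
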
diff --git a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisSinkSettings.scala b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisSinkSettings.scala
index e55cf60fe7..df579a035c 100644
--- a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisSinkSettings.scala
+++ b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/config/RedisSinkSettings.scala
@@ -19,24 +19,39 @@ import com.datamountaineer.kcql.Kcql
import com.datamountaineer.streamreactor.common.errors.ErrorPolicy
import com.datamountaineer.streamreactor.common.errors.ThrowErrorPolicy
import com.datamountaineer.streamreactor.common.rowkeys.StringKeyBuilder
+import io.lenses.streamreactor.connect.security.StoresInfo
import org.apache.kafka.common.config.ConfigException
-import org.apache.kafka.common.config.SslConfigs
import scala.jdk.CollectionConverters.ListHasAsScala
+object RedisConnectionInfo {
+ def apply(config: RedisConfig): RedisConnectionInfo = {
+ val host = config.getString(RedisConfigConstants.REDIS_HOST)
+ if (host.isEmpty) throw new ConfigException(s"[${RedisConfigConstants.REDIS_HOST}] is not set correctly")

+
+ val password = Option(config.getPassword(RedisConfigConstants.REDIS_PASSWORD)).map(_.value())
+
+ val isSslConnection = config.getBoolean(RedisConfigConstants.REDIS_SSL_ENABLED)
+
+ val storesInfo: StoresInfo = StoresInfo(config)
+
+ new RedisConnectionInfo(host = host,
+ port = config.getInt(RedisConfigConstants.REDIS_PORT),
+ password = password,
+ isSslConnection = isSslConnection,
+ storesInfo = storesInfo,
+ )
+ }
+
+}
+
// Redis connection details: host, port, password
case class RedisConnectionInfo(
- host: String,
- port: Int,
- password: Option[String],
- isSslConnection: Boolean = false,
- keyPassword: Option[String] = None,
- keyStoreType: Option[String] = None,
- keyStorePassword: Option[String] = None,
- keyStoreFilepath: Option[String] = None,
- trustStoreType: Option[String] = None,
- trustStorePassword: Option[String] = None,
- trustStoreFilepath: Option[String] = None,
+ host: String,
+ port: Int,
+ password: Option[String],
+ isSslConnection: Boolean = false,
+ storesInfo: StoresInfo = StoresInfo(),
)
// Sink settings of each Redis KCQL statement
@@ -97,41 +112,3 @@ object RedisSinkSettings {
}
}
-
-object RedisConnectionInfo {
- def apply(config: RedisConfig): RedisConnectionInfo = {
- val host = config.getString(RedisConfigConstants.REDIS_HOST)
- if (host.isEmpty) new ConfigException(s"[${RedisConfigConstants.REDIS_HOST}] is not set correctly")
-
- val password = Option(config.getPassword(RedisConfigConstants.REDIS_PASSWORD)).map(_.value())
-
- val isSslConnection = config.getBoolean(RedisConfigConstants.REDIS_SSL_ENABLED)
-
- val trustStoreType = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG))
- val trustStorePath = Option(config.getString(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG))
- val trustStorePassword = Option(config.getPassword(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)) match {
- case Some(p) => Some(p.value())
- case None => None
- }
-
- val keyStoreType = Option(config.getString(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG))
- val keyStorePath = Option(config.getString(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
- val keyStorePassword = Option(config.getPassword(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)) match {
- case Some(p) => Some(p.value())
- case None => None
- }
-
- new RedisConnectionInfo(
- host = host,
- port = config.getInt(RedisConfigConstants.REDIS_PORT),
- password = password,
- isSslConnection = isSslConnection,
- keyStoreType = keyStoreType,
- keyStorePassword = keyStorePassword,
- keyStoreFilepath = keyStorePath,
- trustStoreType = trustStoreType,
- trustStorePassword = trustStorePassword,
- trustStoreFilepath = trustStorePath,
- )
- }
-}
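
The seven loose SSL fields are replaced by a single StoresInfo value. The real definitions live in io.lenses.streamreactor.connect.security; the sketch below only mirrors the shape implied by their usage in this diff (built from a RedisConfig, with an optional key store and trust store, each carrying a path, optional type and optional password):

// Assumed shape, inferred only from how the types are used in this diff.
case class StoreInfo(path: String, storeType: Option[String], storePassword: Option[String])
case class StoresInfo(keyStore: Option[StoreInfo] = None, trustStore: Option[StoreInfo] = None)

// With these, a connection carrying a trust store reduces to:
val connection = RedisConnectionInfo(
  host            = "localhost",
  port            = 6379,
  password        = None,
  isSslConnection = true,
  storesInfo      = StoresInfo(trustStore = Some(StoreInfo("/tmp/truststore.jks", Some("JKS"), Some("changeIt")))),
)
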
diff --git a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisWriter.scala b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisWriter.scala
index a456cb41a0..ed910a7916 100644
--- a/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisWriter.scala
+++ b/kafka-connect-redis/src/main/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisWriter.scala
@@ -19,6 +19,7 @@ import com.datamountaineer.streamreactor.common.errors.ErrorHandler
import com.datamountaineer.streamreactor.common.sink.DbWriter
import com.datamountaineer.streamreactor.connect.redis.sink.config.RedisSinkSettings
import com.typesafe.scalalogging.StrictLogging
+import io.lenses.streamreactor.connect.security.StoreInfo
import redis.clients.jedis.Jedis
import java.io.File
@@ -35,30 +36,21 @@ abstract class RedisWriter extends DbWriter with StrictLogging with ErrorHandler
val connection = sinkSettings.connectionInfo
if (connection.isSslConnection) {
- connection.keyStoreFilepath match {
- case Some(path) =>
- if (!new File(path).exists) {
- throw new FileNotFoundException(s"Keystore not found in: [$path]")
- }
-
- System.setProperty("javax.net.ssl.keyStorePassword", connection.keyStorePassword.getOrElse(""))
+ connection.storesInfo.keyStore.foreach {
+ case StoreInfo(path: String, _, _) if !new File(path).exists =>
+ throw new FileNotFoundException(s"Keystore not found in: [$path]")
+ case StoreInfo(path: String, storeType: Option[String], storePassword: Option[String]) =>
+ System.setProperty("javax.net.ssl.keyStorePassword", storePassword.getOrElse(""))
System.setProperty("javax.net.ssl.keyStore", path)
- System.setProperty("javax.net.ssl.keyStoreType", connection.keyStoreType.getOrElse("jceks"))
-
- case None =>
+ System.setProperty("javax.net.ssl.keyStoreType", storeType.getOrElse("jceks"))
}
-
- connection.trustStoreFilepath match {
- case Some(path) =>
- if (!new File(path).exists) {
- throw new FileNotFoundException(s"Truststore not found in: $path")
- }
-
- System.setProperty("javax.net.ssl.trustStorePassword", connection.trustStorePassword.getOrElse(""))
+ connection.storesInfo.trustStore.foreach {
+ case StoreInfo(path: String, _, _) if !new File(path).exists =>
+ throw new FileNotFoundException(s"Truststore not found in: [$path]")
+ case StoreInfo(path: String, storeType: Option[String], storePassword: Option[String]) =>
+ System.setProperty("javax.net.ssl.trustStorePassword", storePassword.getOrElse(""))
System.setProperty("javax.net.ssl.trustStore", path)
- System.setProperty("javax.net.ssl.trustStoreType", connection.trustStoreType.getOrElse("jceks"))
-
- case None =>
+ System.setProperty("javax.net.ssl.trustStoreType", storeType.getOrElse("jceks"))
}
}
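
The rewritten SSL block above relies on match guards: the file-existence check fires before any javax.net.ssl system property is mutated. The same idiom extracted into a standalone sketch (StoreInfo shape assumed as in the note further up):

import java.io.File
import java.io.FileNotFoundException

// prefix is "key" or "trust"; the guarded case runs first, so no JVM-wide
// property is set when the store file is missing.
def applyStore(prefix: String, store: Option[StoreInfo]): Unit = store.foreach {
  case StoreInfo(path, _, _) if !new File(path).exists =>
    throw new FileNotFoundException(s"${prefix.capitalize}store not found in: [$path]")
  case StoreInfo(path, storeType, storePassword) =>
    System.setProperty(s"javax.net.ssl.${prefix}StorePassword", storePassword.getOrElse(""))
    System.setProperty(s"javax.net.ssl.${prefix}Store", path)
    System.setProperty(s"javax.net.ssl.${prefix}StoreType", storeType.getOrElse("jceks"))
}
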
diff --git a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTaskTest.scala b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTaskTest.scala
index 85294f3831..d6e5b11b55 100644
--- a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTaskTest.scala
+++ b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/RedisSinkTaskTest.scala
@@ -22,8 +22,6 @@ import com.datamountaineer.streamreactor.connect.redis.sink.support.RedisMockSup
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
-import scala.jdk.CollectionConverters.MapHasAsJava
-
class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport {
"work with Cache" -> {
@@ -33,7 +31,7 @@ class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> "0000",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
@@ -52,7 +50,7 @@ class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> "0000",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
@@ -71,7 +69,7 @@ class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> "0000",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
@@ -97,7 +95,7 @@ class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> "0000",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
@@ -131,7 +129,7 @@ class RedisSinkTaskTest extends AnyWordSpec with Matchers with RedisMockSupport
RedisConfigConstants.REDIS_HOST -> "localhost",
RedisConfigConstants.REDIS_PORT -> "0000",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
diff --git a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/support/RedisMockSupport.scala b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/support/RedisMockSupport.scala
index 1b426215af..c5a1917ce9 100644
--- a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/support/RedisMockSupport.scala
+++ b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/support/RedisMockSupport.scala
@@ -19,8 +19,6 @@ import com.datamountaineer.streamreactor.connect.redis.sink.config.RedisConfig
import com.datamountaineer.streamreactor.connect.redis.sink.config.RedisConfigConstants
import org.mockito.MockitoSugar
-import scala.jdk.CollectionConverters.MapHasAsJava
-
trait RedisMockSupport extends MockitoSugar {
def getRedisSinkConfig(password: Boolean, KCQL: Option[String], pkDelimiter: Option[String] = None): RedisConfig = {
@@ -42,7 +40,7 @@ trait RedisMockSupport extends MockitoSugar {
baseProps += RedisConfigConstants.REDIS_PK_DELIMITER -> delimiter
}
- RedisConfig(baseProps.asJava)
+ RedisConfig(baseProps.toMap)
}
}
diff --git a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisSslTest.scala b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisSslTest.scala
index 2104a57870..4e4b2578b4 100644
--- a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisSslTest.scala
+++ b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisSslTest.scala
@@ -31,7 +31,6 @@ import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.Jedis
import java.net.URI
-import scala.jdk.CollectionConverters.MapHasAsJava
import scala.jdk.CollectionConverters.MapHasAsScala
/*
@@ -103,7 +102,7 @@ class RedisSslTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with
SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG -> "keystore-password",
)
- val config = RedisConfig(map.asJava)
+ val config = RedisConfig(map)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
@@ -147,7 +146,7 @@ class RedisSslTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with
jedis.ping() shouldBe "PONG"
val QUERY_ALL = s"SELECT * FROM $TOPIC PK firstName, child.firstName"
- val props = (baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)).asJava
+ val props = baseProps + (RedisConfigConstants.KCQL_CONFIG -> QUERY_ALL)
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
val writer = new RedisCache(settings)
diff --git a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisStreamTest.scala b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisStreamTest.scala
index 978875da24..c262289e85 100644
--- a/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisStreamTest.scala
+++ b/kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisStreamTest.scala
@@ -48,7 +48,6 @@ import redis.clients.jedis.StreamEntryID
import redis.clients.jedis.params.XAddParams
import java.util
-import scala.jdk.CollectionConverters.MapHasAsJava
class RedisStreamTest
extends AnyWordSpec
@@ -74,7 +73,7 @@ class RedisStreamTest
RedisConfigConstants.REDIS_PORT -> "6379",
RedisConfigConstants.KCQL_CONFIG -> KCQL,
RedisConfigConstants.REDIS_PASSWORD -> "",
- ).asJava
+ )
val config = RedisConfig(props)
val settings = RedisSinkSettings(config)
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index 2dffc8ae0a..55f35a2fac 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -80,7 +80,7 @@ object Dependencies {
val jerseyCommonVersion = "3.1.1"
val calciteVersion = "1.34.0"
- val awsSdkVersion = "2.20.69"
+ val awsSdkVersion = "2.20.153"
val guavaVersion = "31.0.1-jre"
val guiceVersion = "5.1.0"
val javaxBindVersion = "2.3.1"
@@ -165,6 +165,11 @@ object Dependencies {
override val jnaVersion: String = "4.5.1"
}
+ object OpenSearchVersions extends ElasticVersions {
+ override val elastic4sVersion: String = "8.9.2"
+ override val elasticSearchVersion: String = "8.10.1"
+ override val jnaVersion: String = "4.5.1"
+ }
}
import Versions._
@@ -237,12 +242,12 @@ object Dependencies {
val http4sCirce = "org.http4s" %% "http4s-circe" % http4sVersion
val http4s = Seq(http4sDsl, http4sAsyncClient, http4sBlazeServer, http4sCirce)
- val bouncyProv = "org.bouncycastle" % "bcprov-jdk15on" % bouncyCastleVersion
- val bouncyUtil = "org.bouncycastle" % "bcutil-jdk15on" % bouncyCastleVersion
- val bouncyPkix = "org.bouncycastle" % "bcpkix-jdk15on" % bouncyCastleVersion
- val bouncyBcpg = "org.bouncycastle" % "bcpg-jdk15on" % bouncyCastleVersion
- val bouncyTls = "org.bouncycastle" % "bctls-jdk15on" % bouncyCastleVersion
- val bouncyCastle = Seq(bouncyProv, bouncyUtil, bouncyPkix, bouncyBcpg, bouncyTls)
+ val bouncyProv = "org.bouncycastle" % "bcprov-jdk15on" % bouncyCastleVersion
+ val bouncyUtil = "org.bouncycastle" % "bcutil-jdk15on" % bouncyCastleVersion
+ val bouncyPkix = "org.bouncycastle" % "bcpkix-jdk15on" % bouncyCastleVersion
+ val bouncyBcpg = "org.bouncycastle" % "bcpg-jdk15on" % bouncyCastleVersion
+ val bouncyTls = "org.bouncycastle" % "bctls-jdk15on" % bouncyCastleVersion
+ val bouncyCastle: Seq[ModuleID] = Seq(bouncyProv, bouncyUtil, bouncyPkix, bouncyBcpg, bouncyTls)
lazy val avro = "org.apache.avro" % "avro" % avroVersion
lazy val avroProtobuf = "org.apache.avro" % "avro-protobuf" % avroVersion
@@ -304,9 +309,10 @@ object Dependencies {
lazy val calciteLinq4J = "org.apache.calcite" % "calcite-linq4j" % calciteVersion
- lazy val s3Sdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
- lazy val stsSdk = "software.amazon.awssdk" % "sts" % awsSdkVersion
- lazy val javaxBind = "javax.xml.bind" % "jaxb-api" % javaxBindVersion
+ lazy val s3Sdk = "software.amazon.awssdk" % "s3" % awsSdkVersion
+ lazy val stsSdk = "software.amazon.awssdk" % "sts" % awsSdkVersion
+ lazy val javaxBind = "javax.xml.bind" % "jaxb-api" % javaxBindVersion
+ lazy val awsOpenSearch = "software.amazon.awssdk" % "opensearch" % awsSdkVersion
lazy val guava = "com.google.guava" % "guava" % guavaVersion
lazy val guice = "com.google.inject" % "guice" % guiceVersion
@@ -351,8 +357,8 @@ object Dependencies {
lazy val testContainersScalaMongodb = "com.dimafeng" %% "testcontainers-scala-mongodb" % testcontainersScalaVersion
lazy val testContainersScalaToxiProxy =
"com.dimafeng" %% "testcontainers-scala-toxiproxy" % testcontainersScalaVersion
- lazy val testContainersScalaElasticsearch =
- "com.dimafeng" %% "testcontainers-scala-elasticsearch" % testcontainersScalaVersion
+ //lazy val testContainersScalaElasticsearch =
+ // "com.dimafeng" %% "testcontainers-scala-elasticsearch" % testcontainersScalaVersion
lazy val testcontainersCore = "org.testcontainers" % "testcontainers" % testcontainersVersion
lazy val testcontainersKafka = "org.testcontainers" % "kafka" % testcontainersVersion
@@ -427,6 +433,7 @@ object Dependencies {
lazy val festAssert = "org.easytesting" % "fest-assert" % "1.4"
def elastic4sCore(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-core" % v
+ def elastic4sCats(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-effect-cats-3" % v
def elastic4sClient(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-client-esjava" % v
def elastic4sTestKit(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-testkit" % v
def elastic4sHttp(v: String): ModuleID = "com.sksamuel.elastic4s" %% "elastic4s-http" % v
@@ -436,6 +443,8 @@ object Dependencies {
def jna(v: String): ModuleID = "net.java.dev.jna" % "jna" % v
+ val openSearchRest: ModuleID = "org.opensearch.client" % "opensearch-rest-client" % "2.9.0"
+ val openSearchJava: ModuleID = "org.opensearch.client" % "opensearch-java" % "2.6.0"
}
trait Dependencies {
@@ -568,11 +577,16 @@ trait Dependencies {
def elasticTestCommonDeps(v: ElasticVersions): Seq[ModuleID] = Seq(
elastic4sTestKit(v.elastic4sVersion),
testContainersScala,
- testContainersScalaElasticsearch,
)
+ val kafkaConnectElasticBaseDeps: Seq[ModuleID] =
+ Seq[ModuleID]()
+
val kafkaConnectElastic8Deps: Seq[ModuleID] =
- elasticCommonDeps(Elastic8Versions) ++ Seq(elastic4sClient(Elastic8Versions.elastic4sVersion))
+ kafkaConnectElasticBaseDeps ++ Seq(elastic4sClient(Elastic8Versions.elastic4sVersion))
+
+ val kafkaConnectOpenSearchDeps: Seq[ModuleID] =
+ kafkaConnectElasticBaseDeps ++ Seq(openSearchRest, openSearchJava, awsOpenSearch, stsSdk)
val kafkaConnectElastic8TestDeps: Seq[ModuleID] = baseTestDeps ++ elasticTestCommonDeps(Elastic8Versions)
@@ -602,7 +616,7 @@ trait Dependencies {
moduleId: ModuleID => moduleId.extra("scope" -> "test")
}
- val testCommonDeps: Seq[ModuleID] = baseDeps ++ Seq(
+ val testCommonDeps: Seq[ModuleID] = baseDeps ++ bouncyCastle ++ Seq(
scalatest,
json4sJackson,
json4sNative,
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
index 078aee71c6..fb03ecc639 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/ElasticsearchContainer.scala
@@ -1,38 +1,80 @@
package io.lenses.streamreactor.connect.testcontainers
-import io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer.defaultNetworkAlias
-import io.lenses.streamreactor.connect.testcontainers.ElasticsearchContainer.defaultTag
-import org.testcontainers.elasticsearch.{ ElasticsearchContainer => JavaElasticsearchContainer }
+import cats.implicits.{catsSyntaxOptionId, none}
+import org.scalatest.Assertions.fail
+import org.testcontainers.elasticsearch.{ElasticsearchContainer => JavaElasticsearchContainer}
import org.testcontainers.utility.DockerImageName
+
+case class ElasticContainerSetup(
+ key: String,
+ imageUrl: String,
+ imageVersion: String,
+ compatibleSubstituteFor: Option[String],
+ envs: Seq[(String, String)]
+ )
+object ElasticsearchContainer {
+
+ private val setup: Map[String, ElasticContainerSetup] =
+ Seq(
+ ElasticContainerSetup("elastic8", "docker.elastic.co/elasticsearch/elasticsearch", "8.10.1", none, Seq("xpack.security.enabled" -> "false")),
+ ElasticContainerSetup("elastic8-ssl", "docker.elastic.co/elasticsearch/elasticsearch", "8.10.1", none, Seq.empty),
+ ElasticContainerSetup("open", "opensearchproject/opensearch", "2.10.0", "docker.elastic.co/elasticsearch/elasticsearch".some, Seq("plugins.security.disabled" -> "true")),
+ ElasticContainerSetup("open-ssl",
+ "opensearchproject/opensearch",
+ "2.10.0",
+ "docker.elastic.co/elasticsearch/elasticsearch".some,
+ Seq(
+ "plugins.security.ssl.http.enabled" -> "true",
+ "plugins.security.ssl.http.keystore_type" -> "jks",
+ "plugins.security.ssl.http.keystore_filepath" -> "security/keystore.jks",
+ "plugins.security.ssl.http.keystore_password" -> "changeIt",
+ "plugins.security.ssl.http.truststore_type" -> "jks",
+ "plugins.security.ssl.http.truststore_filepath" -> "security/truststore.jks",
+ "plugins.security.ssl.http.truststore_password" -> "changeIt",
+
+// "plugins.security.ssl.transport.keystore_type" -> "jks",
+// "plugins.security.ssl.transport.keystore_filepath" -> "security/keystore.jks",
+// "plugins.security.ssl.transport.keystore_password" -> "changeIt",
+// "plugins.security.ssl.transport.truststore_type" -> "jks",
+// "plugins.security.ssl.transport.truststore_filepath" -> "security/truststore.jks",
+// "plugins.security.ssl.transport.truststore_password" -> "changeIt",
+ )
+ )
+ ).map(ec => ec.key -> ec).toMap
+ def apply(containerKey: String): ElasticsearchContainer = {
+ val containerSetup = setup.getOrElse(containerKey, fail(s"Container setup not found for key [$containerKey]"))
+ new ElasticsearchContainer(containerSetup)
+ }
+
+}
class ElasticsearchContainer(
- dockerImage: DockerImageName,
- dockerTag: String = defaultTag,
- val networkAlias: String = defaultNetworkAlias,
-) extends SingleContainer[JavaElasticsearchContainer] {
+ val setup: ElasticContainerSetup,
+ ) extends SingleContainer[JavaElasticsearchContainer] {
- val port: Int = 9200
+ val port : Int = 9200
- override val container: JavaElasticsearchContainer =
- new JavaElasticsearchContainer(dockerImage.withTag(dockerTag))
- container.withNetworkAliases(networkAlias)
- container.withEnv("xpack.security.enabled", "false")
+
+ override val container: JavaElasticsearchContainer = {
+ val image = DockerImageName
+ .parse(setup.imageUrl)
+ .withTag(setup.imageVersion)
+ val imageWithSub = setup.compatibleSubstituteFor.fold(image)(
+ image.asCompatibleSubstituteFor
+ )
+ new JavaElasticsearchContainer(imageWithSub)
+ }
+
+
+ container.withNetworkAliases(setup.key)
+
+ setup.envs.foreach { case (k, v) => container.withEnv(k, v) }
lazy val hostNetwork = new HostNetwork()
class HostNetwork {
def httpHostAddress: String = container.getHttpHostAddress
}
-}
-object ElasticsearchContainer {
- private val dockerImage = DockerImageName.parse("docker.elastic.co/elasticsearch/elasticsearch")
- private val defaultTag = "6.8.8"
- private val defaultNetworkAlias = "elastic"
-
- def apply(
- networkAlias: String = defaultNetworkAlias,
- dockerTag: String = defaultTag,
- ): ElasticsearchContainer =
- new ElasticsearchContainer(dockerImage, dockerTag, networkAlias)
+
}
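
The container is now selected by key from the setup map above rather than parameterized by image and tag. A usage sketch with the registered keys:

// Keys come from the setup map above: "elastic8", "elastic8-ssl", "open", "open-ssl".
val elastic = ElasticsearchContainer("elastic8")  // Elasticsearch 8.10.1, security disabled
val open    = ElasticsearchContainer("open")      // OpenSearch 2.10.0 masquerading as the ES image
elastic.container.start()
val httpAddress = elastic.hostNetwork.httpHostAddress  // host:port for the REST client
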
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
index 4224cc0b64..2a4ec58b93 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaConnectContainer.scala
@@ -3,7 +3,7 @@ package io.lenses.streamreactor.connect.testcontainers
import com.github.dockerjava.api.model.Ulimit
import io.lenses.streamreactor.connect.testcontainers.KafkaVersions.ConfluentVersion
import io.lenses.streamreactor.connect.testcontainers.KafkaConnectContainer.{defaultNetworkAlias, defaultRestPort}
-import org.testcontainers.containers.{GenericContainer, KafkaContainer}
+import org.testcontainers.containers.{BindMode, GenericContainer, KafkaContainer}
import org.testcontainers.containers.wait.strategy.Wait
import org.testcontainers.utility.DockerImageName
@@ -74,6 +74,15 @@ class KafkaConnectContainer(
def installPackage(pkg: String): ExecResult =
rootExecInContainer(container = this, commands = Seq(s"microdnf","install",pkg))
+
+
+ def copyBinds(binds: Seq[(String, String)]): Unit = {
+ binds.foreach {
+ case (k, v) =>
+ addFileSystemBind(k, v, BindMode.READ_WRITE)
+ }
+ }
+
}
object KafkaConnectContainer {
private val dockerImage = DockerImageName.parse("confluentinc/cp-kafka-connect")
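
copyBinds is a convenience for mounting generated files, such as the JKS stores created by the new KeyStoreUtils below, into the Connect container before it starts. Illustrative usage only; the host paths here are hypothetical:

// Each (hostPath -> containerPath) pair becomes a READ_WRITE file-system bind.
val binds: Seq[(String, String)] = Seq(
  "/tmp/security/keystore.jks"   -> "/security/keystore.jks",
  "/tmp/security/truststore.jks" -> "/security/truststore.jks",
)
kafkaConnectContainer.copyBinds(binds)
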
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
index 5b0c0bee0c..20c055cf36 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/KafkaVersions.scala
@@ -4,7 +4,7 @@ import com.typesafe.scalalogging.LazyLogging
object KafkaVersions extends LazyLogging{
- private val FallbackConfluentVersion = "7.3.1"
+ private val FallbackConfluentVersion = "7.5.0"
val ConfluentVersion: String = {
val (vers, from) = sys.env.get("CONFLUENT_VERSION") match {
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
index 898512f026..732e3ed575 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/ConnectorConfiguration.scala
@@ -10,7 +10,7 @@ case class ConnectorConfiguration(
implicit val formats: DefaultFormats.type = DefaultFormats
- def toJson(): String = {
+ def toJson: String = {
val mergedConfigMap = config + ("tasks.max" -> ConfigValue(1))
Serialization.write(
Map[String, Any](
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
index 3b40b71cba..e3bba2171b 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/connect/KafkaConnectClient.scala
@@ -34,7 +34,7 @@ class KafkaConnectClient(kafkaConnectContainer: KafkaConnectContainer) extends S
timeoutSeconds: Long = 10L,
): Unit = {
val httpPost = HttpRequest.newBuilder()
- .POST(HttpRequest.BodyPublishers.ofString(connector.toJson()))
+ .POST(HttpRequest.BodyPublishers.ofString(connector.toJson))
.uri(URI.create(s"${kafkaConnectContainer.hostNetwork.restEndpointUrl}/connectors"))
.header("Accept", "application/json")
.header("Content-Type", "application/json")
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala
new file mode 100644
index 0000000000..5415937750
--- /dev/null
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/KeyStoreUtils.scala
@@ -0,0 +1,98 @@
+package io.lenses.streamreactor.connect.testcontainers.scalatest
+
+import com.typesafe.scalalogging.LazyLogging
+import org.bouncycastle.asn1.x500.X500Name
+import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo
+import org.bouncycastle.cert.X509v3CertificateBuilder
+import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter
+import org.bouncycastle.jce.provider.BouncyCastleProvider
+import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder
+
+import java.io.FileOutputStream
+import java.math.BigInteger
+import java.nio.file.{Files, Path}
+import java.security.cert.X509Certificate
+import java.security.interfaces.RSAPrivateKey
+import java.security.{KeyPairGenerator, KeyStore, Security}
+import java.util.Date
+
+object KeyStoreUtils extends LazyLogging {
+ Security.addProvider(new BouncyCastleProvider())
+
+ def createKeystore(commonName: String): Path = {
+
+ val tmpDir: Path = Files.createTempDirectory("security")
+
+ val (certificate, privateKey) = KeyStoreUtils.generateSelfSignedCertificate(2048, 365, commonName)
+ val _ = KeyStoreUtils.createAndSaveKeystore(tmpDir, "changeIt", certificate, privateKey)
+ val _ = KeyStoreUtils.createAndSaveTruststore(tmpDir, "changeIt", certificate)
+ logger.info(s"container -> Creating keystore at $tmpDir")
+ tmpDir
+ }
+
+ def generateSelfSignedCertificate(
+ keySize: Int,
+ certificateValidityDays: Int,
+ commonName: String
+ ): (X509Certificate, RSAPrivateKey) = {
+ val keyPairGen = KeyPairGenerator.getInstance("RSA", "BC")
+ keyPairGen.initialize(keySize)
+ val keyPair = keyPairGen.generateKeyPair()
+
+ val notBefore = new Date()
+ val notAfter = new Date(System.currentTimeMillis() + certificateValidityDays * 24L * 60 * 60 * 1000)
+
+ val publicKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic.getEncoded)
+
+ val certBuilder = new X509v3CertificateBuilder(
+ new X500Name(s"CN=$commonName"),
+ BigInteger.valueOf(System.currentTimeMillis()),
+ notBefore,
+ notAfter,
+ new X500Name(s"CN=$commonName"),
+ publicKeyInfo
+ )
+
+ val contentSigner = new JcaContentSignerBuilder("SHA256WithRSAEncryption").setProvider("BC").build(keyPair.getPrivate)
+ val certHolder = certBuilder.build(contentSigner)
+ val cert = new JcaX509CertificateConverter().setProvider("BC").getCertificate(certHolder)
+
+ (cert, keyPair.getPrivate.asInstanceOf[RSAPrivateKey])
+ }
+
+
+ def createAndSaveKeystore(tmpDir: Path, password: String, certificate: X509Certificate, privateKey: RSAPrivateKey): String = {
+
+ val keyStore = KeyStore.getInstance("JKS")
+ keyStore.load(null, password.toCharArray)
+
+ // Store the private key and certificate in the keystore
+ keyStore.setKeyEntry("alias", privateKey, password.toCharArray, Array(certificate))
+
+ val keyStorePath = tmpDir.resolve("keystore.jks").toString
+ // Save the keystore to a file
+ val keystoreOutputStream = new FileOutputStream(keyStorePath)
+ keyStore.store(keystoreOutputStream, password.toCharArray)
+ keystoreOutputStream.close()
+
+ keyStorePath
+ }
+
+ def createAndSaveTruststore(tmpDir: Path, password: String, certificate: X509Certificate): String = {
+
+ val trustStore = KeyStore.getInstance("JKS")
+ trustStore.load(null, password.toCharArray)
+
+ // Add the trusted certificate to the truststore
+ trustStore.setCertificateEntry("alias", certificate)
+ val trustStorePath = tmpDir.resolve("truststore.jks").toString
+
+ // Save the truststore to a file
+ val truststoreOutputStream = new FileOutputStream(trustStorePath)
+ trustStore.store(truststoreOutputStream, password.toCharArray)
+ truststoreOutputStream.close()
+
+ trustStorePath
+ }
+
+}
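
Typical use of the new utility: generate a throw-away self-signed certificate plus matching JKS key and trust stores for a TLS test. The "changeIt" password and the keystore.jks / truststore.jks file names are fixed by the implementation above; the common name is whatever hostname the container under test advertises (illustrative here):

val securityDir  = KeyStoreUtils.createKeystore("localhost")   // temp dir holding both stores
val keyStorePath = securityDir.resolve("keystore.jks")
val trustStore   = securityDir.resolve("truststore.jks")
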
diff --git a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
index 2b625ed164..050d65d828 100644
--- a/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
+++ b/test-common/src/main/scala/io/lenses/streamreactor/connect/testcontainers/scalatest/StreamReactorContainerPerSuite.scala
@@ -14,10 +14,10 @@ import org.scalatest.concurrent.Eventually
import org.scalatest.time.{Minute, Span}
import org.scalatest.{AsyncTestSuite, BeforeAndAfterAll}
import org.testcontainers.containers.output.Slf4jLogConsumer
-import org.testcontainers.containers.{KafkaContainer, Network}
+import org.testcontainers.containers.{BindMode, GenericContainer, KafkaContainer, Network}
import org.testcontainers.utility.DockerImageName
-import java.nio.file.{Files, Paths}
+import java.nio.file.{Files, Path, Paths}
import java.time.Duration
import java.util.{Properties, UUID}
import scala.collection.mutable.ListBuffer
@@ -28,8 +28,14 @@ trait StreamReactorContainerPerSuite extends BeforeAndAfterAll with Eventually w
override implicit def patienceConfig: PatienceConfig = PatienceConfig(timeout = Span(1, Minute))
+ val commonName : Option[String] = None
+
val network: Network = Network.SHARED
+ def useKeyStore : Boolean = false
+
+ def keyStorePath: Option[Path] = Option.when(useKeyStore)(KeyStoreUtils.createKeystore(commonName.getOrElse("")))
+
def connectorModule: String
def providedJars(): Seq[String] = Seq()
@@ -40,12 +46,25 @@ trait StreamReactorContainerPerSuite extends BeforeAndAfterAll with Eventually w
.withLogConsumer(new Slf4jLogConsumer(logger.underlying))
lazy val kafkaConnectContainer: KafkaConnectContainer = {
- KafkaConnectContainer(
+ val c = KafkaConnectContainer(
kafkaContainer = kafkaContainer,
schemaRegistryContainer = schemaRegistryContainer,
connectPluginPath = Some(connectPluginPath()),
providedJars = providedJars(),
- ).withNetwork(network).withLogConsumer(new Slf4jLogConsumer(logger.underlying))
+ )
+ .withNetwork(network)
+ .withLogConsumer(new Slf4jLogConsumer(logger.underlying))
+
+ copyBinds(c, "/security")
+ c
+ }
+
+ protected def copyBinds(container: GenericContainer[_], path: String): Unit = {
+ keyStorePath.foreach {
+ ksp =>
+ container.addFileSystemBind(ksp.resolve("keystore.jks").toAbsolutePath.toString, s"$path/keystore.jks", BindMode.READ_WRITE)
+ container.addFileSystemBind(ksp.resolve("truststore.jks").toAbsolutePath.toString, s"$path/truststore.jks", BindMode.READ_WRITE)
+ }
}
// Override for different SchemaRegistryContainer configs
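
A functional-test suite opts into the generated stores by overriding the members introduced above; a hedged sketch, with the suite and module names purely illustrative:

import org.scalatest.flatspec.AsyncFlatSpec

class OpenSearchSslFunctionalTest extends AsyncFlatSpec with StreamReactorContainerPerSuite {
  override def connectorModule: String    = "kafka-connect-opensearch"
  override val commonName: Option[String] = Some("open-ssl")
  override def useKeyStore: Boolean       = true
  // tests go here; kafkaConnectContainer will have /security/keystore.jks and
  // /security/truststore.jks bound from the generated temp directory
}
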