diff --git a/.github/workflows/java-build.yml b/.github/workflows/java-build.yml
new file mode 100644
index 000000000..04bedeb28
--- /dev/null
+++ b/.github/workflows/java-build.yml
@@ -0,0 +1,135 @@
+name: CI-java
+on:
+ pull_request:
+ workflow_call:
+ inputs:
+ version:
+ required: true
+ type: string
+ outputs:
+ java_modules:
+ description: "Stream reactor collection of java modules"
+ value: ${{ jobs.initiate-java-modules.outputs.java_matrix }}
+
+jobs:
+
+ initiate-java-modules:
+ timeout-minutes: 5
+ runs-on: ubuntu-latest
+ outputs:
+ java_matrix: ${{ steps.java-mods.outputs.java-matrix }}
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ cache: 'gradle'
+ - name: Generate modules lists
+ run: cd 'java-connectors' && ./gradlew releaseModuleList
+ env:
+ JVM_OPTS: -Xmx512m
+ - name: Read java modules lists
+ id: java-mods
+ run: |
+ echo "java-matrix=$(cat ./java-connectors/gradle-modules.txt)" >> $GITHUB_OUTPUT
+
+ test:
+ needs:
+ - initiate-java-modules
+ strategy:
+ matrix:
+ module: ${{fromJSON(needs.initiate-java-modules.outputs.java_matrix)}}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: 'temurin'
+
+ - name: Setup Gradle
+ uses: gradle/actions/setup-gradle@v3
+ with:
+ gradle-version: 8.6
+
+ - name: Check License Headers and Test with Gradle
+ run: cd 'java-connectors' && ./gradlew ${{ matrix.module }}:test
+
+ build-and-cache:
+ needs:
+ - test
+ - initiate-java-modules
+ strategy:
+ matrix:
+ module: ${{fromJSON(needs.initiate-java-modules.outputs.java_matrix)}}
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: 17
+ distribution: 'temurin'
+ cache: gradle
+
+ - name: Setup Gradle
+ uses: gradle/actions/setup-gradle@v3
+ with:
+ gradle-version: 8.6
+
+ - name: Execute Gradle build
+ run: cd 'java-connectors' && ./gradlew ${{ matrix.module }}:shadowJar --scan
+
+ - name: Move to release folder
+ shell: bash
+ run: |
+ JAVA_RELEASE_FOLDER=java-connectors/release
+ JAVA_BUILD_FOLDER=java-connectors/${{ matrix.module }}/build/libs
+ mkdir -p $JAVA_RELEASE_FOLDER
+ cp $JAVA_BUILD_FOLDER/${{ matrix.module }}*.jar LICENSE $JAVA_RELEASE_FOLDER/
+
+ - name: Cache assembly
+ uses: actions/cache/save@v4
+ with:
+ path: ./java-connectors/release/${{ matrix.module }}*.jar
+ key: assembly-java-${{ github.run_id }}
+
+ jar-dependency-check:
+ needs:
+ - build-and-cache
+ - initiate-java-modules
+ timeout-minutes: 30
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ module: ${{fromJSON(needs.initiate-java-modules.outputs.java_matrix)}}
+ steps:
+ - name: Restore assembly
+ uses: actions/cache/restore@v4
+ with:
+ path: ./java-connectors/release/${{ matrix.module }}*.jar
+ key: assembly-java-${{ github.run_id }}
+ fail-on-cache-miss: true
+
+ - name: Get branch names.
+ id: branch_name
+ uses: tj-actions/branch-names@v8
+ - name: JAR Dependency Check
+ uses: dependency-check/Dependency-Check_Action@main
+ with:
+ project: kafka-connect-${{matrix.module}}-deps
+ path: ./java-connectors/release/${{ matrix.module }}*.jar
+ format: 'HTML'
+ args: >-
+ --failOnCVSS 5
+ --suppression https://raw.githubusercontent.com/${{ github.event.pull_request.head.repo.owner.login }}/${{github.event.repository.name}}/${{ steps.branch_name.outputs.tag }}${{ steps.branch_name.outputs.current_branch }}/suppression.xml
+
+ - name: Upload Test results
+ uses: actions/upload-artifact@master
+ with:
+ name: ${{matrix.module}}-depcheck-results
+ path: ${{github.workspace}}/reports
\ No newline at end of file
diff --git a/.github/workflows/java-release.yml b/.github/workflows/java-release.yml
new file mode 100644
index 000000000..4738213cf
--- /dev/null
+++ b/.github/workflows/java-release.yml
@@ -0,0 +1,95 @@
+name: Publish New Java Release
+on:
+ push:
+ tags:
+ - "*"
+ workflow_dispatch:
+
+jobs:
+ validate-tag:
+ runs-on: ubuntu-latest
+ outputs:
+ draft_release: ${{ steps.get_tag.outputs.draft_release }}
+ tag: ${{ steps.get_tag.outputs.tag }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Get tag, release mode
+ shell: bash
+ id: get_tag
+ run: |
+ if [[ ${GITHUB_REF##*/} =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]];
+ then
+ draft_release=false
+ elif [[ ${GITHUB_REF##*/} =~ ^[0-9]+\.[0-9]+\.[0-9]+(-(alpha|beta|rc)(\.[0-9]+)?)?(\+[A-Za-z0-9.]+)?$ ]];
+ then
+ draft_release=true
+ else
+ echo "Exiting, github ref needs to be a tag with format x.y.z or x.y.z-(alpha|beta|rc)"
+ exit 1
+ fi
+ echo "draft_release=$draft_release" >> $GITHUB_OUTPUT
+ echo "tag=${GITHUB_REF##*/}" >> $GITHUB_OUTPUT
+
+ build:
+ needs:
+ - validate-tag
+ uses: ./.github/workflows/java-build.yml
+ with:
+ version: ${{ needs.validate-tag.outputs.tag }}
+ secrets: inherit
+
+ create-release:
+ runs-on: ubuntu-latest
+ needs:
+ - validate-tag
+ - build
+ strategy:
+ # Avoid parallel uploads
+ max-parallel: 1
+ # GitHub will NOT cancel all in-progress and queued jobs in the matrix if any job in the matrix fails, which could create inconsistencies.
+ # If any matrix job fails, the job will be marked as failure
+ fail-fast: false
+ matrix:
+ module: ${{fromJSON(needs.build.outputs.java_modules)}}
+ env:
+ DRAFT_RELEASE: '${{ needs.validate-tag.outputs.draft_release }}'
+ TAG: ${{ needs.validate-tag.outputs.tag }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ - name: Set up JDK 17
+ uses: actions/setup-java@v4
+ with:
+ java-version: '17'
+ distribution: 'temurin'
+ cache: 'gradle'
+
+ - name: Uncache assembly
+ uses: actions/cache/restore@v4
+ with:
+ path: |
+ ./java-connectors/release/${{ matrix.module }}*.jar
+ key: assembly-java-${{ github.run_id }}
+ fail-on-cache-miss: true
+
+ - name: Package Connector
+ shell: bash
+ run: |
+ JAVA_RELEASE_FOLDER=java-connectors/release
+ FOLDER=${{ matrix.module }}-${{ env.TAG }}
+ mkdir -p $FOLDER
+ cp $JAVA_RELEASE_FOLDER/${{ matrix.module }}*.jar LICENSE $FOLDER/
+ zip -r "$FOLDER.zip" $FOLDER/
+
+ - name: Upload binaries to release
+ uses: svenstaro/upload-release-action@v2
+ with:
+ file: ${{ matrix.module }}-${{ env.TAG }}.zip
+ asset_name: "${{ matrix.module }}-${{ env.TAG }}.zip"
+ release_name: 'Stream Reactor ${{ env.TAG }}'
+ prerelease: ${{ env.DRAFT_RELEASE }}
\ No newline at end of file
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 956b47966..a6fb59e27 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -22,14 +22,14 @@ jobs:
shell: bash
id: get_tag
run: |
- if [[ ${GITHUB_REF##*/} =~ ^[0-9]\.[0-9]\.[0-9]$ ]];
+ if [[ ${GITHUB_REF##*/} =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]];
then
draft_release=false
- elif [[ ${GITHUB_REF##*/} =~ ^[0-9]\.[0-9]\.[0-9]+(-(alpha|beta|rc)(\.[0-9]+)?)?(\+[A-Za-z0-9.]+)?$ ]];
+ elif [[ ${GITHUB_REF##*/} =~ ^[0-9]+\.[0-9]+\.[0-9]+(-(alpha|beta|rc)(\.[0-9]+)?)?(\+[A-Za-z0-9.]+)?$ ]];
then
draft_release=true
else
- echo "Exiting, github ref needs to be a tag with format x.y.z or x.y.z+(alpha|beta|rc)"
+ echo "Exiting, github ref needs to be a tag with format x.y.z or x.y.z-(alpha|beta|rc)"
exit 1
fi
echo "draft_release=$draft_release" >> $GITHUB_OUTPUT
diff --git a/.gitignore b/.gitignore
index d5b55b6dc..980db392b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+#Including java-specific ignores
+#!include:java-connectors/.gitignore
+
.bsp
stageman
cass-test
diff --git a/java-connectors/.gitignore b/java-connectors/.gitignore
new file mode 100644
index 000000000..13047a0b9
--- /dev/null
+++ b/java-connectors/.gitignore
@@ -0,0 +1,47 @@
+.gradle
+build/
+!gradle/wrapper/gradle-wrapper.jar
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### IntelliJ IDEA ###
+.idea/*
+.idea/modules.xml
+.idea/jarRepositories.xml
+.idea/compiler.xml
+.idea/libraries/
+*.iws
+*.iml
+*.ipr
+out/
+!**/src/main/**/out/
+!**/src/test/**/out/
+
+### Eclipse ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+bin/
+!**/src/main/**/bin/
+!**/src/test/**/bin/
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+
+### VS Code ###
+.vscode/
+
+### Mac OS ###
+.DS_Store
+
+### Lenses-specific ###
+release/
+gradle-modules.txt
\ No newline at end of file
diff --git a/java-connectors/HEADER.txt b/java-connectors/HEADER.txt
new file mode 100644
index 000000000..8828f9cd2
--- /dev/null
+++ b/java-connectors/HEADER.txt
@@ -0,0 +1,13 @@
+Copyright 2017-${year} ${name} Ltd
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
\ No newline at end of file
diff --git a/java-connectors/build.gradle b/java-connectors/build.gradle
new file mode 100644
index 000000000..c7c74b793
--- /dev/null
+++ b/java-connectors/build.gradle
@@ -0,0 +1,177 @@
+plugins {
+ id 'com.github.johnrengelman.shadow' version '8.1.1'
+ id 'org.cadixdev.licenser' version '0.6.1'
+ id 'java'
+ id 'java-library'
+}
+
+allprojects {
+
+ group = "io.lenses.streamreactor"
+ version = "6.4.0-SNAPSHOT"
+ description = "stream-reactor"
+
+ apply plugin: 'java'
+ apply plugin: 'java-library'
+ apply plugin: 'com.github.johnrengelman.shadow'
+ apply plugin: 'org.cadixdev.licenser'
+
+ java {
+ setSourceCompatibility(JavaVersion.VERSION_11)
+ setTargetCompatibility(JavaVersion.VERSION_11)
+ }
+
+ ext {
+ //DEPENDENCY VERSIONS
+ lombokVersion = '1.18.30'
+ kafkaVersion = '3.7.0'
+ logbackVersion = '1.4.14'
+ jUnitVersion = '5.9.1'
+ mockitoJupiterVersion = '5.10.0'
+ apacheToConfluentVersionAxis = ["2.8.1": "6.2.2", "3.3.0": "7.3.1"]
+
+ //Other Manifest Info
+ mainClassName = ''
+ gitCommitHash = ("git rev-parse HEAD").execute().text.trim()
+ gitTag = ("git describe --abbrev=0 --tags").execute().text.trim()
+ gitRepo = ("git remote get-url origin").execute().text.trim()
+
+ //for jar building
+ rootRelease = "${project.rootDir}/release/"
+ versionDir = "${rootRelease}/${project.description}-${project.version}"
+ confDir = "${versionDir}/conf"
+ libsDir = "${versionDir}/libs"
+ }
+
+ repositories {
+ mavenCentral()
+ maven {
+ url "https://packages.confluent.io/maven/"
+ }
+ }
+
+ dependencies {
+ //logback
+ implementation group: 'ch.qos.logback', name: 'logback-classic', version: logbackVersion
+
+ //lombok
+ compileOnly group: 'org.projectlombok', name: 'lombok', version: lombokVersion
+ annotationProcessor group: 'org.projectlombok', name: 'lombok', version: lombokVersion
+
+ //tests
+ testImplementation group: 'org.mockito', name: 'mockito-core', version: mockitoJupiterVersion
+ testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: jUnitVersion
+ testImplementation group: 'org.assertj', name: 'assertj-core', version: '3.25.3'
+
+ }
+
+ test {
+ useJUnitPlatform()
+
+ maxHeapSize = '1G'
+
+ testLogging {
+ events "passed"
+ }
+ }
+
+ license {
+ include("**/**.java", "**/**Test.java")
+ exclude("**/kcql/antlr4/**.java") //antlr generated files
+ header = project.file("${project.rootDir}/HEADER.txt")
+ newLine = false
+
+ style {
+ java = 'BLOCK_COMMENT'
+ }
+
+ properties {
+ name = 'Lenses.io'
+ year = java.time.LocalDate.now().year
+ }
+ }
+
+ jar {
+ manifest {
+ attributes("StreamReactor-Version": project.version,
+ "Kafka-Version": kafkaVersion,
+ "Created-By": "Lenses",
+ "Created-At": new Date().format("YYYYMMDDHHmm"),
+ "Git-Repo": gitRepo,
+ "Git-Commit-Hash": gitCommitHash,
+ "Git-Tag": gitTag,
+ "StreamReactor-Docs": "https://docs.lenses.io/connectors/"
+ )
+ }
+ }
+
+ shadowJar {
+
+ manifest {
+ attributes("StreamReactor-Version": project.version,
+ "Kafka-Version": kafkaVersion,
+ "Created-By": "Lenses",
+ "Created-At": new Date().format("YYYYMMDDHHmm"),
+ "Git-Repo": gitRepo,
+ "Git-Commit-Hash": gitCommitHash,
+ "Git-Tag": gitTag,
+ "StreamReactor-Docs": "https://docs.lenses.io/connectors/"
+ )
+ }
+ configurations = [project.configurations.compileClasspath]
+ //archiveBaseName = "${project.name}-${project.version}-${kafkaVersion}-all"
+ zip64 true
+
+ mergeServiceFiles {
+ exclude "META-INF/*.SF"
+ exclude "META-INF/*.DSA"
+ exclude "META-INF/*.RSA"
+ }
+
+ //shadowing antlr packages in order to avoid conflict when using kafka connect
+ relocate('org.antlr', 'lshaded.antlr')
+
+ dependencies {
+// // UNCOMMENT BELOW IF NEED CLEAN JAR
+// exclude(dependency {
+// it.moduleGroup != 'io.lenses.streamreactor'
+// })
+// exclude(dependency('org.apache.logging.log4j:log4j-core:2.11.1'))
+// exclude(dependency("org.apache.avro:.*"))
+// exclude(dependency("org.apache.kafka:.*"))
+// exclude(dependency("io.confluent:.*"))
+// exclude(dependency("org.apache.kafka:.*"))
+// exclude(dependency("org.apache.zookeeper:.*"))
+// exclude(dependency("com.google.guava:guava:28.1-android"))
+ }
+
+ }
+ compileJava.dependsOn("checkLicenses")
+
+ task fatJar(dependsOn: [test, jar, shadowJar])
+
+ task collectFatJar(type: Copy, dependsOn: [fatJar]) {
+ from("${buildDir}/libs").include("kafka-connect-*-all.jar")
+ .exclude("*-common-*").into(libsDir)
+ }
+}
+
+task prepareRelease(dependsOn: [collectFatJar]) {
+ dependsOn subprojects.collectFatJar
+}
+
+task releaseModuleList() {
+ def nonReleaseModules = ["java-reactor", "kafka-connect-cloud-common",
+ "kafka-connect-common", "kafka-connect-query-language"]
+
+ def modulesFile = new File("gradle-modules.txt")
+ modulesFile.delete()
+ modulesFile.createNewFile()
+
+ def modulesBuilder = new StringBuilder("[")
+ allprojects.name.stream()
+ .filter {moduleName -> !nonReleaseModules.contains(moduleName)}
+ .forEach {moduleName -> modulesBuilder.append("\"" + moduleName + "\",") }
+ modulesBuilder.deleteCharAt(modulesBuilder.lastIndexOf(",")).append("]")
+ modulesFile.append(modulesBuilder)
+}
diff --git a/java-connectors/config/checkstyle/checkstyle.xml b/java-connectors/config/checkstyle/checkstyle.xml
new file mode 100644
index 000000000..c731fe024
--- /dev/null
+++ b/java-connectors/config/checkstyle/checkstyle.xml
@@ -0,0 +1,379 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/java-connectors/config/checkstyle/suppressions.xml b/java-connectors/config/checkstyle/suppressions.xml
new file mode 100644
index 000000000..3d20b94b9
--- /dev/null
+++ b/java-connectors/config/checkstyle/suppressions.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
diff --git a/java-connectors/gradle/wrapper/gradle-wrapper.jar b/java-connectors/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 000000000..249e5832f
Binary files /dev/null and b/java-connectors/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/java-connectors/gradle/wrapper/gradle-wrapper.properties b/java-connectors/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 000000000..a1610a6f4
--- /dev/null
+++ b/java-connectors/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Sat Feb 10 15:56:21 CET 2024
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/java-connectors/gradlew b/java-connectors/gradlew
new file mode 100755
index 000000000..1b6c78733
--- /dev/null
+++ b/java-connectors/gradlew
@@ -0,0 +1,234 @@
+#!/bin/sh
+
+#
+# Copyright © 2015-2021 the original authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+#
+# Gradle start up script for POSIX generated by Gradle.
+#
+# Important for running:
+#
+# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
+# noncompliant, but you have some other compliant shell such as ksh or
+# bash, then to run this script, type that shell name before the whole
+# command line, like:
+#
+# ksh Gradle
+#
+# Busybox and similar reduced shells will NOT work, because this script
+# requires all of these POSIX shell features:
+# * functions;
+# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
+# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
+# * compound commands having a testable exit status, especially «case»;
+# * various built-in commands including «command», «set», and «ulimit».
+#
+# Important for patching:
+#
+# (2) This script targets any POSIX shell, so it avoids extensions provided
+# by Bash, Ksh, etc; in particular arrays are avoided.
+#
+# The "traditional" practice of packing multiple parameters into a
+# space-separated string is a well documented source of bugs and security
+# problems, so this is (mostly) avoided, by progressively accumulating
+# options in "$@", and eventually passing that to Java.
+#
+# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
+# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
+# see the in-line comments for details.
+#
+# There are tweaks for specific operating systems such as AIX, CygWin,
+# Darwin, MinGW, and NonStop.
+#
+# (3) This script is generated from the Groovy template
+# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
+# within the Gradle project.
+#
+# You can find Gradle at https://github.com/gradle/gradle/.
+#
+##############################################################################
+
+# Attempt to set APP_HOME
+
+# Resolve links: $0 may be a link
+app_path=$0
+
+# Need this for daisy-chained symlinks.
+while
+ APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
+ [ -h "$app_path" ]
+do
+ ls=$( ls -ld "$app_path" )
+ link=${ls#*' -> '}
+ case $link in #(
+ /*) app_path=$link ;; #(
+ *) app_path=$APP_HOME$link ;;
+ esac
+done
+
+APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+
+APP_NAME="Gradle"
+APP_BASE_NAME=${0##*/}
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD=maximum
+
+warn () {
+ echo "$*"
+} >&2
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+} >&2
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "$( uname )" in #(
+ CYGWIN* ) cygwin=true ;; #(
+ Darwin* ) darwin=true ;; #(
+ MSYS* | MINGW* ) msys=true ;; #(
+ NONSTOP* ) nonstop=true ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD=$JAVA_HOME/jre/sh/java
+ else
+ JAVACMD=$JAVA_HOME/bin/java
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD=java
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
+ case $MAX_FD in #(
+ max*)
+ MAX_FD=$( ulimit -H -n ) ||
+ warn "Could not query maximum file descriptor limit"
+ esac
+ case $MAX_FD in #(
+ '' | soft) :;; #(
+ *)
+ ulimit -n "$MAX_FD" ||
+ warn "Could not set maximum file descriptor limit to $MAX_FD"
+ esac
+fi
+
+# Collect all arguments for the java command, stacking in reverse order:
+# * args from the command line
+# * the main class name
+# * -classpath
+# * -D...appname settings
+# * --module-path (only if needed)
+# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if "$cygwin" || "$msys" ; then
+ APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
+ CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
+
+ JAVACMD=$( cygpath --unix "$JAVACMD" )
+
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ for arg do
+ if
+ case $arg in #(
+ -*) false ;; # don't mess with options #(
+ /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
+ [ -e "$t" ] ;; #(
+ *) false ;;
+ esac
+ then
+ arg=$( cygpath --path --ignore --mixed "$arg" )
+ fi
+ # Roll the args list around exactly as many times as the number of
+ # args, so each arg winds up back in the position where it started, but
+ # possibly modified.
+ #
+ # NB: a `for` loop captures its iteration list before it begins, so
+ # changing the positional parameters here affects neither the number of
+ # iterations, nor the values presented in `arg`.
+ shift # remove old arg
+ set -- "$@" "$arg" # push replacement arg
+ done
+fi
+
+# Collect all arguments for the java command;
+# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
+# shell script including quotes and variable substitutions, so put them in
+# double quotes to make sure that they get re-expanded; and
+# * put everything else in single quotes, so that it's not re-expanded.
+
+set -- \
+ "-Dorg.gradle.appname=$APP_BASE_NAME" \
+ -classpath "$CLASSPATH" \
+ org.gradle.wrapper.GradleWrapperMain \
+ "$@"
+
+# Use "xargs" to parse quoted args.
+#
+# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
+#
+# In Bash we could simply go:
+#
+# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
+# set -- "${ARGS[@]}" "$@"
+#
+# but POSIX shell has neither arrays nor command substitution, so instead we
+# post-process each arg (as a line of input to sed) to backslash-escape any
+# character that might be a shell metacharacter, then use eval to reverse
+# that process (while maintaining the separation between arguments), and wrap
+# the whole thing up as a single "set" statement.
+#
+# This will of course break if any of these variables contains a newline or
+# an unmatched quote.
+#
+
+eval "set -- $(
+ printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
+ xargs -n1 |
+ sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
+ tr '\n' ' '
+ )" '"$@"'
+
+exec "$JAVACMD" "$@"
diff --git a/java-connectors/gradlew.bat b/java-connectors/gradlew.bat
new file mode 100644
index 000000000..ac1b06f93
--- /dev/null
+++ b/java-connectors/gradlew.bat
@@ -0,0 +1,89 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/java-connectors/kafka-connect-azure-eventhubs/build.gradle b/java-connectors/kafka-connect-azure-eventhubs/build.gradle
new file mode 100644
index 000000000..60ea80b6c
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/build.gradle
@@ -0,0 +1,19 @@
+project(':kafka-connect-azure-eventhubs') {
+
+
+ test {
+ maxParallelForks = 1
+ }
+
+ dependencies {
+ implementation project(':kafka-connect-common')
+ implementation project(':kafka-connect-query-language')
+
+// //azure-specific dependencies in case we want to change from kafka protocol
+// implementation group: 'com.azure', name: 'azure-identity', version: '1.11.2'
+// implementation group: 'com.azure', name: 'azure-messaging-eventhubs', version: '5.18.0'
+// implementation group: 'com.azure', name: 'azure-storage-blob', version: '12.25.1'
+// implementation group: 'com.azure', name: 'azure-messaging-eventhubs-checkpointstore-blob', version: '1.19.0'
+
+ }
+}
\ No newline at end of file
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsConfigConstants.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsConfigConstants.java
new file mode 100644
index 000000000..35fab9bb6
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsConfigConstants.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.config;
+
+import io.lenses.streamreactor.connect.azure.eventhubs.source.AzureEventHubsSourceConnector;
+
+/**
+ * Class represents Config Constants for AzureEventHubsSourceConnector Config Definition.
+ */
+public class AzureEventHubsConfigConstants {
+
+
+ private static final String DOT = ".";
+ public static final String OPTIONAL_EMPTY_DEFAULT = "";
+ public static final String CONNECTOR_PREFIX = "connect.eventhubs";
+ public static final String SOURCE_CONNECTOR_PREFIX = CONNECTOR_PREFIX + DOT + "source";
+
+ public static final String CONNECTOR_NAME = "name";
+ public static final String CONNECTOR_NAME_DOC = "Connector's name";
+ public static final String CONNECTOR_NAME_DEFAULT = AzureEventHubsSourceConnector.class.getSimpleName();
+
+ public static final String CONNECTOR_WITH_CONSUMER_PREFIX =
+ SOURCE_CONNECTOR_PREFIX + DOT + "connection.settings" + DOT;
+ public static final String CONSUMER_OFFSET = SOURCE_CONNECTOR_PREFIX + DOT + "default.offset";
+ public static final String CONSUMER_OFFSET_DOC =
+ "Specifies whether by default we should consumer from earliest (default) or latest offset.";
+ public static final String CONSUMER_OFFSET_DEFAULT = "earliest";
+ public static final String CONSUMER_CLOSE_TIMEOUT = SOURCE_CONNECTOR_PREFIX + DOT + "close.timeout";
+ public static final String CONSUMER_CLOSE_TIMEOUT_DOC =
+ "Specifies timeout for consumer closing.";
+ public static final String CONSUMER_CLOSE_TIMEOUT_DEFAULT = "30";
+
+ public static final String KCQL_CONFIG = CONNECTOR_PREFIX + DOT + "kcql";
+ public static final String KCQL_DOC =
+ "KCQL expression describing field selection and data routing to the target.";
+
+}
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsSourceConfig.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsSourceConfig.java
new file mode 100644
index 000000000..fba2ccd28
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/AzureEventHubsSourceConfig.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.config;
+
+import io.lenses.streamreactor.common.config.base.BaseConfig;
+import io.lenses.streamreactor.common.config.base.intf.ConnectorPrefixed;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.UnaryOperator;
+import lombok.Getter;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.common.config.ConfigDef;
+import org.apache.kafka.common.config.ConfigDef.Importance;
+import org.apache.kafka.common.config.ConfigDef.Type;
+
+/**
+ * Class represents Config Definition for AzureEventHubsSourceConnector. It additionally adds
+ * configs from org.apache.kafka.clients.consumer.ConsumerConfig but adds standard Connector
+ * prefixes to them.
+ */
+public class AzureEventHubsSourceConfig extends BaseConfig implements ConnectorPrefixed {
+
+ public static final String CONNECTION_GROUP = "Connection";
+
+ private static final UnaryOperator<String> CONFIG_NAME_PREFIX_APPENDER = name ->
+ AzureEventHubsConfigConstants.CONNECTOR_WITH_CONSUMER_PREFIX + name;
+
+ private static final Set<String> EXCLUDED_CONSUMER_PROPERTIES =
+ Set.of(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
+ ConsumerConfig.GROUP_ID_CONFIG, ConsumerConfig.CLIENT_ID_CONFIG, ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG);
+
+
+ @Getter
+ static ConfigDef configDefinition;
+
+ static {
+ ConfigDef kafkaConsumerConfigToExpose = getKafkaConsumerConfigToExpose();
+ configDefinition = new ConfigDef(kafkaConsumerConfigToExpose)
+ .define(AzureEventHubsConfigConstants.CONNECTOR_NAME,
+ Type.STRING,
+ AzureEventHubsConfigConstants.CONNECTOR_NAME_DEFAULT,
+ Importance.HIGH,
+ AzureEventHubsConfigConstants.CONNECTOR_NAME_DOC,
+ CONNECTION_GROUP,
+ 1,
+ ConfigDef.Width.LONG,
+ AzureEventHubsConfigConstants.CONNECTOR_NAME
+ ).define(AzureEventHubsConfigConstants.CONSUMER_CLOSE_TIMEOUT,
+ Type.INT,
+ AzureEventHubsConfigConstants.CONSUMER_CLOSE_TIMEOUT_DEFAULT,
+ Importance.MEDIUM,
+ AzureEventHubsConfigConstants.CONSUMER_CLOSE_TIMEOUT_DOC,
+ CONNECTION_GROUP,
+ 3,
+ ConfigDef.Width.LONG,
+ AzureEventHubsConfigConstants.CONSUMER_CLOSE_TIMEOUT
+ )
+ .define(AzureEventHubsConfigConstants.CONSUMER_OFFSET,
+ Type.STRING,
+ AzureEventHubsConfigConstants.CONSUMER_OFFSET_DEFAULT,
+ Importance.MEDIUM,
+ AzureEventHubsConfigConstants.CONSUMER_OFFSET_DOC,
+ CONNECTION_GROUP,
+ 4,
+ ConfigDef.Width.LONG,
+ AzureEventHubsConfigConstants.CONSUMER_OFFSET
+ ).define(AzureEventHubsConfigConstants.KCQL_CONFIG,
+ Type.STRING,
+ Importance.HIGH,
+ AzureEventHubsConfigConstants.KCQL_DOC,
+ "Mappings",
+ 1,
+ ConfigDef.Width.LONG,
+ AzureEventHubsConfigConstants.KCQL_CONFIG
+ );
+ }
+
+ public AzureEventHubsSourceConfig(Map<String, ?> properties) {
+ super(AzureEventHubsConfigConstants.CONNECTOR_PREFIX, getConfigDefinition(), properties);
+ }
+
+ /**
+ * Provides prefixed KafkaConsumerConfig key.
+ *
+ * @param kafkaConsumerConfigKey from org.apache.kafka.clients.consumer.ConsumerConfig
+ * @return prefixed key.
+ */
+ public static String getPrefixedKafkaConsumerConfigKey(String kafkaConsumerConfigKey) {
+ return CONFIG_NAME_PREFIX_APPENDER.apply(kafkaConsumerConfigKey);
+ }
+
+ @Override
+ public String connectorPrefix() {
+ return connectorPrefix;
+ }
+
+ private static ConfigDef getKafkaConsumerConfigToExpose() {
+ ConfigDef kafkaConsumerConfigToExpose = new ConfigDef();
+ ConsumerConfig.configDef().configKeys().values().stream()
+ .filter(configKey -> !EXCLUDED_CONSUMER_PROPERTIES.contains(configKey.name))
+ .forEach(configKey -> kafkaConsumerConfigToExpose.define(
+ CONFIG_NAME_PREFIX_APPENDER.apply(configKey.name),
+ configKey.type, configKey.defaultValue,
+ configKey.importance, configKey.documentation, configKey.group,
+ configKey.orderInGroup, configKey.width, configKey.displayName));
+
+ return kafkaConsumerConfigToExpose;
+ }
+}
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/SourceDataType.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/SourceDataType.java
new file mode 100644
index 000000000..92722e393
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/config/SourceDataType.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.config;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import lombok.EqualsAndHashCode;
+import lombok.Getter;
+import org.apache.kafka.common.serialization.ByteArrayDeserializer;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.connect.data.Schema;
+
+/**
+ * Class to indicate what kind of data is being received from Kafka Consumer.
+ */
+@Getter
+public enum SourceDataType {
+
+ BYTES(ByteArrayDeserializer.class, Schema.OPTIONAL_BYTES_SCHEMA);
+
+ private final Class<? extends Deserializer> deserializerClass;
+ private final Schema schema;
+ private static final Map<String, SourceDataType> NAME_TO_DATA_SERIALIZER_TYPE;
+
+ static {
+ NAME_TO_DATA_SERIALIZER_TYPE =
+ Arrays.stream(values()).collect(Collectors.toMap(Enum::name, Function.identity()));
+ }
+
+ SourceDataType(Class<? extends Deserializer> deserializerClass, Schema schema) {
+ this.deserializerClass = deserializerClass;
+ this.schema = schema;
+ }
+
+ public static SourceDataType fromName(String name) {
+ return NAME_TO_DATA_SERIALIZER_TYPE.get(name.toUpperCase());
+ }
+
+ /**
+ * Class indicates what data types are being transferred by Task.
+ */
+ @Getter
+ @EqualsAndHashCode
+ public static class KeyValueTypes {
+ private final SourceDataType keyType;
+ private final SourceDataType valueType;
+ public static final KeyValueTypes DEFAULT_TYPES = new KeyValueTypes(BYTES, BYTES);
+
+ public KeyValueTypes(SourceDataType keyType, SourceDataType valueType) {
+ this.keyType = keyType;
+ this.valueType = valueType;
+ }
+ }
+}
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/mapping/SourceRecordMapper.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/mapping/SourceRecordMapper.java
new file mode 100644
index 000000000..0c5f0436c
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/mapping/SourceRecordMapper.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.mapping;
+
+import java.util.Map;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.common.header.Header;
+import org.apache.kafka.connect.data.Schema;
+import org.apache.kafka.connect.data.SchemaAndValue;
+import org.apache.kafka.connect.header.ConnectHeaders;
+import org.apache.kafka.connect.source.SourceRecord;
+
+/**
+ * Class with utility method to convert to SourceRecord.
+ */
+public class SourceRecordMapper {
+
+ /**
+ * Method to make SourceRecord out of ConsumerRecord including optional byte headers from original
+ * message.
+ *
+ * @param consumerRecord original consumer record
+ * @param partitionKey AzureTopicPartitionKey to indicate topic and partition
+ * @param offsetMap AzureOffsetMarker to indicate offset
+ * @param outputTopic Output topic for record
+ * @param keySchema Schema of the key
+ * @param valueSchema Schema of the value
+ * @return SourceRecord with headers
+ */
+ public static SourceRecord mapSourceRecordIncludingHeaders(
+ ConsumerRecord<?, ?> consumerRecord,
+ Map<String, String> partitionKey, Map<String, Object> offsetMap,
+ String outputTopic, Schema keySchema, Schema valueSchema) {
+ Iterable<Header> headers = consumerRecord.headers();
+ ConnectHeaders connectHeaders = new ConnectHeaders();
+ for (Header header : headers) {
+ connectHeaders.add(header.key(),
+ new SchemaAndValue(Schema.OPTIONAL_BYTES_SCHEMA, header.value()));
+ }
+ return new SourceRecord(partitionKey, offsetMap,
+ outputTopic, null, keySchema, consumerRecord.key(),
+ valueSchema, consumerRecord.value(), consumerRecord.timestamp(),
+ connectHeaders);
+ }
+
+ /**
+ * Method to make SourceRecord out of ConsumerRecord including optional byte headers
+ * from original message.
+ *
+ * @param consumerRecord original consumer record
+ * @param partitionKey partitionKey to indicate topic and partition
+ * @param offsetMap AzureOffsetMarker to indicate offset
+ * @param outputTopic Output topic for record
+ * @param keySchema Schema of the key
+ * @param valueSchema Schema of the value
+ * @return SourceRecord without headers
+ */
+ public static SourceRecord mapSourceRecordWithoutHeaders(
+ ConsumerRecord<?, ?> consumerRecord,
+ Map<String, String> partitionKey, Map<String, Object> offsetMap,
+ String outputTopic, Schema keySchema, Schema valueSchema) {
+ return new SourceRecord(partitionKey, offsetMap,
+ outputTopic, null, keySchema, consumerRecord.key(),
+ valueSchema, consumerRecord.value(), consumerRecord.timestamp(), null);
+ }
+
+}
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureConsumerRebalancerListener.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureConsumerRebalancerListener.java
new file mode 100644
index 000000000..2b800e759
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureConsumerRebalancerListener.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.source;
+
+import io.lenses.streamreactor.connect.azure.eventhubs.source.TopicPartitionOffsetProvider.AzureOffsetMarker;
+import io.lenses.streamreactor.connect.azure.eventhubs.source.TopicPartitionOffsetProvider.AzureTopicPartitionKey;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
+import org.apache.kafka.common.TopicPartition;
+
+/**
+ * This class is an implementation of {@link ConsumerRebalanceListener} that can be used to provide
+ * OnlyOnce support and seek consumers into relevant offsets if needed.
+ */
+@Slf4j
+public class AzureConsumerRebalancerListener implements ConsumerRebalanceListener {
+
+ private final boolean shouldSeekToLatest;
+ private final TopicPartitionOffsetProvider topicPartitionOffsetProvider;
+ private final Consumer<?, ?> kafkaConsumer;
+
+ /**
+ * Constructs {@link AzureConsumerRebalancerListener} for particular Kafka Consumer.
+ *
+ * @param topicPartitionOffsetProvider provider of committed offsets
+ * @param kafkaConsumer Kafka Consumer
+ * @param shouldSeekToLatest informs whether we should seek to latest or earliest if no offsets found
+ */
+ public AzureConsumerRebalancerListener(
+ TopicPartitionOffsetProvider topicPartitionOffsetProvider,
+ Consumer<?, ?> kafkaConsumer, boolean shouldSeekToLatest) {
+ this.topicPartitionOffsetProvider = topicPartitionOffsetProvider;
+ this.kafkaConsumer = kafkaConsumer;
+ this.shouldSeekToLatest = shouldSeekToLatest;
+ }
+
+ @Override
+ public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
+ // implementation not needed, offsets already committed
+ }
+
+ @Override
+ public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
+ List<TopicPartition> partitionsWithoutOffsets = new ArrayList<>();
+ partitions.forEach(partition -> {
+ AzureTopicPartitionKey partitionKey = new AzureTopicPartitionKey(
+ partition.topic(), partition.partition());
+ Optional<AzureOffsetMarker> partitionOffset = topicPartitionOffsetProvider.getOffset(partitionKey);
+ partitionOffset.ifPresentOrElse(
+ offset -> kafkaConsumer.seek(partition, offset.getOffsetValue()),
+ () -> partitionsWithoutOffsets.add(partition));
+ });
+ if (!partitionsWithoutOffsets.isEmpty()) {
+ if (shouldSeekToLatest) {
+ kafkaConsumer.seekToEnd(partitionsWithoutOffsets);
+ } else {
+ kafkaConsumer.seekToBeginning(partitionsWithoutOffsets);
+ }
+ }
+ }
+
+}
diff --git a/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureEventHubsSourceConnector.java b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureEventHubsSourceConnector.java
new file mode 100644
index 000000000..d0d019f65
--- /dev/null
+++ b/java-connectors/kafka-connect-azure-eventhubs/src/main/java/io/lenses/streamreactor/connect/azure/eventhubs/source/AzureEventHubsSourceConnector.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2017-2024 Lenses.io Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.lenses.streamreactor.connect.azure.eventhubs.source;
+
+import static io.lenses.streamreactor.common.util.AsciiArtPrinter.printAsciiHeader;
+
+import io.lenses.streamreactor.common.util.JarManifest;
+import io.lenses.streamreactor.connect.azure.eventhubs.config.AzureEventHubsConfigConstants;
+import io.lenses.streamreactor.connect.azure.eventhubs.config.AzureEventHubsSourceConfig;
+import io.lenses.streamreactor.connect.azure.eventhubs.util.KcqlConfigTopicMapper;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.IntStream;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.config.ConfigDef;
+import org.apache.kafka.connect.connector.Task;
+import org.apache.kafka.connect.source.ExactlyOnceSupport;
+import org.apache.kafka.connect.source.SourceConnector;
+
+/**
+ * Implementation of {@link SourceConnector} for Microsoft Azure EventHubs.
+ */
+@Slf4j
+public class AzureEventHubsSourceConnector extends SourceConnector {
+
+ private final JarManifest jarManifest =
+ new JarManifest(getClass().getProtectionDomain().getCodeSource().getLocation());
+ private Map<String, String> configProperties;
+
+ @Override
+ public void start(Map<String, String> props) {
+ configProperties = props;
+ parseAndValidateConfigs(props);
+ printAsciiHeader(jarManifest, "/azure-eventhubs-ascii.txt");
+ }
+
+ @Override
+ public Class<? extends Task> taskClass() {
+ return AzureEventHubsSourceTask.class;
+ }
+
+ @Override
+ public List