diff --git a/.gitignore b/.gitignore
index 530c0530..11577ec1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,7 +5,7 @@ build/
out/
.gradle/
docker/users
-docker/source-fitbit.properties
-docker/source-oura.properties
+docker/legacy/source-fitbit.properties
+docker/legacy/source-oura.properties
bin/
-.DS_Store
\ No newline at end of file
+.DS_Store
diff --git a/README.md b/README.md
index eeafadba..f830d147 100644
--- a/README.md
+++ b/README.md
@@ -26,7 +26,11 @@ of Java 17 or later.
Generally, this component is installed
with [RADAR-Kubernetes](https://github.com/RADAR-base/RADAR-Kubernetes). It uses Docker
-image [radarbase/kafka-connect-rest-fitbit-source](https://hub.docker.com/r/radarbase/kafka-connect-rest-fitbit-source).
+image [radarbase/kafka-connect-rest-fitbit-source](https://hub.docker.com/r/radarbase/kafka-connect-rest-fitbit-source),
+which is built from the `kafka-connect-fitbit-source/Dockerfile`. The image is based on the Strimzi Kafka Connect image.
+
+The Fitbit source connector can also be run with docker compose and the Confluent Kafka Connect image, using the `kafka-connect-fitbit-source/Dockerfile-legacy`.
+
First, [register a Fitbit App](https://dev.fitbit.com/apps) with Fitbit. It should be either a
server app, for multiple users, or a personal app for a single user. With the server app, you need
@@ -39,7 +43,7 @@ For every Fitbit user you want access to, copy `docker/fitbit-user.yml.template`
For automatic configuration for multiple users, please take a look at
`scripts/REDCAP-FITBIT-AUTH-AUTO/README.md`.
-Copy `docker/source-fitbit.properties.template` to `docker/source-fitbit.properties` and enter
+Copy `docker/legacy/source-fitbit.properties.template` to `docker/legacy/source-fitbit.properties` and enter
your Fitbit App client ID and client secret. The following tables shows the possible properties.
diff --git a/docker-compose.yml b/docker-compose.yml
index 3dd12369..769fdb47 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -145,11 +145,11 @@ services:
radar-fitbit-connector:
build:
context: .
- dockerfile: ./kafka-connect-fitbit-source/Dockerfile
+ dockerfile: ./kafka-connect-fitbit-source/Dockerfile-legacy
image: radarbase/radar-connect-fitbit-source
restart: on-failure
volumes:
- - ./docker/source-fitbit.properties:/etc/kafka-connect/source-fitbit.properties
+ - ./docker/legacy/source-fitbit.properties:/etc/kafka-connect/source-fitbit.properties
- ./docker/users:/var/lib/kafka-connect-fitbit-source/users
- fitbit-logs:/var/lib/kafka-connect-fitbit-source/logs
depends_on:
@@ -191,11 +191,11 @@ services:
radar-oura-connector:
build:
context: .
- dockerfile: ./kafka-connect-oura-source/Dockerfile
+ dockerfile: ./kafka-connect-oura-source/Dockerfile-legacy
image: radarbase/radar-connect-oura-source
restart: on-failure
volumes:
- - ./docker/source-oura.properties:/etc/kafka-connect/source-oura.properties
+ - ./docker/legacy/source-oura.properties:/etc/kafka-connect/source-oura.properties
- ./docker/users:/var/lib/kafka-connect-oura-source/users
depends_on:
- zookeeper-1
diff --git a/docker/ensure b/docker/ensure
index 13e94dad..18fed8fd 100755
--- a/docker/ensure
+++ b/docker/ensure
@@ -1,88 +1,35 @@
#!/bin/bash
-if [ "$WAIT_FOR_KAFKA" != "1" ]; then
- echo "Starting without checking for Kafka availability"
- exit 0
+# Get the schema registry URL from the config.
+ss_url=$(grep -E '^key.converter.schema.registry.url=' /tmp/strimzi-connect.properties | cut -d'=' -f2)
+
+# If the schema registry URL is not set, exit...
+if [ -z "$ss_url" ]; then
+ echo "Schema registry URL is not set in strimzi-connect.properties."
+ echo "We will not check for schema registry availability."
+ exit 0
fi
-max_timeout=32
-
-IS_TEMP=0
-
-echo "===> Wait for infrastructure ..."
-
-if [ -z "$COMMAND_CONFIG_FILE_PATH" ]; then
- COMMAND_CONFIG_FILE_PATH="$(mktemp)"
- IS_TEMP=1
-fi
-
-if [ ! -f "$COMMAND_CONFIG_FILE_PATH" ] || [ $IS_TEMP = 1 ]; then
- while IFS='=' read -r -d '' n v; do
- if [[ "$n" == "CONNECT_"* ]]; then
- name="${n/CONNECT_/""}" # remove first "CONNECT_"
- name="${name,,}" # lower case
- name="${name//_/"."}" # replace all '_' with '.'
- echo "$name=$v" >> ${COMMAND_CONFIG_FILE_PATH}
- fi
- done < <(env -0)
-fi
-
-# Check if variables exist
-if [ -z "$CONNECT_BOOTSTRAP_SERVERS" ]; then
- echo "CONNECT_BOOTSTRAP_SERVERS is not defined"
-else
- KAFKA_BROKERS=${KAFKA_BROKERS:-3}
-
- tries=10
- timeout=1
- while true; do
- KAFKA_CHECK=$(kafka-broker-api-versions --bootstrap-server "$CONNECT_BOOTSTRAP_SERVERS" --command-config "${COMMAND_CONFIG_FILE_PATH}" | grep "(id: " | wc -l)
+echo "===> Checking if Schema Registry is available ..."
- if [ "$KAFKA_CHECK" -ge "$KAFKA_BROKERS" ]; then
- echo "Kafka brokers available."
- break
- fi
-
- tries=$((tries - 1))
- if [ ${tries} -eq 0 ]; then
- echo "FAILED: KAFKA BROKERs NOT READY."
- exit 5
- fi
- echo "Expected $KAFKA_BROKERS brokers but found only $KAFKA_CHECK. Waiting $timeout second before retrying ..."
- sleep ${timeout}
- if [ ${timeout} -lt ${max_timeout} ]; then
- timeout=$((timeout * 2))
- fi
- done
-
- echo "Kafka is available."
-fi
-
-if [ -z "$CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL" ]; then
- echo "CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL is not defined"
-else
- tries=10
- timeout=1
- while true; do
- if wget --spider -q "${CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL}/subjects" 2>/dev/null; then
- echo "Schema registry available."
- break
- fi
- tries=$((tries - 1))
- if [ $tries -eq 0 ]; then
- echo "FAILED TO REACH SCHEMA REGISTRY."
- exit 6
- fi
- echo "Failed to reach schema registry. Retrying in ${timeout} seconds."
- sleep ${timeout}
- if [ ${timeout} -lt ${max_timeout} ]; then
- timeout=$((timeout * 2))
- fi
- done
-
- echo "Schema registry is available."
-fi
-
-if [ $IS_TEMP = 1 ]; then
- /bin/rm -f "$COMMAND_CONFIG_FILE_PATH"
-fi
+max_timeout=32
+tries=10
+timeout=1
+while true; do
+ if curl --head --silent --fail "${ss_url}/subjects" > /dev/null; then
+ echo "Schema registry available."
+ break
+ fi
+ tries=$((tries - 1))
+ if [ $tries -eq 0 ]; then
+ echo "FAILED TO REACH SCHEMA REGISTRY."
+ exit 6
+ fi
+ echo "Failed to reach schema registry. Retrying in ${timeout} seconds."
+ sleep ${timeout}
+ if [ ${timeout} -lt ${max_timeout} ]; then
+ timeout=$((timeout * 2))
+ fi
+done
+
+echo "Schema registry is available."
diff --git a/docker/kafka_connect_run.sh b/docker/kafka_connect_run.sh
new file mode 100644
index 00000000..25cc0b2b
--- /dev/null
+++ b/docker/kafka_connect_run.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+
+# Source script: https://github.com/strimzi/strimzi-kafka-operator/blob/main/docker-images/kafka-based/kafka/scripts/kafka_connect_run.sh
+
+set -e
+set +x
+
+# Prepare hostname - for StrimziPodSets we use the Pod DNS name assigned through the headless service
+ADVERTISED_HOSTNAME=$(hostname -f | cut -d "." -f1-4)
+export ADVERTISED_HOSTNAME
+
+# Create dir where keystores and truststores will be stored
+mkdir -p /tmp/kafka
+
+# Generate and print the config file
+echo "Starting Kafka Connect with configuration:"
+tee /tmp/strimzi-connect.properties < "/opt/kafka/custom-config/kafka-connect.properties" | sed -e 's/sasl.jaas.config=.*/sasl.jaas.config=[hidden]/g'
+echo ""
+
+# Disable Kafka's GC logging (which logs to a file)...
+export GC_LOG_ENABLED="false"
+
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+ if [[ "${KAFKA_VERSION:0:1}" == "3" ]]
+ then
+ export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$KAFKA_HOME/custom-config/log4j.properties"
+ else
+ export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$KAFKA_HOME/custom-config/log4j2.properties"
+ fi
+fi
+
+# We don't need LOG_DIR because we write no log files, but setting it to a
+# directory avoids trying to create it (and logging a permission denied error)
+export LOG_DIR="$KAFKA_HOME"
+
+# Enable Prometheus JMX Exporter as Java agent
+if [ "$KAFKA_CONNECT_METRICS_ENABLED" = "true" ]; then
+ KAFKA_OPTS="${KAFKA_OPTS} -javaagent:$(ls "$JMX_EXPORTER_HOME"/jmx_prometheus_javaagent*.jar)=9404:$KAFKA_HOME/custom-config/metrics-config.json"
+ export KAFKA_OPTS
+fi
+
+. ./set_kafka_jmx_options.sh "${STRIMZI_JMX_ENABLED}" "${STRIMZI_JMX_USERNAME}" "${STRIMZI_JMX_PASSWORD}"
+
+# Enable Tracing agent (initializes tracing) as Java agent
+if [ "$STRIMZI_TRACING" = "jaeger" ] || [ "$STRIMZI_TRACING" = "opentelemetry" ]; then
+ KAFKA_OPTS="$KAFKA_OPTS -javaagent:$(ls "$KAFKA_HOME"/libs/tracing-agent*.jar)=$STRIMZI_TRACING"
+ export KAFKA_OPTS
+ if [ "$STRIMZI_TRACING" = "opentelemetry" ] && [ -z "$OTEL_TRACES_EXPORTER" ]; then
+ # auto-set OTLP exporter
+ export OTEL_TRACES_EXPORTER="otlp"
+ fi
+fi
+
+if [ -n "$STRIMZI_JAVA_SYSTEM_PROPERTIES" ]; then
+ export KAFKA_OPTS="${KAFKA_OPTS} ${STRIMZI_JAVA_SYSTEM_PROPERTIES}"
+fi
+
+# Disable FIPS if needed
+if [ "$FIPS_MODE" = "disabled" ]; then
+ export KAFKA_OPTS="${KAFKA_OPTS} -Dcom.redhat.fips=false"
+fi
+
+# Configure heap based on the available resources if needed
+. ./dynamic_resources.sh
+
+# Configure Garbage Collection logging
+. ./set_kafka_gc_options.sh
+
+set -x
+
+### BEGIN CUSTOM RADAR KAFKA CONNECT SCRIPT ###
+# Call the ensure script to verify infrastructure, Kafka cluster, schema registry, and other components
+echo "===> Running preflight checks ... "
+"${KAFKA_HOME}/ensure"
+### END CUSTOM RADAR KAFKA CONNECT SCRIPT ###
+
+# starting Kafka server with final configuration
+exec /usr/bin/tini -w -e 143 -- "${KAFKA_HOME}/bin/connect-distributed.sh" /tmp/strimzi-connect.properties
diff --git a/docker/legacy/README.md b/docker/legacy/README.md
new file mode 100644
index 00000000..7daa3e12
--- /dev/null
+++ b/docker/legacy/README.md
@@ -0,0 +1,7 @@
+# Confluentinc image based docker setup (legacy)
+
+Files in this directory are used by Dockerfile-legacy to build legacy Docker images (Confluentinc-based) of the connectors,
+as opposed to the new Strimzi based images.
+
+The legacy setup can be used to run the Kafka stack (Kafka, Zookeeper, Schema Registry and Kafka Connectors)
+with docker compose (see [docker-compose.yml](../../docker-compose.yml)).
diff --git a/docker/legacy/ensure b/docker/legacy/ensure
new file mode 100644
index 00000000..13e94dad
--- /dev/null
+++ b/docker/legacy/ensure
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+if [ "$WAIT_FOR_KAFKA" != "1" ]; then
+ echo "Starting without checking for Kafka availability"
+ exit 0
+fi
+
+max_timeout=32
+
+IS_TEMP=0
+
+echo "===> Wait for infrastructure ..."
+
+if [ -z "$COMMAND_CONFIG_FILE_PATH" ]; then
+ COMMAND_CONFIG_FILE_PATH="$(mktemp)"
+ IS_TEMP=1
+fi
+
+if [ ! -f "$COMMAND_CONFIG_FILE_PATH" ] || [ $IS_TEMP = 1 ]; then
+ while IFS='=' read -r -d '' n v; do
+ if [[ "$n" == "CONNECT_"* ]]; then
+ name="${n/CONNECT_/""}" # remove first "CONNECT_"
+ name="${name,,}" # lower case
+ name="${name//_/"."}" # replace all '_' with '.'
+ echo "$name=$v" >> ${COMMAND_CONFIG_FILE_PATH}
+ fi
+ done < <(env -0)
+fi
+
+# Check if variables exist
+if [ -z "$CONNECT_BOOTSTRAP_SERVERS" ]; then
+ echo "CONNECT_BOOTSTRAP_SERVERS is not defined"
+else
+ KAFKA_BROKERS=${KAFKA_BROKERS:-3}
+
+ tries=10
+ timeout=1
+ while true; do
+ KAFKA_CHECK=$(kafka-broker-api-versions --bootstrap-server "$CONNECT_BOOTSTRAP_SERVERS" --command-config "${COMMAND_CONFIG_FILE_PATH}" | grep "(id: " | wc -l)
+
+ if [ "$KAFKA_CHECK" -ge "$KAFKA_BROKERS" ]; then
+ echo "Kafka brokers available."
+ break
+ fi
+
+ tries=$((tries - 1))
+ if [ ${tries} -eq 0 ]; then
+ echo "FAILED: KAFKA BROKERs NOT READY."
+ exit 5
+ fi
+ echo "Expected $KAFKA_BROKERS brokers but found only $KAFKA_CHECK. Waiting $timeout second before retrying ..."
+ sleep ${timeout}
+ if [ ${timeout} -lt ${max_timeout} ]; then
+ timeout=$((timeout * 2))
+ fi
+ done
+
+ echo "Kafka is available."
+fi
+
+if [ -z "$CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL" ]; then
+ echo "CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL is not defined"
+else
+ tries=10
+ timeout=1
+ while true; do
+ if wget --spider -q "${CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL}/subjects" 2>/dev/null; then
+ echo "Schema registry available."
+ break
+ fi
+ tries=$((tries - 1))
+ if [ $tries -eq 0 ]; then
+ echo "FAILED TO REACH SCHEMA REGISTRY."
+ exit 6
+ fi
+ echo "Failed to reach schema registry. Retrying in ${timeout} seconds."
+ sleep ${timeout}
+ if [ ${timeout} -lt ${max_timeout} ]; then
+ timeout=$((timeout * 2))
+ fi
+ done
+
+ echo "Schema registry is available."
+fi
+
+if [ $IS_TEMP = 1 ]; then
+ /bin/rm -f "$COMMAND_CONFIG_FILE_PATH"
+fi
diff --git a/docker/launch b/docker/legacy/launch
old mode 100755
new mode 100644
similarity index 100%
rename from docker/launch
rename to docker/legacy/launch
diff --git a/docker/log4j.properties.template b/docker/legacy/log4j.properties.template
similarity index 100%
rename from docker/log4j.properties.template
rename to docker/legacy/log4j.properties.template
diff --git a/docker/source-fitbit.properties.template b/docker/legacy/source-fitbit.properties.template
similarity index 100%
rename from docker/source-fitbit.properties.template
rename to docker/legacy/source-fitbit.properties.template
diff --git a/docker/source-oura.properties.template b/docker/legacy/source-oura.properties.template
similarity index 100%
rename from docker/source-oura.properties.template
rename to docker/legacy/source-oura.properties.template
diff --git a/kafka-connect-fitbit-source/Dockerfile b/kafka-connect-fitbit-source/Dockerfile
index 33873f6f..ac35b434 100644
--- a/kafka-connect-fitbit-source/Dockerfile
+++ b/kafka-connect-fitbit-source/Dockerfile
@@ -32,7 +32,7 @@ COPY ./kafka-connect-fitbit-source/src/ /code/kafka-connect-fitbit-source/src
RUN gradle jar
-FROM confluentinc/cp-kafka-connect-base:7.8.1
+FROM quay.io/strimzi/kafka:0.46.0-kafka-3.9.0
USER appuser
@@ -40,8 +40,7 @@ LABEL org.opencontainers.image.authors="pim@thehyve.nl"
LABEL description="Kafka REST API Source connector"
-ENV CONNECT_PLUGIN_PATH="/usr/share/java/kafka-connect/plugins" \
- WAIT_FOR_KAFKA="1"
+ENV CONNECT_PLUGIN_PATH=/opt/kafka/plugins
# To isolate the classpath from the plugin path as recommended
COPY --from=builder /code/kafka-connect-rest-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-rest-source/
@@ -51,14 +50,11 @@ COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_P
COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
COPY --from=builder /code/kafka-connect-fitbit-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
-# Load topics validator
-COPY --chown=appuser:appuser ./docker/ensure /etc/confluent/docker/ensure
-
-# Load modified launcher
-COPY --chown=appuser:appuser ./docker/launch /etc/confluent/docker/launch
-
-# Overwrite the log4j configuration to include Sentry monitoring.
-COPY ./docker/log4j.properties.template /etc/confluent/docker/log4j.properties.template
# Copy Sentry monitoring jars.
-COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/sentry-* /etc/kafka-connect/jars
+COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/sentry-* /opt/kafka/libs/
+
+USER 1001
+COPY --chown=1001:1001 ./docker/ensure /opt/kafka/ensure
+COPY --chown=1001:1001 ./docker/kafka_connect_run.sh /opt/kafka/kafka_connect_run.sh
+RUN chmod +x /opt/kafka/ensure /opt/kafka/kafka_connect_run.sh
diff --git a/kafka-connect-fitbit-source/Dockerfile-legacy b/kafka-connect-fitbit-source/Dockerfile-legacy
new file mode 100644
index 00000000..b7df9f8f
--- /dev/null
+++ b/kafka-connect-fitbit-source/Dockerfile-legacy
@@ -0,0 +1,65 @@
+# Copyright 2018 The Hyve
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
+
+RUN mkdir /code
+WORKDIR /code
+
+ENV GRADLE_USER_HOME=/code/.gradlecache \
+ GRADLE_OPTS="-Dorg.gradle.vfs.watch=false -Djdk.lang.Process.launchMechanism=vfork"
+
+COPY buildSrc /code/buildSrc
+COPY ./build.gradle.kts ./settings.gradle.kts ./gradle.properties /code/
+COPY kafka-connect-rest-source/build.gradle.kts /code/kafka-connect-rest-source/
+COPY kafka-connect-fitbit-source/build.gradle.kts /code/kafka-connect-fitbit-source/
+
+RUN gradle downloadDependencies copyDependencies
+
+COPY ./kafka-connect-rest-source/src/ /code/kafka-connect-rest-source/src
+COPY ./kafka-connect-fitbit-source/src/ /code/kafka-connect-fitbit-source/src
+
+RUN gradle jar
+
+FROM confluentinc/cp-kafka-connect-base:7.8.1
+
+USER appuser
+
+LABEL org.opencontainers.image.authors="pim@thehyve.nl"
+
+LABEL description="Kafka REST API Source connector"
+
+ENV CONNECT_PLUGIN_PATH="/usr/share/java/kafka-connect/plugins" \
+ WAIT_FOR_KAFKA="1"
+
+# To isolate the classpath from the plugin path as recommended
+COPY --from=builder /code/kafka-connect-rest-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-rest-source/
+COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
+
+COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-rest-source/
+COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
+COPY --from=builder /code/kafka-connect-fitbit-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
+
+# Load topics validator
+COPY --chown=appuser:appuser ./docker/legacy/ensure /etc/confluent/docker/ensure
+
+# Load modified launcher
+COPY --chown=appuser:appuser ./docker/legacy/launch /etc/confluent/docker/launch
+
+# Overwrite the log4j configuration to include Sentry monitoring.
+COPY ./docker/legacy/log4j.properties.template /etc/confluent/docker/log4j.properties.template
+# Copy Sentry monitoring jars.
+COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/sentry-* /etc/kafka-connect/jars
+
+RUN chmod +x /etc/confluent/docker/ensure
diff --git a/kafka-connect-oura-source/Dockerfile b/kafka-connect-oura-source/Dockerfile
index 4c4ba125..a88e8c79 100644
--- a/kafka-connect-oura-source/Dockerfile
+++ b/kafka-connect-oura-source/Dockerfile
@@ -32,7 +32,7 @@ COPY ./oura-library/src/ /code/oura-library/src
RUN gradle jar
-FROM confluentinc/cp-kafka-connect-base:7.8.1
+FROM quay.io/strimzi/kafka:0.46.0-kafka-3.9.0
USER appuser
@@ -40,8 +40,7 @@ LABEL org.opencontainers.image.authors="pauline.conde@kcl.ac.uk"
LABEL description="Kafka Oura REST API Source connector"
-ENV CONNECT_PLUGIN_PATH="/usr/share/java/kafka-connect/plugins" \
- WAIT_FOR_KAFKA="1"
+ENV CONNECT_PLUGIN_PATH=/opt/kafka/plugins
# To isolate the classpath from the plugin path as recommended
COPY --from=builder /code/kafka-connect-oura-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
@@ -50,14 +49,11 @@ COPY --from=builder /code/oura-library/build/third-party/*.jar ${CONNECT_PLUGIN_
COPY --from=builder /code/kafka-connect-oura-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
COPY --from=builder /code/oura-library/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
-# Load topics validator
-COPY --chown=appuser:appuser ./docker/ensure /etc/confluent/docker/ensure
-
-# Load modified launcher
-COPY --chown=appuser:appuser ./docker/launch /etc/confluent/docker/launch
-
-# Overwrite the log4j configuration to include Sentry monitoring.
-COPY ./docker/log4j.properties.template /etc/confluent/docker/log4j.properties.template
# Copy Sentry monitoring jars.
-COPY --from=builder /code/kafka-connect-oura-source/build/third-party/sentry-* /etc/kafka-connect/jars
+COPY --from=builder /code/kafka-connect-oura-source/build/third-party/sentry-* /opt/kafka/libs/
+
+USER 1001
+COPY --chown=1001:1001 ./docker/ensure /opt/kafka/ensure
+COPY --chown=1001:1001 ./docker/kafka_connect_run.sh /opt/kafka/kafka_connect_run.sh
+RUN chmod +x /opt/kafka/ensure /opt/kafka/kafka_connect_run.sh
diff --git a/kafka-connect-oura-source/Dockerfile-legacy b/kafka-connect-oura-source/Dockerfile-legacy
new file mode 100644
index 00000000..84b147af
--- /dev/null
+++ b/kafka-connect-oura-source/Dockerfile-legacy
@@ -0,0 +1,61 @@
+# Copyright 2018 The Hyve
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder
+
+RUN mkdir /code
+WORKDIR /code
+
+ENV GRADLE_USER_HOME=/code/.gradlecache \
+ GRADLE_OPTS="-Dorg.gradle.vfs.watch=false -Djdk.lang.Process.launchMechanism=vfork"
+
+COPY buildSrc /code/buildSrc
+COPY ./build.gradle.kts ./settings.gradle.kts ./gradle.properties /code/
+COPY kafka-connect-oura-source/build.gradle.kts /code/kafka-connect-oura-source/
+COPY oura-library/build.gradle /code/oura-library/
+
+RUN gradle downloadDependencies copyDependencies
+
+COPY ./kafka-connect-oura-source/src/ /code/kafka-connect-oura-source/src
+COPY ./oura-library/src/ /code/oura-library/src
+
+RUN gradle jar
+
+FROM confluentinc/cp-kafka-connect-base:7.8.1
+
+USER appuser
+
+LABEL org.opencontainers.image.authors="pauline.conde@kcl.ac.uk"
+
+LABEL description="Kafka Oura REST API Source connector"
+
+ENV CONNECT_PLUGIN_PATH="/usr/share/java/kafka-connect/plugins" \
+ WAIT_FOR_KAFKA="1"
+
+# To isolate the classpath from the plugin path as recommended
+COPY --from=builder /code/kafka-connect-oura-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
+COPY --from=builder /code/oura-library/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
+
+COPY --from=builder /code/kafka-connect-oura-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
+COPY --from=builder /code/oura-library/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-oura-source/
+
+# Load topics validator
+COPY --chown=appuser:appuser ./docker/legacy/ensure /etc/confluent/docker/ensure
+
+# Load modified launcher
+COPY --chown=appuser:appuser ./docker/legacy/launch /etc/confluent/docker/launch
+
+# Copy Sentry monitoring jars. Sentry monitoring needs to be included in helm chart log4j ConfigMap.
+COPY --from=builder /code/kafka-connect-oura-source/build/third-party/sentry-* /etc/kafka-connect/jars
+