diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..29071a8
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,9 @@
+version: 2
+updates:
+  - package-ecosystem: "gradle"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    allow:
+      - dependency-name: "org.apache.kafka:*"
+      - dependency-name: "com.solacesystems:*"
diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index f06d7b1..e314943 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -7,20 +7,69 @@ on:
   push:
 
 jobs:
+  dupe_check:
+    name: Check for Duplicate Workflow Run
+    runs-on: ubuntu-latest
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v3.4.0
+        with:
+          concurrent_skipping: same_content
+          do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
+
   build:
+    needs:
+      - dupe_check
+    if: needs.dupe_check.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository_owner == 'SolaceProducts')
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Setup JDK 1.8
-        uses: actions/setup-java@v1
         with:
-          java-version: 1.8
+          submodules: recursive
+
+      - name: Cache Gradle
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.gradle/caches
+            ~/.gradle/wrapper
+          key: ${{ runner.os }}-gradle-build-test-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-build-test-
+
+      - name: Setup JDK 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: zulu
+          java-version: 8
+
+      - name: Validate Gradle wrapper
+        uses: gradle/wrapper-validation-action@v1
+
+      - name: Install Test Support
+        working-directory: solace-integration-test-support
+        run: ./mvnw clean install -DskipTests
+
       - name: Build and test with Gradle
-        run: ./gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT
+        run: ./gradlew clean test integrationTest jacocoFullReport --info
+
+      - name: Upload Test Artifacts
+        if: always()
+        uses: actions/upload-artifact@v2
+        with:
+          name: Test Results
+          path: |
+            **/build/jacoco/*.exec
+            **/build/reports/
+            **/build/test-results/**/*.xml
+
       - name: Publish artifacts
-        if: github.event_name == 'push'
+        # Security Measure: Do not publish artifacts from dependabot builds
+        if: github.event_name == 'push' && (github.actor != 'dependabot[bot]' || !contains(github.ref, 'dependabot'))
         run: |
           if [ ${{ github.ref }} == 'refs/heads/master' ] && [ ${{ github.repository_owner }} == 'SolaceProducts' ] ; then
             echo "Using master on SolaceProducts"
@@ -56,4 +105,30 @@ jobs:
           git remote add origin-pages https://${{ secrets.GH_TOKEN }}@github.com/${{ github.repository }}.git > /dev/null 2>&1;
           git push --quiet --set-upstream origin-pages gh-pages;
           echo "Updated and pushed GH pages!";
-        fi
+        fi
+
+      - name: Cleanup Gradle Cache
+        # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
+        # Restoring these files from a GitHub Actions cache might cause problems for future builds.
+        run: |
+          rm -f ~/.gradle/caches/modules-2/modules-2.lock
+          rm -f ~/.gradle/caches/modules-2/gc.properties
+
+      - name: Publish Unit Test Results
+        if: github.actor != 'dependabot[bot]' || (github.event_name == 'push' && !contains(github.ref, 'dependabot'))
+        uses: EnricoMi/publish-unit-test-result-action@v1
+        continue-on-error: true
+        with:
+          check_name: Unit Test Results
+          comment_mode: create new
+          fail_on: nothing
+          hide_comments: orphaned commits
+          files: |
+            **/build/test-results/**/*.xml
+
+      - name: Publish Test Coverage Results
+        if: github.event_name == 'pull_request' && github.actor != 'dependabot[bot]' && github.event.pull_request.head.repo.full_name == github.repository
+        uses: madrapps/jacoco-report@v1.2
+        with:
+          paths: build/reports/jacoco/jacocoFullReport/jacocoFullReport.xml
+          token: ${{ secrets.GITHUB_TOKEN }}
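The workflow above is a direct CI translation of the local developer loop. For reference, the equivalent shell sequence on a workstation would look roughly like this (a sketch assuming the repository and its submodule are already cloned; the commands themselves are the ones this workflow and the README use):

```shell
# One-time: fetch the test-support submodule and install it to the local Maven repo
git submodule update --init --recursive
(cd solace-integration-test-support && ./mvnw clean install -DskipTests)

# Then run the same Gradle invocation as the "Build and test with Gradle" step
./gradlew clean test integrationTest jacocoFullReport --info
```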
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 0000000..dc2420a
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,96 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "Code Analysis (CodeQL)"
+
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: '38 15 * * 0'
+  workflow_dispatch:
+
+jobs:
+  dupe_check:
+    name: Check for Duplicate Workflow Run
+    runs-on: ubuntu-latest
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v3.4.0
+        with:
+          concurrent_skipping: same_content
+          do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
+
+  analyze:
+    name: Analyze
+    needs:
+      - dupe_check
+    if: needs.dupe_check.outputs.should_skip != 'true'
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      - name: Cache Gradle
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.gradle/caches
+            ~/.gradle/wrapper
+          key: ${{ runner.os }}-gradle-codeql-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-codeql-
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v1
+        with:
+          languages: java
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+          # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+      - name: Validate Gradle wrapper
+        uses: gradle/wrapper-validation-action@v1
+
+      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+      # If this step fails, then you should remove it and run the build manually (see below)
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v1
+
+      # ℹī¸ Command-line programs to run using the OS shell.
+      # 📚 https://git.io/JvXDl
+
+      # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following three lines
+      #    and modify them (or add more) to build your code if your project
+      #    uses a compiled language
+
+      #- run: |
+      #    make bootstrap
+      #    make release
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v1
+
+      - name: Cleanup Gradle Cache
+        # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
+        # Restoring these files from a GitHub Actions cache might cause problems for future builds.
+        run: |
+          rm -f ~/.gradle/caches/modules-2/modules-2.lock
+          rm -f ~/.gradle/caches/modules-2/gc.properties
diff --git a/.github/workflows/pmd-analysis.yml b/.github/workflows/pmd-analysis.yml
new file mode 100644
index 0000000..84ecd6e
--- /dev/null
+++ b/.github/workflows/pmd-analysis.yml
@@ -0,0 +1,71 @@
+name: Code Analysis (PMD)
+
+on:
+  pull_request:
+  push:
+  workflow_dispatch:
+
+jobs:
+  dupe_check:
+    name: Check for Duplicate Workflow Run
+    runs-on: ubuntu-latest
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v3.4.0
+        with:
+          concurrent_skipping: same_content
+          do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
+
+  run-analysis:
+    name: Run PMD Static Code Analysis
+    needs:
+      - dupe_check
+    if: needs.dupe_check.outputs.should_skip != 'true'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Cache Gradle
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.gradle/caches
+            ~/.gradle/wrapper
+          key: ${{ runner.os }}-gradle-pmd-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-pmd-
+
+      - name: Setup JDK 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: zulu
+          java-version: 8
+
+      - name: Validate Gradle wrapper
+        uses: gradle/wrapper-validation-action@v1
+
+      - name: Run static code analysis
+        run: ./gradlew clean pmdMainSarif --info
+
+      - name: Upload Test Artifacts
+        if: always()
+        uses: actions/upload-artifact@v2
+        with:
+          name: Static Code Analysis Results (PMD)
+          path: |
+            **/build/reports/
+
+      - name: Upload SARIF file
+        if: success() || failure()
+        uses: github/codeql-action/upload-sarif@v1
+        with:
+          sarif_file: build/reports/pmd/main.sarif
+
+      - name: Cleanup Gradle Cache
+        # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
+        # Restoring these files from a GitHub Actions cache might cause problems for future builds.
+        run: |
+          rm -f ~/.gradle/caches/modules-2/modules-2.lock
+          rm -f ~/.gradle/caches/modules-2/gc.properties
\ No newline at end of file
diff --git a/.github/workflows/spotbugs-analysis.yml b/.github/workflows/spotbugs-analysis.yml
new file mode 100644
index 0000000..f1cd934
--- /dev/null
+++ b/.github/workflows/spotbugs-analysis.yml
@@ -0,0 +1,71 @@
+name: Code Analysis (SpotBugs)
+
+on:
+  pull_request:
+  push:
+  workflow_dispatch:
+
+jobs:
+  dupe_check:
+    name: Check for Duplicate Workflow Run
+    runs-on: ubuntu-latest
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v3.4.0
+        with:
+          concurrent_skipping: same_content
+          do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
+
+  run-analysis:
+    name: Run SpotBugs Static Code Analysis
+    needs:
+      - dupe_check
+    if: needs.dupe_check.outputs.should_skip != 'true'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Cache Gradle
+        uses: actions/cache@v2
+        with:
+          path: |
+            ~/.gradle/caches
+            ~/.gradle/wrapper
+          key: ${{ runner.os }}-gradle-spotbugs-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }}
+          restore-keys: |
+            ${{ runner.os }}-gradle-spotbugs-
+
+      - name: Setup JDK 8
+        uses: actions/setup-java@v2
+        with:
+          distribution: zulu
+          java-version: 8
+
+      - name: Validate Gradle wrapper
+        uses: gradle/wrapper-validation-action@v1
+
+      - name: Run static code analysis
+        run: ./gradlew clean spotbugsMain --info
+
+      - name: Upload Test Artifacts
+        if: always()
+        uses: actions/upload-artifact@v2
+        with:
+          name: Static Code Analysis Results (SpotBugs)
+          path: |
+            **/build/reports/
+
+      - name: Upload SARIF file
+        if: success() || failure()
+        uses: github/codeql-action/upload-sarif@v1
+        with:
+          sarif_file: build/reports/spotbugs/main.sarif
+
+      - name: Cleanup Gradle Cache
+        # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions.
+        # Restoring these files from a GitHub Actions cache might cause problems for future builds.
+        run: |
+          rm -f ~/.gradle/caches/modules-2/modules-2.lock
+          rm -f ~/.gradle/caches/modules-2/gc.properties
\ No newline at end of file
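Both static-analysis workflows reduce to a single Gradle invocation plus a SARIF upload. To produce the same reports locally, the corresponding commands would be roughly the following (a sketch; the task names and report paths are the ones these workflows and the build script declare):

```shell
./gradlew clean pmdMainSarif   # writes build/reports/pmd/main.sarif (and main.html)
./gradlew spotbugsMain         # writes build/reports/spotbugs/main.sarif
```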
diff --git a/.gitignore b/.gitignore
index 6705f1c..5014111 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,5 +36,11 @@ local.properties
 *.launch
 /build/
 
+### IntelliJ
+.idea
+*.iws
+*.iml
+*.ipr
+
 # Unzipped test connector
 src/integrationTest/resources/pubsubplus-connector-kafka*/
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..846409f
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "solace-integration-test-support"]
+	path = solace-integration-test-support
+	url = ../../SolaceDev/solace-integration-test-support.git
diff --git a/README.md b/README.md
index 64ac1c4..52ca58a 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,7 @@
 [![Actions Status](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/workflows/build/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions?query=workflow%3Abuild+branch%3Amaster)
+[![Code Analysis (CodeQL)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/codeql-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/codeql-analysis.yml)
+[![Code Analysis (PMD)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/pmd-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/pmd-analysis.yml)
+[![Code Analysis (SpotBugs)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/spotbugs-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/spotbugs-analysis.yml)
 
 # Solace PubSub+ Connector for Kafka: Source
 
@@ -153,7 +156,7 @@ In this case the IP address is one of the nodes running the distributed mode worker
     {
       "class": "com.solace.connector.kafka.connect.source.SolaceSourceConnector",
       "type": "source",
-      "version": "2.0.0"
+      "version": "2.1.0"
     },
 
@@ -312,27 +315,38 @@ Kerberos has some very specific requirements to operate correctly. Some additional
 
 ## Developers Guide
 
-### Build and Test the Project
+### Build the Project
 
 JDK 8 or higher is required for this project.
 
 First, clone this GitHub repo:
-```
+```shell
 git clone https://github.com/SolaceProducts/pubsubplus-connector-kafka-source.git
 cd pubsubplus-connector-kafka-source
 ```
 
 Then run the build script:
-```
+```shell
 gradlew clean build
 ```
 
 This script creates artifacts in the `build` directory, including the deployable packaged PubSub+ Source Connector archives under `build\distributions`.
 
+### Test the Project
+
 An integration test suite is also included, which spins up a Docker-based deployment environment that includes a PubSub+ event broker, Zookeeper, a Kafka broker, and Kafka Connect. It deploys the connector to Kafka Connect and runs end-to-end tests.
-```
-gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT
-```
+
+1. Install the test support module:
+   ```shell
+   git submodule update --init --recursive
+   cd solace-integration-test-support
+   ./mvnw clean install -DskipTests
+   cd ..
+   ```
+2. Run the tests:
+   ```shell
+   ./gradlew clean test integrationTest
+   ```
 
 ### Build a New Message Processor
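For orientation on that last README section: a minimal custom message processor compatible with the test code later in this diff might look roughly like the sketch below. It reuses `SolSampleSimpleMessageProcessor` (shipped with the connector) the same way the tests' `BadMessageProcessor` does; the class name and logging body are invented for illustration, and only the `process(String, BytesXMLMessage)` override is taken from this diff:

```java
import com.solace.connector.kafka.connect.source.SolMessageProcessorIF;
import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor;
import com.solacesystems.jcsmp.BytesXMLMessage;

// Hypothetical example: delegate to the sample processor but log each message first.
public class LoggingMessageProcessor extends SolSampleSimpleMessageProcessor {
  @Override
  public SolMessageProcessorIF process(String skey, BytesXMLMessage message) {
    System.out.println("Received PubSub+ message " + message.getMessageId());
    return super.process(skey, message); // reuse the sample conversion logic
  }
}
```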
diff --git a/build.gradle b/build.gradle
index 3623633..e827971 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,10 +1,17 @@
-apply plugin: 'java'
-apply plugin: 'distribution'
-apply plugin: 'org.unbroken-dome.test-sets'
+import com.github.spotbugs.snom.SpotBugsTask
+
+plugins {
+    id 'java'
+    id 'distribution'
+    id 'jacoco'
+    id 'pmd'
+    id 'com.github.spotbugs' version '4.7.6'
+    id 'org.unbroken-dome.test-sets' version '2.2.1'
+}
 
 ext {
-    kafkaVersion = '2.4.1'
-    solaceJavaAPIVersion = '10.6.0'
+    kafkaVersion = '2.8.1'
+    solaceJavaAPIVersion = '10.12.0'
 }
 
 repositories {
@@ -12,16 +19,12 @@ repositories {
     mavenCentral()
 }
 
-buildscript {
-    repositories {
-        maven {
-            url "https://plugins.gradle.org/m2/"
+sourceSets {
+    main {
+        java {
+            srcDir "${buildDir}/generated/java" // add generated sources as additional source directory
         }
     }
-    dependencies {
-        classpath "com.github.spotbugs:spotbugs-gradle-plugin:3.0.0"
-        classpath "org.unbroken-dome.test-sets:org.unbroken-dome.test-sets.gradle.plugin:2.2.1"
-    }
 }
 
 testSets {
@@ -29,37 +32,62 @@ testSets {
 }
 
 dependencies {
-    integrationTestImplementation 'junit:junit:4.12'
-    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.1'
-    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.1'
-    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.1'
-    integrationTestImplementation 'org.junit.platform:junit-platform-engine:1.7.1'
-    integrationTestImplementation 'org.mockito:mockito-core:3.7.7'
-    integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.7.7'
-    integrationTestImplementation 'org.testcontainers:testcontainers:1.15.1'
-    integrationTestImplementation 'org.testcontainers:junit-jupiter:1.15.1'
-    integrationTestImplementation 'org.slf4j:slf4j-api:1.7.28'
-    integrationTestImplementation 'org.slf4j:slf4j-simple:1.7.28'
+    integrationTestImplementation 'org.junit.jupiter:junit-jupiter:5.8.1'
+    integrationTestImplementation 'org.junit-pioneer:junit-pioneer:1.4.2'
+    integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.12.4'
+    integrationTestImplementation 'org.testcontainers:testcontainers:1.16.0'
+    integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0'
+    integrationTestImplementation 'org.testcontainers:kafka:1.16.0'
+    integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.5.0'
+    integrationTestImplementation 'org.slf4j:slf4j-api:1.7.32'
+    integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1'
     integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6'
     integrationTestImplementation 'commons-beanutils:commons-beanutils:1.9.4'
     integrationTestImplementation 'com.google.code.gson:gson:2.3.1'
     integrationTestImplementation 'commons-io:commons-io:2.4'
     integrationTestImplementation 'com.squareup.okhttp3:okhttp:4.9.1'
-    integrationTestImplementation 'org.apache.kafka:kafka-clients:$kafkaVersion'
+    integrationTestImplementation "org.apache.kafka:kafka-clients:$kafkaVersion"
+    testImplementation 'org.junit.jupiter:junit-jupiter:5.8.1'
+    testImplementation 'org.hamcrest:hamcrest-all:1.3'
+    testImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1'
     compile "org.apache.kafka:connect-api:$kafkaVersion"
     compile "com.solacesystems:sol-jcsmp:$solaceJavaAPIVersion"
 }
 
+pmd {
+    consoleOutput = true
+    rulesMinimumPriority = 2
+    toolVersion = '6.38.0'
+}
+
+spotbugs {
+    effort 'max'
+    reportLevel 'high' // Decrease to medium once medium errors are fixed
+}
+
+task('jacocoFullReport', type: JacocoReport) {
+    description 'Generates code coverage report for all tests.'
+    executionData tasks.withType(Test)
+    sourceSets sourceSets.main
+    reports {
+        xml.required = true
+    }
+}
+
 task('prepDistForIntegrationTesting') {
     dependsOn assembleDist
     doLast {
         copy {
-            from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0])
-            into (file('src/integrationTest/resources'))
+            from zipTree(file(distsDirectory).listFiles().findAll {
+                it.name.endsWith("-${project.version}.zip")
+            }[0])
+            into sourceSets.integrationTest.resources.srcDirs[0]
         }
         copy {
-            from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0])
-            into (file('build/resources/integrationTest'))
+            from zipTree(file(distsDirectory).listFiles().findAll {
+                it.name.endsWith("-${project.version}.zip")
+            }[0])
+            into sourceSets.integrationTest.output.resourcesDir
         }
     }
 }
@@ -78,6 +106,81 @@ project.integrationTest {
     }
 }
 
+project.test {
+    useJUnitPlatform()
+}
+
+tasks.withType(SpotBugsTask) {
+    reports {
+        sarif {
+            enabled = true
+        }
+    }
+}
+
+// Workaround to generate Sarif report
+// Based off https://github.com/gradle/gradle/blob/v6.9.1/subprojects/code-quality/src/main/groovy/org/gradle/api/plugins/quality/internal/PmdInvoker.groovy
+task('pmdMainSarif') {
+    PmdExtension extension = project.extensions.getByType(PmdExtension)
+    dependsOn classes
+    outputs.dir extension.getReportsDir()
+    doLast {
+        ant.taskdef(name: 'pmd',
+                classname: 'net.sourceforge.pmd.ant.PMDTask',
+                classpath: project.configurations.pmd.asPath)
+        ant.pmd(failOnRuleViolation: false,
+                failuresPropertyName: "pmdFailureCount",
+                minimumPriority: extension.rulesMinimumPriority.get()) {
+            sourceSets.main.allJava.srcDirs.each {
+                fileset(dir: it)
+            }
+
+            extension.ruleSets.each {
+                ruleset(it)
+            }
+
+            extension.ruleSetFiles.each {
+                ruleset(it)
+            }
+
+            if (extension.ruleSetConfig != null) {
+                ruleset(extension.ruleSetConfig.asFile())
+            }
+
+            Provider reportsDir = project.getLayout()
+                    .file(project.getProviders().provider({a -> extension.getReportsDir()}) as Provider)
+            formatter(type: 'sarif', toFile: new File(reportsDir.get().getAsFile(), 'main.sarif'))
+            formatter(type: 'html', toFile: new File(reportsDir.get().getAsFile(), 'main.html'))
+
+            if (extension.consoleOutput) {
+                formatter(type: 'textcolor', toConsole: true)
+            }
+        }
+
+        def failureCount = ant.project.properties["pmdFailureCount"]
+        if (failureCount) {
+            def message = "$failureCount PMD rule violations were found."
+            if (extension.ignoreFailures || ((failureCount as Integer) <= extension.maxFailures.get())) {
+                logger.warn(message)
+            } else {
+                throw new GradleException(message)
+            }
+        }
+    }
+}
+
+task('generateJava', type: Copy) {
+    def templateContext = [version: project.version]
+    inputs.properties templateContext // Register context as input so that task doesn't skip when props are updated
+    from 'src/template/java'
+    into "${buildDir}/generated/java"
+    expand templateContext
+}
+
+project.compileJava {
+    dependsOn generateJava
+}
+
 distributions {
     main {
         contents {
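The `generateJava` task above expands Groovy-style `${...}` placeholders in files under `src/template/java` into `${buildDir}/generated/java` before compilation. The template files themselves are not shown in this diff; a version-stamping template backing the `VersionUtil.getVersion()` call used by the integration tests later on might look roughly like this (hypothetical content — only the `version` property, the template directory, and the class name come from elsewhere in this diff):

```java
// src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java (hypothetical)
package com.solace.connector.kafka.connect.source;

public final class VersionUtil {
  private VersionUtil() { }

  public static String getVersion() {
    return "${version}"; // replaced with project.version by the generateJava Copy task
  }
}
```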
diff --git a/etc/solace_source.properties b/etc/solace_source.properties
index 73b7d48..0290118 100644
--- a/etc/solace_source.properties
+++ b/etc/solace_source.properties
@@ -32,6 +32,9 @@ sol.topics=sourcetest
 # Refer to https://github.com/SolaceProducts/pubsubplus-connector-kafka-source
 sol.message_processor_class=com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor
 
+# If enabled, messages that throw message processor errors will be discarded.
+#sol.message_processor.error.ignore=false
+
 # When using SolaceSampleKeyedMessageProcessor, defines which part of a
 # PubSub+ message shall be converted to a Kafka record key
 # Allowable values include: NONE, DESTINATION, CORRELATION_ID, CORRELATION_ID_AS_BYTES
diff --git a/gradle.properties b/gradle.properties
index fb7cb53..16cc23c 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1 +1 @@
-version=2.0.2
\ No newline at end of file
+version=2.1.0
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index cc4fdc2..e708b1c 100644
Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 1b16c34..3ab0b72 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.9.1-bin.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/gradlew b/gradlew
index 2fe81a7..4f906e0 100755
--- a/gradlew
+++ b/gradlew
@@ -82,6 +82,7 @@ esac
 
 CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
 
+
 # Determine the Java command to use to start the JVM.
 if [ -n "$JAVA_HOME" ] ; then
     if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
@@ -129,6 +130,7 @@ fi
 if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
     APP_HOME=`cygpath --path --mixed "$APP_HOME"`
     CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
     JAVACMD=`cygpath --unix "$JAVACMD"`
 
     # We build the pattern for arguments to be converted via cygpath
diff --git a/gradlew.bat b/gradlew.bat
index 24467a1..ac1b06f 100644
--- a/gradlew.bat
+++ b/gradlew.bat
@@ -29,6 +29,9 @@ if "%DIRNAME%" == "" set DIRNAME=.
 set APP_BASE_NAME=%~n0
 set APP_HOME=%DIRNAME%
 
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
 @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
 set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
 
@@ -37,7 +40,7 @@ if defined JAVA_HOME goto findJavaFromJavaHome
 set JAVA_EXE=java.exe
 %JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
+if "%ERRORLEVEL%" == "0" goto execute
 
 echo.
 echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
@@ -51,7 +54,7 @@ goto fail
 set JAVA_HOME=%JAVA_HOME:"=%
 set JAVA_EXE=%JAVA_HOME%/bin/java.exe
 
-if exist "%JAVA_EXE%" goto init
+if exist "%JAVA_EXE%" goto execute
 
 echo.
 echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
@@ -61,28 +64,14 @@ echo location of your Java installation.
 
 goto fail
 
-:init
-@rem Get command-line arguments, handling Windows variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-
 :execute
 @rem Setup the command line
 
 set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
 
+
 @rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
 
 :end
 @rem End local scope for the variables with windows NT shell
diff --git a/solace-integration-test-support b/solace-integration-test-support
new file mode 160000
index 0000000..c411ac2
--- /dev/null
+++ b/solace-integration-test-support
@@ -0,0 +1 @@
+Subproject commit c411ac2e0f82af25ece2994691352cb0d6235142
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java
deleted file mode 100644
index e2a1294..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.FixedHostPortGenericContainer;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.wait.strategy.Wait;
-import org.testcontainers.junit.jupiter.Container;
-
-public class DockerizedPlatformSetupApache implements MessagingServiceFullLocalSetupApache {
-
-  @Container
-  public final static GenericContainer KAFKA_CONNECT_REST = new FixedHostPortGenericContainer<>("bitnami/kafka:2")
-      .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", dockerIpAddress + ":2181")
-      .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes")
-      .withCommand("/bin/sh", "-c", //"sleep 10000")
-          "sed -i 's/bootstrap.servers=.*/bootstrap.servers=" + dockerIpAddress
-              + ":39092/g' /opt/bitnami/kafka/config/connect-distributed.properties; "
-              + "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties; "
-              + "echo 'rest.port=28083' >> /opt/bitnami/kafka/config/connect-distributed.properties; "
-              + "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties")
-      .withFixedExposedPort(28083,28083)
-      .withExposedPorts(28083)
-////
-//      // Enable remote debug session at default port 5005
-//      .withEnv("KAFKA_DEBUG", "y")
-//      .withEnv("DEBUG_SUSPEND_FLAG", "y")
-////
-      .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib",
-          "/opt/bitnami/kafka/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY)
-//      .withStartupTimeout(Duration.ofSeconds(120))
-      .waitingFor( Wait.forLogMessage(".*Finished starting connectors and tasks.*", 1) )
-      ;
-
-  @BeforeAll
-  static void setUp() {
-    assert(KAFKA_CONNECT_REST != null); // Required to instantiate
-  }
-
-  @DisplayName("Local MessagingService connection tests")
-  @Nested
-  class MessagingServiceConnectionTests {
-    @DisplayName("Setup the dockerized platform")
-    @Test
-    @Disabled
-    void setupDockerizedPlatformTest() {
-      String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
-      assertNotNull(host);
-      try {
-        Thread.sleep(36000000l);
-      } catch (InterruptedException e) {
-        e.printStackTrace();
-      }
-
-    }
-  }
-}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java
deleted file mode 100644
index 10a2703..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
-import org.testcontainers.containers.BindMode;
-import org.testcontainers.containers.FixedHostPortGenericContainer;
-import org.testcontainers.containers.GenericContainer;
-import org.testcontainers.containers.wait.strategy.Wait;
-import org.testcontainers.junit.jupiter.Container;
-
-public class DockerizedPlatformSetupConfluent implements MessagingServiceFullLocalSetupConfluent {
-
-  @Container
-  public final static GenericContainer connector = new FixedHostPortGenericContainer<>(
-      "confluentinc/cp-kafka-connect-base:5.4.0")
-          .withEnv("CONNECT_BOOTSTRAP_SERVERS", COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092) + ":39092")
-          .withFixedExposedPort(28083, 28083).withFixedExposedPort(5005, 5005).withExposedPorts(28083, 5005)
-          .withEnv("CONNECT_REST_PORT", "28083")
-//
-//          // Enable remote debug session at default port 5005
-//          .withEnv("KAFKA_DEBUG", "y")
-//          .withEnv("DEBUG_SUSPEND_FLAG", "y")
-//
-          .withEnv("CONNECT_GROUP_ID", "testconnect-avro")
-          .withEnv("CONNECT_CONFIG_STORAGE_TOPIC", "testconnect-avro-config")
-          .withEnv("CONNECT_OFFSET_STORAGE_TOPIC", "testconnect-avro-offsets")
-          .withEnv("CONNECT_STATUS_STORAGE_TOPIC", "testconnect-avro-status")
-          .withEnv("CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR", "1")
-          .withEnv("CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR", "1")
-          .withEnv("CONNECT_STATUS_STORAGE_REPLICATION_FACTOR", "1")
-//          .withEnv("CONNECT_OFFSET_FLUSH_INTERVAL_MS", "100")
-          .withEnv("CONNECT_KEY_CONVERTER", "io.confluent.connect.avro.AvroConverter")
-          .withEnv("CONNECT_VALUE_CONVERTER", "io.confluent.connect.avro.AvroConverter")
-          .withEnv("CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL",
-              "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081")
-          .withEnv("CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL",
-              "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081")
-          .withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter")
.withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter") -// - .withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost").withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO") - .withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars") - .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", - "/etc/kafka-connect/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY) -// .waitingFor( Wait.forHealthcheck() ); - .waitingFor(Wait.forLogMessage(".*Kafka Connect started.*", 1)); - - @BeforeAll - static void setUp() { - assert(connector != null); - } - - @DisplayName("Local MessagingService connection tests") - @Nested - class MessagingServiceConnectionTests { - @DisplayName("Setup the dockerized platform") - @Test -// @Disabled - void setupDockerizedPlatformTest() { - String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - assertNotNull(host); - try { - Thread.sleep(36000000l); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - } -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java deleted file mode 100644 index 6e300cd..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java +++ /dev/null @@ -1,46 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import java.io.File; -import org.junit.jupiter.api.BeforeAll; -import org.testcontainers.containers.DockerComposeContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; -import org.testcontainers.containers.wait.strategy.Wait; - -@Testcontainers -public interface MessagingServiceFullLocalSetupApache extends TestConstants { - - @Container - public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS = - new DockerComposeContainer( - new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml")) - .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME) - .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME) - .withEnv("PUBSUB_TAG", PUBSUB_TAG) - .withServices(SERVICES) - .withLocalCompose(true) - .withPull(false) - .waitingFor("solbroker_1", - Wait.forLogMessage(".*System startup complete.*", 1) ); - - public static final String dockerReportedAddress = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - public static final String dockerIpAddress = (dockerReportedAddress == "localhost" || dockerReportedAddress == "127.0.0.1" ? 
-      Tools.getIpAddress() : dockerReportedAddress);
-
-  @Container
-  public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA =
-      new DockerComposeContainer(
-          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-apache.yml"))
-              .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC)
-              .withEnv("KAFKA_HOST", dockerIpAddress)
-              .withLocalCompose(true);
-
-  @BeforeAll
-  static void checkContainer() {
-    String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
-    assertNotNull(host);
-  }
-}
-
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java
deleted file mode 100644
index 727deb4..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-
-import java.io.File;
-import org.junit.jupiter.api.BeforeAll;
-import org.testcontainers.containers.DockerComposeContainer;
-import org.testcontainers.junit.jupiter.Container;
-import org.testcontainers.junit.jupiter.Testcontainers;
-import org.testcontainers.containers.wait.strategy.Wait;
-
-@Testcontainers
-public interface MessagingServiceFullLocalSetupConfluent extends TestConstants {
-
-  @Container
-  public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS =
-      new DockerComposeContainer(
-          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml"))
-              .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME)
-              .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME)
-              .withEnv("PUBSUB_TAG", PUBSUB_TAG)
-              .withServices(SERVICES)
-              .withLocalCompose(true)
-              .withPull(false)
-              .waitingFor("solbroker_1",
-                  Wait.forLogMessage(".*System startup complete.*", 1) );
-
-  @Container
-  public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA =
-      new DockerComposeContainer(
-          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-confluent.yml"))
-              .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC)
-              .withEnv("KAFKA_HOST", COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080))
-              .withLocalCompose(true)
-              .waitingFor("schema-registry_1",
-                  Wait.forHttp("/subjects").forStatusCode(200));
-
-  @BeforeAll
-  static void checkContainer() {
-    String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
-    assertNotNull(host);
-  }
-}
-
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java
deleted file mode 100644
index a0c64a9..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java
+++ /dev/null
@@ -1,74 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import java.util.HashMap;
-import java.util.Map;
-import org.junit.jupiter.api.DisplayName;
-import org.junit.jupiter.api.Test;
-
-import com.solace.connector.kafka.connect.source.SolaceSourceTask;
-import com.solacesystems.jcsmp.JCSMPChannelProperties;
-import com.solacesystems.jcsmp.JCSMPProperties;
-import com.solacesystems.jcsmp.JCSMPSession;
-
-public class ParameterTesting implements TestConstants {
-
-
-  @DisplayName("Default Parameter test")
-  @Test
-  void CheckDefaultParams() {
-    SolaceSourceTask testSourceTask = new
-        SolaceSourceTask();
-
-    Map props = new HashMap();
-    /* ("sol.host", "tcp://" + MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_PUBSUBPLUS
-        .getServiceHost("solbroker_1", 55555) + ":55555");
-    jobject.addProperty("sol.username", SOL_ADMINUSER_NAME);
-    jobject.addProperty("sol.password", SOL_ADMINUSER_PW);
-    jobject.addProperty("sol.vpn_name", SOL_VPN); */
-    props.put("sol.host", "tcp://" + MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_PUBSUBPLUS
-        .getServiceHost("solbroker_1", 55555) + ":55555");
-    props.put("sol.username", SOL_ADMINUSER_NAME);
-    props.put("sol.password", SOL_ADMINUSER_PW);
-    props.put("sol.vpn_name", SOL_VPN);
-
-    testSourceTask.start(props);
-    JCSMPSession solSession = testSourceTask.getSolSession();
-    assert(!solSession.isClosed());
-    JCSMPChannelProperties chanProperties =
-        (JCSMPChannelProperties) solSession.getProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES);
-    boolean GENERATE_SEND_TIMESTAMPS = (boolean) solSession.getProperty(JCSMPProperties.GENERATE_SEND_TIMESTAMPS);
-    solSession.getProperty(JCSMPProperties.GENERATE_RCV_TIMESTAMPS);
-    solSession.getProperty(JCSMPProperties.GENERATE_SEQUENCE_NUMBERS);
-    solSession.getProperty(JCSMPProperties.CALCULATE_MESSAGE_EXPIRATION);
-    solSession.getProperty(JCSMPProperties.PUB_MULTI_THREAD);
-    solSession.getProperty(JCSMPProperties.MESSAGE_CALLBACK_ON_REACTOR);
-    solSession.getProperty(JCSMPProperties.IGNORE_DUPLICATE_SUBSCRIPTION_ERROR);
-    solSession.getProperty(JCSMPProperties.IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR);
-    solSession.getProperty(JCSMPProperties.NO_LOCAL);
-    solSession.getProperty(JCSMPProperties.SUB_ACK_WINDOW_SIZE);
-    solSession.getProperty(JCSMPProperties.SUBSCRIBER_LOCAL_PRIORITY);
-    solSession.getProperty(JCSMPProperties.SUBSCRIBER_NETWORK_PRIORITY);
-    solSession.getProperty(JCSMPProperties.REAPPLY_SUBSCRIPTIONS);
-    solSession.getProperty(JCSMPProperties.AUTHENTICATION_SCHEME);
-    solSession.getProperty(JCSMPProperties.KRB_SERVICE_NAME);
-    solSession.getProperty(JCSMPProperties.SSL_CONNECTION_DOWNGRADE_TO);
-    solSession.getProperty(JCSMPProperties.SSL_CIPHER_SUITES);
-    solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE);
-    solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE_DATE);
-    solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE);
-    solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD);
-    solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT);
-    solSession.getProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST);
-    solSession.getProperty(JCSMPProperties.SSL_KEY_STORE);
-    solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD);
-    solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT);
-    solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT);
-    solSession.getProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD);
-
-
-
-
-
-    testSourceTask.stop();
-  }
-
-}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java
index 1e936cc..1c5871c 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java
@@ -1,44 +1,56 @@
 package com.solace.connector.kafka.connect.source.it;
 
-import java.io.File;
-import java.io.IOException;
-import java.time.Instant;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.kafka.clients.admin.AdminClient;
-import org.apache.kafka.clients.admin.NewTopic;
-import org.apache.kafka.clients.producer.ProducerConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
 import com.google.gson.JsonElement;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
-
+import com.solace.connector.kafka.connect.source.SolaceSourceConnector;
+import com.solace.connector.kafka.connect.source.VersionUtil;
+import com.solace.connector.kafka.connect.source.it.util.KafkaConnection;
 import okhttp3.MediaType;
 import okhttp3.OkHttpClient;
 import okhttp3.Request;
 import okhttp3.RequestBody;
 import okhttp3.Response;
+import okhttp3.ResponseBody;
+import org.apache.commons.io.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.time.Duration;
+import java.util.Iterator;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 public class SolaceConnectorDeployment implements TestConstants {
 
-  static Logger logger = LoggerFactory.getLogger(SolaceConnectorDeployment.class.getName());
+  static Logger logger = LoggerFactory.getLogger(SolaceConnectorDeployment.class);
 
-  static String kafkaTestTopic = KAFKA_SOURCE_TOPIC + "-" + Instant.now().getEpochSecond();
-  OkHttpClient client = new OkHttpClient();
-  String connectorAddress = new TestConfigProperties().getProperty("kafka.connect_rest_url");
+  private final OkHttpClient client = new OkHttpClient();
+  private final KafkaConnection kafkaConnection;
+  private final String kafkaTopic;
+
+  public SolaceConnectorDeployment(KafkaConnection kafkaConnection, String kafkaTopic) {
+    this.kafkaConnection = kafkaConnection;
+    this.kafkaTopic = kafkaTopic;
+  }
 
   public void waitForConnectorRestIFUp() {
-    Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build();
+    Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connector-plugins").build();
     Response response = null;
     do {
       try {
-        Thread.sleep(1000l);
+        Thread.sleep(1000L);
         response = client.newCall(request).execute();
       } catch (IOException | InterruptedException e) {
         // Continue looping
       }
@@ -46,25 +58,12 @@ public void waitForConnectorRestIFUp() {
     } while (response == null || !response.isSuccessful());
   }
 
-  public void provisionKafkaTestTopic() {
-    // Create a new kafka test topic to use
-    String bootstrapServers = new TestConfigProperties().getProperty("kafka.bootstrap_servers");
-    Properties properties = new Properties();
-    properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
-    AdminClient adminClient = AdminClient.create(properties);
-    NewTopic newTopic = new NewTopic(kafkaTestTopic, 5, (short) 1); // new NewTopic(topicName, numPartitions,
-                                                                    // replicationFactor)
-    List newTopics = new ArrayList();
-    newTopics.add(newTopic);
-    adminClient.createTopics(newTopics);
-    adminClient.close();
-  }
-
-  void startConnector() {
-    startConnector(null); // Defaults only, no override
+  void startConnector(Properties props) {
+    startConnector(props, false);
   }
 
-  void startConnector(Properties props) {
+  void startConnector(Properties props, boolean expectStartFail) {
+    Gson gson = new GsonBuilder().setPrettyPrinting().create();
     String configJson = null;
     // Prep config files
     try {
@@ -76,11 +75,7 @@ void startConnector(Properties props) {
       JsonElement jconfig = jtree.getAsJsonObject().get("config");
       JsonObject jobject = jconfig.getAsJsonObject();
       // Set properties defaults
-      jobject.addProperty("sol.host", "tcp://" + new TestConfigProperties().getProperty("sol.host") + ":55555");
-      jobject.addProperty("sol.username", SOL_ADMINUSER_NAME);
-      jobject.addProperty("sol.password", SOL_ADMINUSER_PW);
-      jobject.addProperty("sol.vpn_name", SOL_VPN);
-      jobject.addProperty("kafka.topic", kafkaTestTopic);
+      jobject.addProperty("kafka.topic", kafkaTopic);
       jobject.addProperty("sol.topics", SOL_TOPICS);
       jobject.addProperty("sol.queue", SOL_QUEUE);
       jobject.addProperty("sol.message_processor_class", CONN_MSGPROC_CLASS);
@@ -89,12 +84,7 @@ void startConnector(Properties props) {
       jobject.addProperty("key.converter", "org.apache.kafka.connect.storage.StringConverter");
       jobject.addProperty("tasks.max", "1");
       // Override properties if provided
-      if (props != null) {
-        props.forEach((key, value) -> {
-          jobject.addProperty((String) key, (String) value);
-        });
-      }
-      Gson gson = new Gson();
+      props.forEach((key, value) -> jobject.addProperty((String) key, (String) value));
       configJson = gson.toJson(jtree);
     } catch (IOException e) {
       e.printStackTrace();
@@ -104,38 +94,52 @@ void startConnector(Properties props) {
     try {
       // check presence of Solace plugin: curl
       // http://18.218.82.209:8083/connector-plugins | jq
-      Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build();
-      Response response = client.newCall(request).execute();
-      assert (response.isSuccessful());
-      String results = response.body().string();
-      logger.info("Available connector plugins: " + results);
-      assert (results.contains("solace"));
+      Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connector-plugins").build();
+      try (Response response = client.newCall(request).execute()) {
+        assertTrue(response.isSuccessful());
+        JsonArray results = responseBodyToJson(response.body()).getAsJsonArray();
+        logger.info("Available connector plugins: " + gson.toJson(results));
+        boolean hasConnector = false;
+        for (Iterator resultsIter = results.iterator(); !hasConnector && resultsIter.hasNext();) {
+          JsonObject connectorPlugin = resultsIter.next().getAsJsonObject();
+          if (connectorPlugin.get("class").getAsString().equals(SolaceSourceConnector.class.getName())) {
+            hasConnector = true;
+            assertEquals("source", connectorPlugin.get("type").getAsString());
+            assertEquals(VersionUtil.getVersion(), connectorPlugin.get("version").getAsString());
+          }
+        }
+        assertTrue(hasConnector, String.format("Could not find connector %s : %s",
+            SolaceSourceConnector.class.getName(), gson.toJson(results)));
+      }
 
       // Delete a running connector, if any
-      Request deleterequest = new Request.Builder().url("http://" + connectorAddress + "/connectors/solaceSourceConnector")
-          .delete().build();
-      Response deleteresponse = client.newCall(deleterequest).execute();
-      logger.info("Delete response: " + deleteresponse);
+      deleteConnector();
 
       // configure plugin: curl -X POST -H "Content-Type: application/json" -d
       // @solace_source_properties.json http://18.218.82.209:8083/connectors
-      Request configrequest = new Request.Builder().url("http://" + connectorAddress + "/connectors")
+      Request configrequest = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connectors")
           .post(RequestBody.create(configJson, MediaType.parse("application/json"))).build();
-      Response configresponse = client.newCall(configrequest).execute();
-      // if (!configresponse.isSuccessful()) throw new IOException("Unexpected code "
-      // + configresponse);
-      String configresults = configresponse.body().string();
-      logger.info("Connector config results: " + configresults);
+      try (ResponseBody configresponse = client.newCall(configrequest).execute().body()) {
+        assertNotNull(configresponse);
+        String configresults = configresponse.string();
+        logger.info("Connector config results: " + configresults);
+      }
 
       // check success
-      Request statusrequest = new Request.Builder()
-          .url("http://" + connectorAddress + "/connectors/solaceSourceConnector/status").build();
-      Response statusresponse;
-      long starttime = System.currentTimeMillis();
-      do {
-        statusresponse = client.newCall(statusrequest).execute();
-        assert (System.currentTimeMillis() - starttime < 10000l); // don't wait forever
-      } while (!statusresponse.body().string().contains("state\":\"RUNNING"));
+      AtomicReference statusResponse = new AtomicReference<>(new JsonObject());
+      assertTimeoutPreemptively(Duration.ofSeconds(10), () -> {
+        JsonObject connectorStatus;
+        do {
+          connectorStatus = getConnectorStatus();
+          statusResponse.set(connectorStatus);
+        } while (!(expectStartFail ? "FAILED" : "RUNNING").equals(Optional.ofNullable(connectorStatus)
+            .map(a -> a.getAsJsonArray("tasks"))
+            .map(a -> a.size() > 0 ? a.get(0) : null)
+            .map(JsonElement::getAsJsonObject)
+            .map(a -> a.get("state"))
+            .map(JsonElement::getAsString)
+            .orElse("")));
+      }, () -> "Timed out while waiting for connector to start: " + gson.toJson(statusResponse.get()));
       Thread.sleep(10000); // Give some extra time to start
       logger.info("Connector is now RUNNING");
     } catch (IOException e) {
@@ -145,4 +149,35 @@ void startConnector(Properties props) {
       e.printStackTrace();
     }
   }
+
+  public void deleteConnector() throws IOException {
+    Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connectors/solaceSourceConnector")
+        .delete().build();
+    try (Response response = client.newCall(request).execute()) {
+      logger.info("Delete response: " + response);
+    }
+  }
+
+  public JsonObject getConnectorStatus() {
+    Request request = new Request.Builder()
+        .url(kafkaConnection.getConnectUrl() + "/connectors/solaceSourceConnector/status").build();
+    return assertTimeoutPreemptively(Duration.ofSeconds(30), () -> {
+      while (true) {
+        try (Response response = client.newCall(request).execute()) {
+          if (!response.isSuccessful()) {
+            continue;
+          }
+
+          return responseBodyToJson(response.body()).getAsJsonObject();
+        }
+      }
+    });
+  }
+
+  private JsonElement responseBodyToJson(ResponseBody responseBody) {
+    return Optional.ofNullable(responseBody)
+        .map(ResponseBody::charStream)
+        .map(s -> new JsonParser().parse(s))
+        .orElseGet(JsonObject::new);
+  }
 }
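The deployment helper above is a thin wrapper over the Kafka Connect REST API; the inline `curl` comments it keeps hint at the raw calls. For manual debugging, the equivalent requests would look roughly like this (a sketch — the host and port depend on the test environment; port 28083 is the one the old dockerized setup exposed):

```shell
curl http://localhost:28083/connector-plugins | jq                        # is the Solace plugin installed?
curl -X POST -H "Content-Type: application/json" \
     -d @solace_source_properties.json http://localhost:28083/connectors   # create the connector
curl http://localhost:28083/connectors/solaceSourceConnector/status | jq  # poll until RUNNING
curl -X DELETE http://localhost:28083/connectors/solaceSourceConnector    # tear down
```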
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java
new file mode 100644
index 0000000..edf45c8
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java
@@ -0,0 +1,188 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import com.solace.connector.kafka.connect.source.SolMessageProcessorIF;
+import com.solace.connector.kafka.connect.source.SolaceSourceConstants;
+import com.solace.connector.kafka.connect.source.SolaceSourceTask;
+import com.solace.connector.kafka.connect.source.it.util.extensions.NetworkPubSubPlusExtension;
+import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor;
+import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension;
+import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc;
+import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension;
+import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension.LogCaptor;
+import com.solace.test.integration.semp.v2.SempV2Api;
+import com.solacesystems.jcsmp.BytesXMLMessage;
+import com.solacesystems.jcsmp.JCSMPErrorResponseException;
+import com.solacesystems.jcsmp.JCSMPErrorResponseSubcodeEx;
+import com.solacesystems.jcsmp.JCSMPException;
+import com.solacesystems.jcsmp.JCSMPFactory;
+import com.solacesystems.jcsmp.JCSMPProperties;
+import com.solacesystems.jcsmp.JCSMPSession;
+import com.solacesystems.jcsmp.JCSMPStreamingPublishCorrelatingEventHandler;
+import com.solacesystems.jcsmp.Queue;
+import com.solacesystems.jcsmp.TextMessage;
+import com.solacesystems.jcsmp.XMLMessageProducer;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.kafka.connect.errors.ConnectException;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.time.Duration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively;
+
+@ExtendWith(ExecutorServiceExtension.class)
+@ExtendWith(LogCaptorExtension.class)
+@ExtendWith(NetworkPubSubPlusExtension.class)
+public class SolaceSourceTaskIT {
+  private SolaceSourceTask solaceSourceTask;
+  private Map connectorProperties;
+
+  private static final Logger logger = LoggerFactory.getLogger(SolaceSourceTaskIT.class);
+
+  @BeforeEach
+  void setUp(JCSMPProperties jcsmpProperties) {
+    solaceSourceTask = new SolaceSourceTask();
+
+    connectorProperties = new HashMap<>();
+    connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, TestConstants.CONN_MSGPROC_CLASS);
+    connectorProperties.put(SolaceSourceConstants.SOL_HOST, jcsmpProperties.getStringProperty(JCSMPProperties.HOST));
+    connectorProperties.put(SolaceSourceConstants.SOL_VPN_NAME, jcsmpProperties.getStringProperty(JCSMPProperties.VPN_NAME));
+    connectorProperties.put(SolaceSourceConstants.SOL_USERNAME, jcsmpProperties.getStringProperty(JCSMPProperties.USERNAME));
+    connectorProperties.put(SolaceSourceConstants.SOL_PASSWORD, jcsmpProperties.getStringProperty(JCSMPProperties.PASSWORD));
+  }
+
+  @AfterEach
+  void tearDown() {
+    solaceSourceTask.stop();
+  }
+
+  @Test
+  public void testFailTopicListenerInit() {
+    String topicName = RandomStringUtils.randomAlphanumeric(100);
+    connectorProperties.put(SolaceSourceConstants.SOL_TOPICS, String.join(",", topicName, topicName));
+
+    ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(connectorProperties));
+    assertThat(thrown.getMessage(), containsString("Failed to start topic consumer"));
+    assertThat(thrown.getCause(), instanceOf(JCSMPErrorResponseException.class));
+    assertEquals(JCSMPErrorResponseSubcodeEx.SUBSCRIPTION_ALREADY_PRESENT,
+        ((JCSMPErrorResponseException)thrown.getCause()).getSubcodeEx());
+  }
+
+  @Test
+  public void testFailQueueConsumerInit() {
+    connectorProperties.put(SolaceSourceConstants.SOL_QUEUE, RandomStringUtils.randomAlphanumeric(10));
+
+    ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(connectorProperties));
+    assertThat(thrown.getMessage(), containsString("Failed to start queue consumer"));
+    assertThat(thrown.getCause(), instanceOf(JCSMPErrorResponseException.class));
+    assertEquals(JCSMPErrorResponseSubcodeEx.UNKNOWN_QUEUE_NAME,
+        ((JCSMPErrorResponseException)thrown.getCause()).getSubcodeEx());
+  }
+
+  @ParameterizedTest(name = "[{index}] ignoreMessageProcessorError={0}")
+  @ValueSource(booleans = { true, false })
+  public void testMessageProcessorError(boolean ignoreMessageProcessorError,
+                                        JCSMPSession jcsmpSession,
+                                        SempV2Api sempV2Api,
+                                        Queue queue,
+                                        @ExecSvc ExecutorService executorService,
+                                        @LogCaptor(SolaceSourceTask.class) BufferedReader logReader) throws Exception {
+    String vpnName = connectorProperties.get(SolaceSourceConstants.SOL_VPN_NAME);
+
+    connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, BadMessageProcessor.class.getName());
+    connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, Boolean.toString(ignoreMessageProcessorError));
+    connectorProperties.put(SolaceSourceConstants.SOL_QUEUE, queue.getName());
+    solaceSourceTask.start(connectorProperties);
+
+    XMLMessageProducer messageProducer = jcsmpSession.getMessageProducer(new JCSMPStreamingPublishCorrelatingEventHandler() {
+      @Override
+      public void responseReceivedEx(Object o) {
+
+      }
+
+      @Override
+      public void handleErrorEx(Object o, JCSMPException e, long l) {
+
+      }
+    });
+
+    try {
+      TextMessage message = JCSMPFactory.onlyInstance().createMessage(TextMessage.class);
+      message.setText("Test payload");
+      messageProducer.send(message, queue);
+    } finally {
+      messageProducer.close();
+    }
+
+    assertTimeoutPreemptively(Duration.ofSeconds(30), () -> {
+      while (sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null)
+          .getData().getTxUnackedMsgCount() == 0) {
+        logger.info("Waiting for queue {} to deliver messages", queue.getName());
+        Thread.sleep(Duration.ofSeconds(1).toMillis());
+      }
+    }, String.format("Timed out while waiting for queue %s to deliver its messages", queue.getName()));
+
+    if (ignoreMessageProcessorError) {
+      Future future = executorService.submit(() -> {
+        String logLine;
+        do {
+          try {
+            logger.info("Waiting for error log message");
+            logLine = logReader.readLine();
+          } catch (IOException e) {
+            throw new RuntimeException(e);
+          }
+        } while (!logLine.contains("Encountered exception in message processing"));
+      });
+
+  @ParameterizedTest(name = "[{index}] ignoreMessageProcessorError={0}")
+  @ValueSource(booleans = { true, false })
+  public void testMessageProcessorError(boolean ignoreMessageProcessorError,
+                                        JCSMPSession jcsmpSession,
+                                        SempV2Api sempV2Api,
+                                        Queue queue,
+                                        @ExecSvc ExecutorService executorService,
+                                        @LogCaptor(SolaceSourceTask.class) BufferedReader logReader) throws Exception {
+    String vpnName = connectorProperties.get(SolaceSourceConstants.SOL_VPN_NAME);
+
+    connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, BadMessageProcessor.class.getName());
+    connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, Boolean.toString(ignoreMessageProcessorError));
+    connectorProperties.put(SolaceSourceConstants.SOL_QUEUE, queue.getName());
+    solaceSourceTask.start(connectorProperties);
+
+    XMLMessageProducer messageProducer = jcsmpSession.getMessageProducer(new JCSMPStreamingPublishCorrelatingEventHandler() {
+      @Override
+      public void responseReceivedEx(Object o) {
+
+      }
+
+      @Override
+      public void handleErrorEx(Object o, JCSMPException e, long l) {
+
+      }
+    });
+
+    try {
+      TextMessage message = JCSMPFactory.onlyInstance().createMessage(TextMessage.class);
+      message.setText("Test payload");
+      messageProducer.send(message, queue);
+    } finally {
+      messageProducer.close();
+    }
+
+    assertTimeoutPreemptively(Duration.ofSeconds(30), () -> {
+      while (sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null)
+          .getData().getTxUnackedMsgCount() == 0) {
+        logger.info("Waiting for queue {} to deliver messages", queue.getName());
+        Thread.sleep(Duration.ofSeconds(1).toMillis());
+      }
+    }, String.format("Timed out while waiting for queue %s to deliver its messages", queue.getName()));
+
+    if (ignoreMessageProcessorError) {
+      Future<?> future = executorService.submit(() -> {
+        String logLine;
+        do {
+          try {
+            logger.info("Waiting for error log message");
+            logLine = logReader.readLine();
+          } catch (IOException e) {
+            throw new RuntimeException(e);
+          }
+        } while (!logLine.contains("Encountered exception in message processing"));
+      });
+
+      assertThat(solaceSourceTask.poll(), empty());
+      future.get(30, TimeUnit.SECONDS);
+      solaceSourceTask.commit();
+      assertTimeoutPreemptively(Duration.ofSeconds(30), () -> {
+        while (!sempV2Api.monitor()
+            .getMsgVpnQueueMsgs(vpnName, queue.getName(), 1, null, null, null)
+            .getData()
+            .isEmpty()) {
+          logger.info("Waiting for queue {} to be empty", queue.getName());
+          Thread.sleep(Duration.ofSeconds(1).toMillis());
+        }
+      });
+    } else {
+      ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSourceTask.poll());
+      assertThat(thrown.getMessage(), containsString("Encountered exception in message processing"));
+      assertEquals(BadMessageProcessor.TEST_EXCEPTION, thrown.getCause());
+      solaceSourceTask.commit();
+      Thread.sleep(Duration.ofSeconds(5).toMillis());
+      assertEquals(1, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null)
+          .getData().getTxUnackedMsgCount());
+    }
+  }
+
+  public static class BadMessageProcessor extends SolSampleSimpleMessageProcessor {
+    static final RuntimeException TEST_EXCEPTION = new RuntimeException("Some processing failure");
+
+    @Override
+    public SolMessageProcessorIF process(String skey, BytesXMLMessage message) {
+      throw TEST_EXCEPTION;
+    }
+  }
+}
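
BadMessageProcessor above shows the hook that custom message processors implement. As a minimal sketch built on the same surface (the class name is hypothetical; it only adds logging before delegating to the sample processor):

    import com.solace.connector.kafka.connect.source.SolMessageProcessorIF;
    import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor;
    import com.solacesystems.jcsmp.BytesXMLMessage;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Logs each Solace message key, then delegates record creation to the sample processor.
    public class LoggingMessageProcessor extends SolSampleSimpleMessageProcessor {
        private static final Logger LOG = LoggerFactory.getLogger(LoggingMessageProcessor.class);

        @Override
        public SolMessageProcessorIF process(String skey, BytesXMLMessage message) {
            LOG.debug("Processing message with key {}", skey);
            return super.process(skey, message);
        }
    }

Such a class is wired in through the sol.message_processor_class connector property, exactly as the test above does with BadMessageProcessor.class.getName().

diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java
index 515da08..d8f9836 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java
@@ -1,67 +1,85 @@
 package com.solace.connector.kafka.connect.source.it;
 
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonObject;
+import com.solace.connector.kafka.connect.source.SolaceSourceConstants;
+import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider;
+import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaArgumentSource;
+import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaContext;
+import com.solace.connector.kafka.connect.source.it.util.extensions.NetworkPubSubPlusExtension;
+import com.solacesystems.jcsmp.BytesMessage;
+import com.solacesystems.jcsmp.JCSMPException;
+import com.solacesystems.jcsmp.JCSMPProperties;
+import com.solacesystems.jcsmp.JCSMPSession;
+import com.solacesystems.jcsmp.Message;
+import com.solacesystems.jcsmp.Queue;
+import com.solacesystems.jcsmp.TextMessage;
+import com.solacesystems.jcsmp.Topic;
+import com.solacesystems.jcsmp.impl.AbstractDestination;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Nested;
-import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.TestInstance;
 import org.junit.jupiter.api.TestInstance.Lifecycle;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.params.ParameterizedTest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import com.solacesystems.jcsmp.BytesMessage;
-import com.solacesystems.jcsmp.JCSMPException;
-import com.solacesystems.jcsmp.Message;
-import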
com.solacesystems.jcsmp.Queue; -import com.solacesystems.jcsmp.TextMessage; -import com.solacesystems.jcsmp.Topic; -import com.solacesystems.jcsmp.impl.AbstractDestination; import java.nio.ByteBuffer; +import java.time.Duration; import java.util.Arrays; import java.util.Properties; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; -public class SourceConnectorIT extends DockerizedPlatformSetupApache implements TestConstants { +@ExtendWith(NetworkPubSubPlusExtension.class) +@ExtendWith(KafkaArgumentsProvider.AutoDeleteSolaceConnectorDeploymentAfterEach.class) +public class SourceConnectorIT implements TestConstants { + + private Properties connectorProps; + private static final Logger LOG = LoggerFactory.getLogger(SourceConnectorIT.class); + static TestSolaceProducer solaceProducer; - static Logger logger = LoggerFactory.getLogger(SourceConnectorIT.class.getName()); - // Connectordeployment creates a Kafka topic "kafkaTestTopic", which is used - // next - static SolaceConnectorDeployment connectorDeployment = new SolaceConnectorDeployment(); - static TestKafkaConsumer kafkaConsumer = new TestKafkaConsumer(SolaceConnectorDeployment.kafkaTestTopic); - static TestSolaceProducer solaceProducer = new TestSolaceProducer(); - //////////////////////////////////////////////////// // Main setup/teardown @BeforeAll - static void setUp() { - connectorDeployment.waitForConnectorRestIFUp(); - connectorDeployment.provisionKafkaTestTopic(); - solaceProducer.setup(); - kafkaConsumer.run(); - try { - Thread.sleep(1000l); - } catch (InterruptedException e) { - e.printStackTrace(); - } + static void setUp(JCSMPSession jcsmpSession) throws Exception { + solaceProducer = new TestSolaceProducer(jcsmpSession); + solaceProducer.start(); + } + + @BeforeEach + public void beforeEach(JCSMPProperties jcsmpProperties) { + connectorProps = new Properties(); + connectorProps.setProperty(SolaceSourceConstants.SOL_HOST, String.format("tcp://%s:55555", NetworkPubSubPlusExtension.DOCKER_NET_PUBSUB_ALIAS)); + connectorProps.setProperty(SolaceSourceConstants.SOL_USERNAME, jcsmpProperties.getStringProperty(JCSMPProperties.USERNAME)); + connectorProps.setProperty(SolaceSourceConstants.SOL_PASSWORD, jcsmpProperties.getStringProperty(JCSMPProperties.PASSWORD)); + connectorProps.setProperty(SolaceSourceConstants.SOL_VPN_NAME, jcsmpProperties.getStringProperty(JCSMPProperties.VPN_NAME)); } @AfterAll static void cleanUp() { - kafkaConsumer.stop(); solaceProducer.close(); } //////////////////////////////////////////////////// // Test types - void messageToKafkaTest(Message msg, AbstractDestination destination, String expectedValue, Object expectedKey) { + void messageToKafkaTest(Message msg, AbstractDestination destination, String expectedValue, Object expectedKey, KafkaContext kafkaContext) { try { - // Clean catch queue first - // TODO: fix possible concurrency issue with cleaning/wring the queue later - TestKafkaConsumer.kafkaReceivedMessages.clear(); // Send Solace message if (destination instanceof Topic) { solaceProducer.sendMessageToTopic((Topic) destination, msg); @@ -69,11 +87,13 @@ void messageToKafkaTest(Message msg, AbstractDestination destination, String exp 
solaceProducer.sendMessageToQueue((Queue) destination, msg); } // Wait for Kafka to report message - ConsumerRecord record = TestKafkaConsumer.kafkaReceivedMessages.poll(5, TimeUnit.SECONDS); + ConsumerRecords records = kafkaContext.getConsumer().poll(Duration.ofSeconds(5)); + assertEquals(1, records.count()); + ConsumerRecord record = records.iterator().next(); // Evaluate message - assert (record != null); - logger.info("Kafka message received - Key=" + record.key() + ", Value=" + record.value()); - assert record.value().equals(expectedValue); + assertNotNull(record); + LOG.info("Kafka message received - Key=" + record.key() + ", Value=" + record.value()); + assertEquals(expectedValue, record.value()); // Check key if (expectedKey == null) { assert (record.key() == null); @@ -90,8 +110,6 @@ void messageToKafkaTest(Message msg, AbstractDestination destination, String exp } } catch (JCSMPException e1) { e1.printStackTrace(); - } catch (InterruptedException e) { - e.printStackTrace(); } } @@ -106,76 +124,86 @@ class SolaceConnectorSimpleMessageProcessorTests { //////////////////////////////////////////////////// // Scenarios - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic"); - prop.setProperty("sol.username", "test"); - prop.setProperty("sol.password", "test"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic"); + connectorProps.setProperty("sol.username", "test"); + connectorProps.setProperty("sol.password", "test"); } @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("1-Hello TextMessageToTopicTest world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "1-Hello TextMessageToTopicTest world!", null); + "1-Hello TextMessageToTopicTest world!", null, kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '2', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "2-Hello Topic world!", null); + "2-Hello Topic world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { '3', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "3-Hello attached world!", null); + "3-Hello attached world!", null, kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("4-Hello TextmessageToKafkaTest world!"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "4-Hello TextmessageToKafkaTest world!", null); + "4-Hello TextmessageToKafkaTest world!", null, kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '5', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "5-Hello Queue world!", null); + "5-Hello Queue world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { '6', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "6-Hello attached world!", null); + "6-Hello attached world!", null, kafkaContext); } } @@ -187,75 +215,85 @@ void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorNoneKeyedMessageProcessorTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); - prop.setProperty("sol.kafka_message_key", "NONE"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.kafka_message_key", "NONE"); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); } @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest1 world!", null); + "Hello TextMessageToTopicTest1 world!", null, kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", null); + "Hello Topic world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello attached world!", null); + "Hello attached world!", null, kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello TextmessageToKafkaTest world!", null); + "Hello TextmessageToKafkaTest world!", null, kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello Queue world!", null); + "Hello Queue world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello attached world!", null); + "Hello attached world!", null, kafkaContext); } } @@ -267,71 +305,81 @@ void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorDestinationKeyedMessageProcessorTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); - prop.setProperty("sol.kafka_message_key", "DESTINATION"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.kafka_message_key", "DESTINATION"); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); } @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest1 world!", "TestTopic1/SubTopic"); + "Hello TextMessageToTopicTest1 world!", "TestTopic1/SubTopic", kafkaContext); } @DisplayName("TextMessage-Topic-wildcard-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest2() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest2 world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic2/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest2 world!", "TestTopic2/SubTopic"); + "Hello TextMessageToTopicTest2 world!", "TestTopic2/SubTopic", kafkaContext); } @DisplayName("TextMessage-Topic-multi-level-wildcard-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest3() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest3(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest3 world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic3/SubTopic/SubSubTopic"), // expected value & key: - "Hello TextMessageToTopicTest3 world!", "TestTopic3/SubTopic/SubSubTopic"); + "Hello TextMessageToTopicTest3 world!", "TestTopic3/SubTopic/SubSubTopic", kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", "TestTopic1/SubTopic"); + "Hello Topic world!", "TestTopic1/SubTopic", kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello TextmessageToKafkaTest world!", SOL_QUEUE); + "Hello TextmessageToKafkaTest world!", SOL_QUEUE, kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello Queue world!", SOL_QUEUE); + "Hello Queue world!", SOL_QUEUE, kafkaContext); } } @@ -342,57 +390,63 @@ void kafkaConsumerBytesmessageToKafkaTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorCorrelationIdKeyedMessageProcessorTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); - prop.setProperty("sol.kafka_message_key", "CORRELATION_ID"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.kafka_message_key", "CORRELATION_ID"); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); } @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); msg.setCorrelationId("test"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest1 world!", "test"); + "Hello TextMessageToTopicTest1 world!", "test", kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); msg.setCorrelationId("test2"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", "test2"); + "Hello Topic world!", "test2", kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); msg.setCorrelationId("test3"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello TextmessageToKafkaTest world!", "test3"); + "Hello TextmessageToKafkaTest world!", "test3", kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); msg.setCorrelationId("test4"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello Queue world!", "test4"); + "Hello Queue world!", "test4", kafkaContext); } } @@ -403,58 +457,64 @@ void kafkaConsumerBytesmessageToKafkaTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorCorrelationIdAsBytesKeyedMessageProcessorTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); - prop.setProperty("sol.kafka_message_key", "CORRELATION_ID_AS_BYTES"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); - prop.setProperty("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.kafka_message_key", "CORRELATION_ID_AS_BYTES"); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); + connectorProps.setProperty("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter"); } @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); msg.setCorrelationId(new String(new byte[] { 1, 2, 3, 4 })); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest1 world!", new String(new byte[] { 1, 2, 3, 4 })); + "Hello TextMessageToTopicTest1 world!", new String(new byte[] { 1, 2, 3, 4 }), kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void 
kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); msg.setCorrelationId("test2"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", "test2"); + "Hello Topic world!", "test2", kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); msg.setCorrelationId("test3"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello TextmessageToKafkaTest world!", "test3"); + "Hello TextmessageToKafkaTest world!", "test3", kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); msg.setCorrelationId("test4"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "Hello Queue world!", "test4"); + "Hello Queue world!", "test4", kafkaContext); } } @@ -466,46 +526,81 @@ void kafkaConsumerBytesmessageToKafkaTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorSharedSubscriptionsTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); - prop.setProperty("sol.topics", "#share/group1/TestTopic1/SubTopic"); - prop.setProperty("tasks.max", "5"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.topics", "#share/group1/TestTopic1/SubTopic"); + connectorProps.setProperty("tasks.max", "5"); } @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest world!", null); + "Hello TextMessageToTopicTest world!", null, kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = 
solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", null); + "Hello Topic world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello attached world!", null); + "Hello attached world!", null, kafkaContext); } } + + @DisplayName("Solace connector provisioning tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorProvisioningTests { + private final Gson GSON = new GsonBuilder().setPrettyPrinting().create(); + + @BeforeEach + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + } + + @ParameterizedTest + @KafkaArgumentSource + void testFailPubSubConnection(KafkaContext kafkaContext) { + connectorProps.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); + connectorProps.setProperty("sol.vpn_name", RandomStringUtils.randomAlphanumeric(10)); + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps, true); + AtomicReference connectorStatus = new AtomicReference<>(new JsonObject()); + assertTimeoutPreemptively(Duration.ofMinutes(1), () -> { + JsonObject taskStatus; + do { + JsonObject status = kafkaContext.getSolaceConnectorDeployment().getConnectorStatus(); + connectorStatus.set(status); + taskStatus = status.getAsJsonArray("tasks").get(0).getAsJsonObject(); + } while (!taskStatus.get("state").getAsString().equals("FAILED")); + assertThat(taskStatus.get("trace").getAsString(), containsString("Message VPN Not Allowed")); + }, () -> "Timed out waiting for connector to fail: " + GSON.toJson(connectorStatus.get())); + } + } } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java deleted file mode 100644 index 06b1c9d..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java +++ /dev/null @@ -1,64 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import java.io.FileReader; -import java.io.IOException; -import java.util.Properties; - -public class TestConfigProperties { - - static String testConfigPropertiesFile = "src/integrationTest/resources/manual-setup.properties"; - // This class helps determine the docker host's IP address and avoids getting "localhost" - static class DockerHost { - static public String getIpAddress() { - String dockerReportedAddress = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA - .getServiceHost("kafka_1", 9092); - if (dockerReportedAddress == "localhost" || dockerReportedAddress == "127.0.0.1") { - return Tools.getIpAddress(); - } else { - return 
MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA - .getServiceHost("kafka_1", 9092); - } - } - } - - - private Properties properties = new Properties(); - - TestConfigProperties() { - try(FileReader fileReader = new FileReader(testConfigPropertiesFile)){ - properties.load(fileReader); - } catch (IOException e) { - e.printStackTrace(); - } - } - - String getProperty(String name) { - String configuredProperty = properties.getProperty(name); - if (configuredProperty != null) { - return configuredProperty; - } - switch(name) { - case "sol.host": - // No port here - return DockerHost.getIpAddress(); - - case "sol.username": - return "default"; - - case "sol.password": - return "default"; - - case "sol.vpn_name": - return "default"; - - case "kafka.connect_rest_url": - return (DockerHost.getIpAddress() + ":28083"); - - case "kafka.bootstrap_servers": - return (DockerHost.getIpAddress() + ":39092"); - - default: - return null; - } - } -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java index 22a21ec..7ff108c 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java @@ -1,30 +1,14 @@ package com.solace.connector.kafka.connect.source.it; +import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor; + public interface TestConstants { + String UNZIPPEDCONNECTORDESTINATION = "src/integrationTest/resources"; + String CONNECTORJSONPROPERTIESFILE = "etc/solace_source_properties.json"; - public static final String PUBSUB_TAG = "latest"; - public static final String PUBSUB_HOSTNAME = "solbroker"; - public static final String PUBSUB_NETWORK_NAME = "solace_msg_network"; - public static final String FULL_DOCKER_COMPOSE_FILE_PATH = "src/integrationTest/resources/"; - public static final String[] SERVICES = new String[]{"solbroker"}; - public static final long MAX_STARTUP_TIMEOUT_MSEC = 120000l; - public static final String DIRECT_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/direct-active"; - public static final int DIRECT_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550; - public static final String GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/guaranteed-active"; - public static final int GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550; + String SOL_TOPICS = "pubsubplus-test-topic"; + String SOL_QUEUE = "pubsubplus-test-queue"; + String CONN_MSGPROC_CLASS = SolSampleSimpleMessageProcessor.class.getName(); + String CONN_KAFKA_MSGKEY = "DESTINATION"; - public static final String CONNECTORSOURCE = "build/distributions/pubsubplus-connector-kafka-source.zip"; - public static final String UNZIPPEDCONNECTORDESTINATION = "src/integrationTest/resources"; - public static final String CONNECTORPROPERTIESFILE = "etc/solace_source.properties"; - public static final String CONNECTORJSONPROPERTIESFILE = "etc/solace_source_properties.json"; - - public static final String SOL_ADMINUSER_NAME = "default"; - public static final String SOL_ADMINUSER_PW = "default"; - public static final String SOL_VPN = "default"; - public static final String KAFKA_SOURCE_TOPIC = "kafka-source-test-topic"; - public static final String SOL_TOPICS = "pubsubplus-test-topic"; - public static final String SOL_QUEUE = "pubsubplus-test-queue"; - public static final String CONN_MSGPROC_CLASS = 
"com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor"; - public static final String CONN_KAFKA_MSGKEY = "DESTINATION"; - } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java deleted file mode 100644 index ea7519f..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java +++ /dev/null @@ -1,117 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import org.apache.kafka.clients.consumer.ConsumerConfig; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.errors.WakeupException; -import org.apache.kafka.common.serialization.ByteBufferDeserializer; -import org.apache.kafka.common.serialization.StringDeserializer; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.time.Duration; -import java.util.Arrays; -import java.util.Properties; -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; - -public class TestKafkaConsumer { - - // Queue to communicate received messages - public static BlockingQueue> kafkaReceivedMessages = new ArrayBlockingQueue<>(10); - - private Runnable myConsumerRunnable; - private String kafkaTopic; - Logger logger = LoggerFactory.getLogger(TestKafkaConsumer.class.getName()); - CountDownLatch latch = new CountDownLatch(1); - - public TestKafkaConsumer(String kafkaTestTopic) { - kafkaTopic = kafkaTestTopic; - } - - public void run() { - String bootstrapServers = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092) - + ":39092"; - String groupId = "test"; - - // latch for dealing with multiple threads - - // create the consumer runnable - logger.info("Creating the consumer thread"); - myConsumerRunnable = new ConsumerRunnable(bootstrapServers, groupId, kafkaTopic, latch); - - // start the thread - Thread myThread = new Thread(myConsumerRunnable); - myThread.start(); - try { - latch.await(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - public void stop() { - logger.info("Stopping consumer"); - ((ConsumerRunnable) myConsumerRunnable).shutdown(); - try { - latch.await(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - logger.info("Consumer has been stoppped"); - } - - public class ConsumerRunnable implements Runnable { - - private CountDownLatch latch; - private KafkaConsumer consumer; - private Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class.getName()); - - public ConsumerRunnable(String bootstrapServers, String groupId, String topic, CountDownLatch latch) { - this.latch = latch; - - // create consumer configs - Properties properties = new Properties(); - properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class.getName()); - properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); - properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId); - properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); - - // create consumer - consumer = new KafkaConsumer(properties); - // subscribe consumer to 
our topic(s) - consumer.subscribe(Arrays.asList(topic)); - } - - @Override - public void run() { - // poll for new data - try { - while (true) { - ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); - latch.countDown(); - for (ConsumerRecord record : records) { - kafkaReceivedMessages.put(record); - logger.info("Key: " + record.key() + ", Value: " + record.value()); - logger.info("Partition: " + record.partition() + ", Offset:" + record.offset()); - } - } - } catch (WakeupException e) { - logger.info("Received shutdown signal!"); - } catch (InterruptedException e) { - e.printStackTrace(); - } finally { - consumer.close(); - } - } - - public void shutdown() { - // the wakeup() method is a special method to interrupt consumer.poll() - // it will throw the exception WakeUpException - consumer.wakeup(); - } - } -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java index f29a946..e2a3c34 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java @@ -1,54 +1,45 @@ package com.solace.connector.kafka.connect.source.it; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.solacesystems.jcsmp.BytesMessage; import com.solacesystems.jcsmp.DeliveryMode; import com.solacesystems.jcsmp.EndpointProperties; import com.solacesystems.jcsmp.JCSMPException; import com.solacesystems.jcsmp.JCSMPFactory; -import com.solacesystems.jcsmp.JCSMPProperties; import com.solacesystems.jcsmp.JCSMPSession; -import com.solacesystems.jcsmp.JCSMPStreamingPublishEventHandler; +import com.solacesystems.jcsmp.JCSMPStreamingPublishCorrelatingEventHandler; import com.solacesystems.jcsmp.Message; import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.TextMessage; import com.solacesystems.jcsmp.Topic; import com.solacesystems.jcsmp.XMLMessageProducer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public class TestSolaceProducer { - - static Logger logger = LoggerFactory.getLogger(SourceConnectorIT.class.getName()); - private JCSMPSession session; +public class TestSolaceProducer implements AutoCloseable { + + private static final Logger logger = LoggerFactory.getLogger(TestSolaceProducer.class); + private final JCSMPSession session; private XMLMessageProducer producer; - public void setup() { - TestConfigProperties configProps = new TestConfigProperties(); - final JCSMPProperties properties = new JCSMPProperties(); - properties.setProperty(JCSMPProperties.HOST, "tcp://" + configProps.getProperty("sol.host") + ":55555"); // host:port - properties.setProperty(JCSMPProperties.USERNAME, configProps.getProperty("sol.username")); // client-username - properties.setProperty(JCSMPProperties.VPN_NAME, configProps.getProperty("sol.vpn_name")); // message-vpn - properties.setProperty(JCSMPProperties.PASSWORD, configProps.getProperty("sol.password")); // client-password - try { - session = JCSMPFactory.onlyInstance().createSession(properties); - session.connect(); - producer = session.getMessageProducer(new JCSMPStreamingPublishEventHandler() { - @Override - public void responseReceived(String messageID) { - logger.info("Producer received response for msg: " + messageID); - } - @Override - public void handleError(String messageID, JCSMPException e, long timestamp) { - logger.info("Producer 
received error for msg: %s@%s - %s%n", - messageID,timestamp,e); - } - }); - } catch (JCSMPException e1) { - e1.printStackTrace(); - } + public TestSolaceProducer(JCSMPSession session) { + this.session = session; } - + + + public void start() throws JCSMPException { + producer = session.getMessageProducer(new JCSMPStreamingPublishCorrelatingEventHandler() { + @Override + public void responseReceivedEx(Object correlationKey) { + logger.info("Producer received response for msg: " + correlationKey); + } + + @Override + public void handleErrorEx(Object correlationKey, JCSMPException e, long timestamp) { + logger.error("Producer received error for msg: {} {}", correlationKey, timestamp, e); + } + }); + } + public TextMessage createTextMessage(String contents) { TextMessage textMessage = JCSMPFactory.onlyInstance().createMessage(TextMessage.class); textMessage.setText(contents); @@ -60,20 +51,20 @@ public BytesMessage createBytesMessage(byte[] contents) { bytesMessage.setData(contents); return bytesMessage; } - + public Topic defineTopic(String topicName) { return JCSMPFactory.onlyInstance().createTopic(topicName); } - + public Queue defineQueue(String queueName) { return JCSMPFactory.onlyInstance().createQueue(queueName); } - + public void sendMessageToTopic(Topic topic, Message msg) throws JCSMPException { producer.send(msg,topic); logger.info("Message sent to Solace topic " + topic.toString()); } - + public void resetQueue(String queueName) { try { final Queue queue = JCSMPFactory.onlyInstance().createQueue(queueName); @@ -89,14 +80,15 @@ public void resetQueue(String queueName) { e.printStackTrace(); } } - + public void sendMessageToQueue(Queue queue, Message msg) throws JCSMPException { msg.setDeliveryMode(DeliveryMode.PERSISTENT); producer.send(msg,queue); logger.info("Message sent to Solace queue " + queue.toString()); } - + + @Override public void close() { - session.closeSession(); + producer.close(); } } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java index ed08533..9b604a6 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java @@ -1,39 +1,21 @@ package com.solace.connector.kafka.connect.source.it; +import com.solace.connector.kafka.connect.source.VersionUtil; + import java.io.IOException; -import java.net.InterfaceAddress; -import java.net.NetworkInterface; -import java.net.SocketException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; public class Tools { - static public String getIpAddress() { - Set HostAddresses = new HashSet<>(); - try { - for (NetworkInterface ni : Collections.list(NetworkInterface.getNetworkInterfaces())) { - if (!ni.isLoopback() && ni.isUp() && ni.getHardwareAddress() != null) { - for (InterfaceAddress ia : ni.getInterfaceAddresses()) { - if (ia.getBroadcast() != null) { //If limited to IPV4 - HostAddresses.add(ia.getAddress().getHostAddress()); - } - } - } - } - } catch (SocketException e) { } - return (String) HostAddresses.toArray()[0]; - } static public String getUnzippedConnectorDirName() { String connectorUnzippedPath = null; try { DirectoryStream dirs = Files.newDirectoryStream( - Paths.get(TestConstants.UNZIPPEDCONNECTORDESTINATION), 
"pubsubplus-connector-kafka-*"); + Paths.get(TestConstants.UNZIPPEDCONNECTORDESTINATION), + "pubsubplus-connector-kafka-source-" + VersionUtil.getVersion()); for (Path entry: dirs) { connectorUnzippedPath = entry.toString(); break; //expecting only one diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java new file mode 100644 index 0000000..872aa74 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java @@ -0,0 +1,52 @@ +package com.solace.connector.kafka.connect.source.it.util; + +import org.testcontainers.containers.GenericContainer; + +import java.util.Objects; + +public class KafkaConnection { + private final String bootstrapServers; + private final String connectUrl; + private final GenericContainer kafkaContainer; + private final GenericContainer connectContainer; + + public KafkaConnection(String bootstrapServers, String connectUrl, GenericContainer kafkaContainer, + GenericContainer connectContainer) { + this.bootstrapServers = bootstrapServers; + this.connectUrl = connectUrl; + this.kafkaContainer = kafkaContainer; + this.connectContainer = connectContainer; + } + + public String getBootstrapServers() { + return bootstrapServers; + } + + public String getConnectUrl() { + return connectUrl; + } + + public GenericContainer getKafkaContainer() { + return kafkaContainer; + } + + public GenericContainer getConnectContainer() { + return connectContainer; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + KafkaConnection that = (KafkaConnection) o; + return Objects.equals(bootstrapServers, that.bootstrapServers) && + Objects.equals(connectUrl, that.connectUrl) && + Objects.equals(kafkaContainer, that.kafkaContainer) && + Objects.equals(connectContainer, that.connectContainer); + } + + @Override + public int hashCode() { + return Objects.hash(bootstrapServers, connectUrl, kafkaContainer, connectContainer); + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java new file mode 100644 index 0000000..2b022b3 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java @@ -0,0 +1,406 @@ +package com.solace.connector.kafka.connect.source.it.util.extensions; + +import com.solace.connector.kafka.connect.source.it.SolaceConnectorDeployment; +import com.solace.connector.kafka.connect.source.it.util.KafkaConnection; +import com.solace.connector.kafka.connect.source.it.util.testcontainers.BitnamiKafkaConnectContainer; +import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaConnectContainer; +import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaControlCenterContainer; +import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaSchemaRegistryContainer; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.DeleteTopicsResult; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import 
org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.KafkaFuture;
+import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
+import org.apache.kafka.common.serialization.ByteBufferDeserializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.junit.jupiter.api.extension.AfterEachCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.ExtensionContext.Namespace;
+import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.ArgumentsProvider;
+import org.junit.jupiter.params.provider.ArgumentsSource;
+import org.junitpioneer.jupiter.CartesianAnnotationConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+import java.io.IOException;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Stream;
+
+public class KafkaArgumentsProvider implements ArgumentsProvider,
+    CartesianAnnotationConsumer<KafkaArgumentsProvider.KafkaArgumentSource> {
+  private static final Logger LOG = LoggerFactory.getLogger(KafkaArgumentsProvider.class);
+
+  @Override
+  public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
+    KafkaConnection bitnamiCxn = context.getRoot()
+        .getStore(KafkaNamespace.BITNAMI.getNamespace())
+        .getOrComputeIfAbsent(BitnamiResource.class, c -> {
+          LOG.info("Creating Bitnami Kafka");
+          BitnamiKafkaConnectContainer container = new BitnamiKafkaConnectContainer()
+              .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET);
+          if (!container.isCreated()) {
+            container.start();
+          }
+          return new BitnamiResource(container);
+        }, BitnamiResource.class)
+        .getKafkaConnection();
+
+    KafkaConnection confluentCxn = context.getRoot()
+        .getStore(KafkaNamespace.CONFLUENT.getNamespace())
+        .getOrComputeIfAbsent(ConfluentResource.class, c -> {
+          LOG.info("Creating Confluent Kafka");
+          KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka")
+              .withTag("6.2.1"))
+              .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET)
+              .withNetworkAliases("kafka");
+          if (!kafkaContainer.isCreated()) {
+            kafkaContainer.start();
+          }
+
+          ConfluentKafkaSchemaRegistryContainer schemaRegistryContainer =
+              new ConfluentKafkaSchemaRegistryContainer(kafkaContainer)
+                  .withNetworkAliases("schema-registry");
+          if (!schemaRegistryContainer.isCreated()) {
+            schemaRegistryContainer.start();
+          }
+
+          ConfluentKafkaControlCenterContainer controlCenterContainer =
+              new ConfluentKafkaControlCenterContainer(kafkaContainer, schemaRegistryContainer);
+          if (!controlCenterContainer.isCreated()) {
+            controlCenterContainer.start();
+          }
+
+          ConfluentKafkaConnectContainer connectContainer =
+              new ConfluentKafkaConnectContainer(kafkaContainer, schemaRegistryContainer);
+          if (!connectContainer.isCreated()) {
+            connectContainer.start();
+          }
+          return new ConfluentResource(
+              new KafkaContainerResource<>(kafkaContainer),
+              new KafkaContainerResource<>(schemaRegistryContainer),
+              new KafkaContainerResource<>(controlCenterContainer),
+              new KafkaContainerResource<>(connectContainer));
+        }, ConfluentResource.class)
+        .getKafkaConnection();
+
+    return Stream.of(
+        Arguments.of(createKafkaContext(bitnamiCxn, KafkaNamespace.BITNAMI, context)),
+        Arguments.of(createKafkaContext(confluentCxn, KafkaNamespace.CONFLUENT, context))
+    );
+  }
+
+  private KafkaContext createKafkaContext(KafkaConnection connection, KafkaNamespace namespace,
+                                          ExtensionContext context) {
+    AdminClient adminClient = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(AdminClientResource.class, c -> {
+          LOG.info("Creating Kafka admin client for {}", connection.getBootstrapServers());
+          Properties properties = new Properties();
+          properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, connection.getBootstrapServers());
+          AdminClient newAdminClient = AdminClient.create(properties);
+          return new AdminClientResource(newAdminClient);
+        }, AdminClientResource.class)
+        .getAdminClient();
+
+    String kafkaTopic = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(TopicResource.class, c -> {
+          String topicName = RandomStringUtils.randomAlphanumeric(100);
+          LOG.info("Creating Kafka topic {}", topicName);
+          try {
+            adminClient.createTopics(Collections.singleton(new NewTopic(topicName, 5, (short) 1)))
+                .all().get(5, TimeUnit.SECONDS);
+          } catch (InterruptedException | ExecutionException | TimeoutException e) {
+            throw new RuntimeException(e);
+          }
+          return new TopicResource(topicName, adminClient);
+        }, TopicResource.class)
+        .getTopicName();
+
+    SolaceConnectorDeployment connectorDeployment = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(ConnectorDeploymentResource.class, c -> {
+          SolaceConnectorDeployment deployment = new SolaceConnectorDeployment(connection, kafkaTopic);
+          deployment.waitForConnectorRestIFUp();
+          return new ConnectorDeploymentResource(deployment);
+        }, ConnectorDeploymentResource.class)
+        .getDeployment();
+
+    KafkaConsumer<ByteBuffer, String> consumer = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(ConsumerResource.class, c -> {
+          LOG.info("Creating Kafka consumer for {}", connection.getBootstrapServers());
+          Properties properties = new Properties();
+          properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, connection.getBootstrapServers());
+          properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class
+              .getName());
+          properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class
+              .getName());
+          properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, RandomStringUtils.randomAlphanumeric(50));
+          properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+          // create consumer
+          KafkaConsumer<ByteBuffer, String> newConsumer = new KafkaConsumer<>(properties);
+          // subscribe consumer to our topic(s)
+          newConsumer.subscribe(Collections.singleton(kafkaTopic));
+          return new ConsumerResource(newConsumer);
+        }, ConsumerResource.class)
+        .getConsumer();
+
+    return new KafkaContext(namespace, connection, adminClient, connectorDeployment, consumer);
+  }
+
+  @Override
+  public void accept(KafkaArgumentSource kafkaArgumentSource) {
+
+  }
+
+  @Target(ElementType.METHOD)
+  @Retention(RetentionPolicy.RUNTIME)
+  @ArgumentsSource(KafkaArgumentsProvider.class)
+  public @interface KafkaArgumentSource {
+
+  }
+
+  public static class AutoDeleteSolaceConnectorDeploymentAfterEach implements AfterEachCallback {
+    @Override
+    public void afterEach(ExtensionContext context) throws Exception {
+      for (KafkaNamespace namespace : KafkaNamespace.values()) {
+        ConnectorDeploymentResource deploymentResource = context.getRoot()
+            .getStore(namespace.getNamespace())
+            .get(ConnectorDeploymentResource.class, ConnectorDeploymentResource.class);
+        if (deploymentResource != null) {
+          deploymentResource.close();
+        }
+      }
+    }
+  }
+
+  public static class KafkaContext {
+    private final KafkaNamespace namespace;
+    private final KafkaConnection connection;
+    private final AdminClient adminClient;
+    private final SolaceConnectorDeployment solaceConnectorDeployment;
+    private final KafkaConsumer<ByteBuffer, String> consumer;
+
+    private KafkaContext(KafkaNamespace namespace, KafkaConnection connection, AdminClient adminClient,
+                         SolaceConnectorDeployment solaceConnectorDeployment,
+                         KafkaConsumer<ByteBuffer, String> consumer) {
+      this.namespace = namespace;
+      this.connection = connection;
+      this.consumer = consumer;
+      this.solaceConnectorDeployment = solaceConnectorDeployment;
+      this.adminClient = adminClient;
+    }
+
+    public KafkaConnection getConnection() {
+      return connection;
+    }
+
+    public AdminClient getAdminClient() {
+      return adminClient;
+    }
+
+    public SolaceConnectorDeployment getSolaceConnectorDeployment() {
+      return solaceConnectorDeployment;
+    }
+
+    public KafkaConsumer<ByteBuffer, String> getConsumer() {
+      return consumer;
+    }
+
+    @Override
+    public String toString() {
+      return namespace.name();
+    }
+  }
+
+  private static class ConsumerResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(ConsumerResource.class);
+    private final KafkaConsumer<ByteBuffer, String> consumer;
+
+    private ConsumerResource(KafkaConsumer<ByteBuffer, String> consumer) {
+      this.consumer = consumer;
+    }
+
+    public KafkaConsumer<ByteBuffer, String> getConsumer() {
+      return consumer;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing Kafka consumer");
+      consumer.close();
+    }
+  }
+
+  private static class TopicResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(TopicResource.class);
+    private final String topicName;
+    private final AdminClient adminClient;
+
+    private TopicResource(String topicName, AdminClient adminClient) {
+      this.topicName = topicName;
+      this.adminClient = adminClient;
+    }
+
+    public String getTopicName() {
+      return topicName;
+    }
+
+    @Override
+    public void close() throws Throwable {
+      LOG.info("Deleting Kafka topic {}", topicName);
+      DeleteTopicsResult result = adminClient.deleteTopics(Collections.singleton(topicName));
+      for (Map.Entry<String, KafkaFuture<Void>> entry : result.values().entrySet()) {
+        try {
+          entry.getValue().get(1, TimeUnit.MINUTES);
+        } catch (ExecutionException e) {
+          if (!(e.getCause() instanceof UnknownTopicOrPartitionException)) {
+            throw e;
+          }
+        }
+      }
+    }
+  }
+
+  private static class AdminClientResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(AdminClientResource.class);
+    private final AdminClient adminClient;
+
+    private AdminClientResource(AdminClient adminClient) {
+      this.adminClient = adminClient;
+    }
+
+    public AdminClient getAdminClient() {
+      return adminClient;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing Kafka admin client");
+      adminClient.close();
+    }
+  }
+
+  private static class ConnectorDeploymentResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(ConnectorDeploymentResource.class);
+    private final SolaceConnectorDeployment deployment;
+
+    private ConnectorDeploymentResource(SolaceConnectorDeployment deployment) {
+      this.deployment = deployment;
+    }
+
+    public SolaceConnectorDeployment getDeployment() {
+      return deployment;
+    }
+
+    @Override
+    public void close() throws IOException {
+      LOG.info("Closing Kafka connector deployment");
+      deployment.deleteConnector();
+    }
+  }
+
+  private static class BitnamiResource extends KafkaContainerResource<BitnamiKafkaConnectContainer> {
+
+    private BitnamiResource(BitnamiKafkaConnectContainer container) {
+      super(container);
+    }
+
+    public KafkaConnection getKafkaConnection() {
+      return new KafkaConnection(getContainer().getBootstrapServers(), getContainer().getConnectUrl(),
+          getContainer(), getContainer());
+    }
+  }
+
+  private static class ConfluentResource implements CloseableResource {
+    private final KafkaContainerResource<KafkaContainer> kafka;
+    private final KafkaContainerResource<ConfluentKafkaSchemaRegistryContainer> schemaRegistry;
+    private final KafkaContainerResource<ConfluentKafkaControlCenterContainer> controlCenter;
+    private final KafkaContainerResource<ConfluentKafkaConnectContainer> connect;
+
+    private ConfluentResource(KafkaContainerResource<KafkaContainer> kafka,
+                              KafkaContainerResource<ConfluentKafkaSchemaRegistryContainer> schemaRegistry,
+                              KafkaContainerResource<ConfluentKafkaControlCenterContainer> controlCenter,
+                              KafkaContainerResource<ConfluentKafkaConnectContainer> connect) {
+      this.kafka = kafka;
+      this.schemaRegistry = schemaRegistry;
+      this.controlCenter = controlCenter;
+      this.connect = connect;
+    }
+
+    public KafkaConnection getKafkaConnection() {
+      return new KafkaConnection(kafka.getContainer().getBootstrapServers(),
+          connect.getContainer().getConnectUrl(), kafka.container, connect.container);
+    }
+
+    public KafkaContainerResource<KafkaContainer> getKafka() {
+      return kafka;
+    }
+
+    public KafkaContainerResource<ConfluentKafkaConnectContainer> getConnect() {
+      return connect;
+    }
+
+    @Override
+    public void close() {
+      connect.close();
+      controlCenter.close();
+      schemaRegistry.close();
+      kafka.close();
+    }
+  }
+
+  private static class KafkaContainerResource<T extends GenericContainer<?>> implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(KafkaContainerResource.class);
+    private final T container;
+
+    private KafkaContainerResource(T container) {
+      this.container = container;
+    }
+
+    public T getContainer() {
+      return container;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing container {}", container.getContainerName());
+      container.close();
+    }
+  }
+
+  private enum KafkaNamespace {
+    BITNAMI, CONFLUENT;
+
+    private final Namespace namespace;
+
+    KafkaNamespace() {
+      this.namespace = Namespace.create(KafkaArgumentsProvider.class, name());
+    }
+
+    public Namespace getNamespace() {
+      return namespace;
+    }
+  }
+}
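For orientation, here is a minimal sketch (not part of the patch) of how a test could consume this provider: the `@KafkaArgumentSource` annotation turns a `@ParameterizedTest` into one invocation per Kafka distribution, with the shared containers created lazily on first use and torn down by the JUnit store's `CloseableResource` mechanism. The test class, its package, and the use of `@ExtendWith` are assumptions for illustration.

```java
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;

import java.nio.ByteBuffer;
import java.time.Duration;

@ExtendWith(NetworkPubSubPlusExtension.class)
class ExampleKafkaArgumentsIT { // hypothetical test class

  @ParameterizedTest
  @KafkaArgumentsProvider.KafkaArgumentSource
  void consumesRecords(KafkaArgumentsProvider.KafkaContext kafkaContext) {
    // Runs once per KafkaNamespace (BITNAMI and CONFLUENT). The context
    // carries a pre-subscribed consumer and a deployed Solace connector.
    ConsumerRecords<ByteBuffer, String> records =
        kafkaContext.getConsumer().poll(Duration.ofSeconds(30));
    // ... assertions against records ...
  }
}
```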
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java
new file mode 100644
index 0000000..9e58cee
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java
@@ -0,0 +1,20 @@
+package com.solace.connector.kafka.connect.source.it.util.extensions;
+
+import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension;
+import com.solace.test.integration.testcontainer.PubSubPlusContainer;
+import org.testcontainers.containers.Network;
+
+public class NetworkPubSubPlusExtension extends PubSubPlusExtension {
+  public static final Network DOCKER_NET = Network.newNetwork();
+  public static final String DOCKER_NET_PUBSUB_ALIAS = "solace-pubsubplus";
+
+  public NetworkPubSubPlusExtension() {
+    super(() -> new PubSubPlusContainer()
+        .withNetwork(DOCKER_NET)
+        .withNetworkAliases(DOCKER_NET_PUBSUB_ALIAS));
+  }
+
+  public Network getDockerNetwork() {
+    return DOCKER_NET;
+  }
+}
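The point of this extension is that every PubSub+ broker container joins one shared Docker network under a fixed alias, so other containers can reach it by name instead of a mapped host port. A small illustrative sketch (not part of the patch; the sidecar image and command are arbitrary):

```java
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.utility.DockerImageName;

public class NetworkAliasExample {
  public static void main(String[] args) {
    // Any container on DOCKER_NET can address the broker as "solace-pubsubplus".
    try (GenericContainer<?> sidecar = new GenericContainer<>(DockerImageName.parse("alpine:3.14"))
        .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET)
        .withCommand("sh", "-c",
            // Probe the broker's SMF port (55555) by its network alias.
            "nc -z " + NetworkPubSubPlusExtension.DOCKER_NET_PUBSUB_ALIAS + " 55555; sleep 60")) {
      sidecar.start();
    }
  }
}
```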
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java
new file mode 100644
index 0000000..81a4aef
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java
@@ -0,0 +1,145 @@
+package com.solace.connector.kafka.connect.source.it.util.testcontainers;
+
+import com.github.dockerjava.api.command.InspectContainerResponse;
+import com.solace.connector.kafka.connect.source.SolaceSourceTask;
+import com.solace.connector.kafka.connect.source.it.Tools;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.images.builder.Transferable;
+import org.testcontainers.utility.DockerImageName;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Comparator;
+
+public class BitnamiKafkaConnectContainer extends GenericContainer<BitnamiKafkaConnectContainer> {
+  private static final String BROKER_LISTENER_NAME = "PLAINTEXT";
+  private static final int BROKER_LISTENER_PORT = 9092;
+  private static final String BOOTSTRAP_LISTENER_NAME = "PLAINTEXT_HOST";
+  public static final int BOOTSTRAP_LISTENER_PORT = 29092;
+  public static final int CONNECT_PORT = 28083;
+  private static final int ZOOKEEPER_PORT = 2181;
+  private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("bitnami/kafka");
+  private static final String DEFAULT_IMAGE_TAG = "2";
+  private static final String STARTER_SCRIPT = "/testcontainers_start.sh";
+  private DockerImageName zookeeperDockerImageName = DockerImageName.parse("bitnami/zookeeper:3");
+  private GenericContainer<?> zookeeperContainer;
+
+  public BitnamiKafkaConnectContainer() {
+    this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG));
+  }
+
+  public BitnamiKafkaConnectContainer(String dockerImageName) {
+    this(DockerImageName.parse(dockerImageName));
+  }
+
+  public BitnamiKafkaConnectContainer(DockerImageName dockerImageName) {
+    super(dockerImageName);
+
+    withNetwork(Network.newNetwork());
+    withExposedPorts(CONNECT_PORT, BROKER_LISTENER_PORT, BOOTSTRAP_LISTENER_PORT);
+    withEnv("KAFKA_CFG_BROKER_ID", "1");
+    withEnv("ALLOW_PLAINTEXT_LISTENER", "yes");
+    withEnv("KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP", String.join(",",
+        BROKER_LISTENER_NAME + ":PLAINTEXT", BOOTSTRAP_LISTENER_NAME + ":PLAINTEXT"));
+    withEnv("KAFKA_CFG_LISTENERS", String.join(",",
+        BROKER_LISTENER_NAME + "://:" + BROKER_LISTENER_PORT,
+        BOOTSTRAP_LISTENER_NAME + "://:" + BOOTSTRAP_LISTENER_PORT));
+    withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib",
+        "/opt/bitnami/kafka/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY);
+    waitingFor(Wait.forLogMessage(".*Finished starting connectors and tasks.*", 1));
+  }
+
+  @Override
+  public void start() {
+    if (zookeeperDockerImageName != null) {
+      String zookeeperNetworkAlias = "zookeeper";
+      zookeeperContainer = new GenericContainer<>(zookeeperDockerImageName)
+          .withNetwork(getNetwork())
+          .withNetworkAliases(zookeeperNetworkAlias)
+          .withEnv("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_PORT))
+          .withEnv("ZOOKEEPER_TICK_TIME", "2000")
+          .withEnv("ALLOW_ANONYMOUS_LOGIN", "yes");
+      dependsOn(zookeeperContainer);
+      withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", String.format("%s:%s", zookeeperNetworkAlias, ZOOKEEPER_PORT));
+    }
+    super.start();
+  }
+
+  @Override
+  protected void doStart() {
+    // Delay starting Kafka until after container has started
+    withCommand("sh", "-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + STARTER_SCRIPT);
+    super.doStart();
+  }
+
+  @Override
+  protected void containerIsStarting(InspectContainerResponse containerInfo) {
+    String command = "/bin/sh\n"
+        + "set -e\n"
+        + "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties\n"
+        + "echo 'rest.port=" + CONNECT_PORT + "' >> /opt/bitnami/kafka/config/connect-distributed.properties\n"
+        + "echo 'log4j.logger.org.apache.kafka.connect.runtime.WorkerSinkTask=DEBUG' >> /opt/bitnami/kafka/config/connect-log4j.properties\n"
+        + "echo 'log4j.logger." + SolaceSourceTask.class.getName() + "=TRACE' >> /opt/bitnami/kafka/config/connect-log4j.properties\n"
+        + "export KAFKA_CFG_ADVERTISED_LISTENERS=" + advertisedListeners(containerInfo) + "\n"
+        + "/opt/bitnami/scripts/kafka/setup.sh\n"
+        + "/opt/bitnami/scripts/kafka/run.sh &\n"
+        + "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties\n";
+    copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 0777), STARTER_SCRIPT);
+    super.containerIsStarting(containerInfo);
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    if (zookeeperContainer != null) {
+      zookeeperContainer.close();
+    }
+  }
+
+  public String getBootstrapServers() {
+    return String.format("%s:%s", getHost(), getMappedPort(BitnamiKafkaConnectContainer.BOOTSTRAP_LISTENER_PORT));
+  }
+
+  public String getConnectUrl() {
+    return String.format("http://%s:%s", getHost(), getMappedPort(BitnamiKafkaConnectContainer.CONNECT_PORT));
+  }
+
+  public BitnamiKafkaConnectContainer withZookeeper(DockerImageName dockerImageName) {
+    zookeeperDockerImageName = dockerImageName;
+    return this;
+  }
+
+  private String advertisedListeners(InspectContainerResponse containerInfo) {
+    return String.join(",",
+        String.format("%s://%s:%s", BROKER_LISTENER_NAME, getExternalIpAddress(containerInfo), BROKER_LISTENER_PORT),
+        String.format("%s://%s:%s", BOOTSTRAP_LISTENER_NAME, getHost(), getMappedPort(BOOTSTRAP_LISTENER_PORT)));
+  }
+
+  /**
+   * @see org.testcontainers.containers.KafkaContainer
+   */
+  private String getExternalIpAddress(InspectContainerResponse containerInfo) {
+    // Kafka supports only one INTER_BROKER listener, so we have to pick one.
+    // The current algorithm uses the following order of resolving the IP:
+    // 1. Custom network's IP set via `withNetwork`
+    // 2. Bridge network's IP
+    // 3. Best effort fallback to getNetworkSettings#ipAddress
+    return containerInfo.getNetworkSettings().getNetworks().entrySet()
+        .stream()
+        .filter(it -> it.getValue().getIpAddress() != null)
+        .max(Comparator.comparingInt(entry -> {
+          if (getNetwork().getId().equals(entry.getValue().getNetworkID())) {
+            return 2;
+          }
+
+          if ("bridge".equals(entry.getKey())) {
+            return 1;
+          }
+
+          return 0;
+        }))
+        .map(it -> it.getValue().getIpAddress())
+        .orElseGet(() -> containerInfo.getNetworkSettings().getIpAddress());
+  }
+}
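A minimal usage sketch for the container above (illustrative, not part of the patch). It relies only on the public API shown in the diff: `start()` also brings up the paired Zookeeper container, and the two getters expose the host-mapped listener and Connect REST port.

```java
// Hypothetical standalone usage of BitnamiKafkaConnectContainer.
try (BitnamiKafkaConnectContainer kafka = new BitnamiKafkaConnectContainer()) {
  kafka.start(); // waits for "Finished starting connectors and tasks"

  String bootstrap = kafka.getBootstrapServers(); // host-mapped PLAINTEXT_HOST listener (29092)
  String connectUrl = kafka.getConnectUrl();      // host-mapped Connect REST API (28083)
  // ... create topics via bootstrap, deploy the connector via connectUrl ...
}
```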
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaConnectContainer.java
new file mode 100644
index 0000000..b882e0a
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaConnectContainer.java
@@ -0,0 +1,63 @@
+package com.solace.connector.kafka.connect.source.it.util.testcontainers;
+
+import com.solace.connector.kafka.connect.source.it.Tools;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.utility.DockerImageName;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class ConfluentKafkaConnectContainer extends GenericContainer<ConfluentKafkaConnectContainer> {
+  public static final int CONNECT_PORT = 28083;
+  private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-kafka-connect-base");
+  private static final String DEFAULT_IMAGE_TAG = "6.2.1";
+
+  public ConfluentKafkaConnectContainer(KafkaContainer kafka,
+                                        ConfluentKafkaSchemaRegistryContainer schemaRegistry) {
+    this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG), kafka, schemaRegistry);
+  }
+
+  public ConfluentKafkaConnectContainer(DockerImageName dockerImageName,
+                                        KafkaContainer kafka,
+                                        ConfluentKafkaSchemaRegistryContainer schemaRegistry) {
+    super(dockerImageName);
+    assertThat(kafka.getNetworkAliases().size(), greaterThanOrEqualTo(2));
+    assertThat(schemaRegistry.getNetworkAliases().size(), greaterThanOrEqualTo(2));
+    assertEquals(kafka.getNetwork(), schemaRegistry.getNetwork());
+
+    dependsOn(kafka, schemaRegistry);
+    withNetwork(kafka.getNetwork());
+    withExposedPorts(CONNECT_PORT);
+    withEnv("CONNECT_REST_PORT", Integer.toString(CONNECT_PORT));
+    withEnv("CONNECT_GROUP_ID", "quickstart-avro");
+    withEnv("CONNECT_CONFIG_STORAGE_TOPIC", "quickstart-avro-config");
+    withEnv("CONNECT_OFFSET_STORAGE_TOPIC", "quickstart-avro-offsets");
+    withEnv("CONNECT_STATUS_STORAGE_TOPIC", "quickstart-avro-status");
+    withEnv("CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR", "1");
+    withEnv("CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR", "1");
+    withEnv("CONNECT_STATUS_STORAGE_REPLICATION_FACTOR", "1");
+    withEnv("CONNECT_KEY_CONVERTER", "io.confluent.connect.avro.AvroConverter");
+    withEnv("CONNECT_VALUE_CONVERTER", "io.confluent.connect.avro.AvroConverter");
+    withEnv("CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL", String.format("http://%s:%s",
+        schemaRegistry.getNetworkAliases().get(1), ConfluentKafkaSchemaRegistryContainer.REGISTRY_PORT));
+    withEnv("CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL", String.format("http://%s:%s",
+        schemaRegistry.getNetworkAliases().get(1), ConfluentKafkaSchemaRegistryContainer.REGISTRY_PORT));
+    withEnv("CONNECT_BOOTSTRAP_SERVERS", String.format("%s:9092", kafka.getNetworkAliases().get(1)));
+    withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter");
+    withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter");
+    withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost");
+    withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO");
+    withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars");
+    withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib",
+        "/etc/kafka-connect/jars", BindMode.READ_ONLY);
+    waitingFor(Wait.forLogMessage(".*Kafka Connect started.*", 1));
+  }
+
+  public String getConnectUrl() {
+    return String.format("http://%s:%s", getHost(), getMappedPort(ConfluentKafkaConnectContainer.CONNECT_PORT));
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java
new file mode 100644
index 0000000..f7ba130
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java
@@ -0,0 +1,40 @@
+package com.solace.connector.kafka.connect.source.it.util.testcontainers;
+
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class ConfluentKafkaControlCenterContainer extends GenericContainer<ConfluentKafkaControlCenterContainer> {
+  private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-enterprise-control-center");
+  private static final String DEFAULT_IMAGE_TAG = "6.2.1";
+
+  public ConfluentKafkaControlCenterContainer(KafkaContainer kafka,
+                                              ConfluentKafkaSchemaRegistryContainer schemaRegistry) {
+    this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG), kafka, schemaRegistry);
+  }
+
+  public ConfluentKafkaControlCenterContainer(DockerImageName dockerImageName,
+                                              KafkaContainer kafka,
+                                              ConfluentKafkaSchemaRegistryContainer schemaRegistry) {
+    super(dockerImageName);
+    assertThat(kafka.getNetworkAliases().size(), greaterThanOrEqualTo(2));
+    assertThat(schemaRegistry.getNetworkAliases().size(), greaterThanOrEqualTo(2));
+    assertEquals(kafka.getNetwork(), schemaRegistry.getNetwork());
+
+    dependsOn(kafka, schemaRegistry);
+    withNetwork(kafka.getNetwork());
+    withEnv("CONTROL_CENTER_REPLICATION_FACTOR", "1");
+    withEnv("CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS", "1");
+    withEnv("CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS", "1");
+    withEnv("CONFLUENT_METRICS_TOPIC_REPLICATION", "1");
+    withEnv("CONTROL_CENTER_SCHEMA_REGISTRY_URL", String.format("http://%s:%s",
+        schemaRegistry.getNetworkAliases().get(1), ConfluentKafkaSchemaRegistryContainer.REGISTRY_PORT));
+    withEnv("CONTROL_CENTER_BOOTSTRAP_SERVERS", String.format("%s:9092", kafka.getNetworkAliases().get(1)));
+    withEnv("CONTROL_CENTER_ZOOKEEPER_CONNECT", String.format("%s:%s", kafka.getNetworkAliases().get(1),
+        KafkaContainer.ZOOKEEPER_PORT));
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java
new file mode 100644
index 0000000..6740f0d
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java
@@ -0,0 +1,40 @@
+package com.solace.connector.kafka.connect.source.it.util.testcontainers;
+
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.utility.DockerImageName;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+public class ConfluentKafkaSchemaRegistryContainer extends GenericContainer<ConfluentKafkaSchemaRegistryContainer> {
+  public static final int REGISTRY_PORT = 8081;
+  private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-schema-registry");
+  private static final String DEFAULT_IMAGE_TAG = "6.2.1";
+
+  public ConfluentKafkaSchemaRegistryContainer(KafkaContainer kafka) {
+    this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG), kafka);
+  }
+
+  public ConfluentKafkaSchemaRegistryContainer(DockerImageName dockerImageName, KafkaContainer kafka) {
+    super(dockerImageName);
+
+    assertNotNull(kafka.getNetwork());
+    assertThat(kafka.getNetworkAliases().size(), greaterThanOrEqualTo(2));
+
+    dependsOn(kafka);
+    withNetwork(kafka.getNetwork());
+    withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:" + REGISTRY_PORT);
+    withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", String.format("PLAINTEXT://%s:9092",
+        kafka.getNetworkAliases().get(1)));
+    waitingFor(Wait.forHttp("/subjects").forStatusCode(200));
+  }
+
+  @Override
+  protected void doStart() {
+    withEnv("SCHEMA_REGISTRY_HOST_NAME", getNetworkAliases().size() > 1 ? getNetworkAliases().get(1) : getHost());
+    super.doStart();
+  }
+}
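Taken together, these three containers replace the deleted Confluent docker-compose file below. A condensed sketch of the start order, as used by `provideArguments(...)` earlier in this patch (illustrative; the network and aliases mirror that code):

```java
// Broker first; every dependent container joins its Docker network.
KafkaContainer kafka = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka").withTag("6.2.1"))
    .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET)
    .withNetworkAliases("kafka");
kafka.start();

ConfluentKafkaSchemaRegistryContainer schemaRegistry =
    new ConfluentKafkaSchemaRegistryContainer(kafka).withNetworkAliases("schema-registry");
schemaRegistry.start(); // ready when GET /subjects returns 200

ConfluentKafkaConnectContainer connect = new ConfluentKafkaConnectContainer(kafka, schemaRegistry);
connect.start(); // ready when the log shows "Kafka Connect started"

String connectUrl = connect.getConnectUrl(); // REST endpoint used to deploy the Solace connector
```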
diff --git a/src/integrationTest/resources/docker-compose-kafka-apache.yml b/src/integrationTest/resources/docker-compose-kafka-apache.yml
deleted file mode 100644
index afa48bf..0000000
--- a/src/integrationTest/resources/docker-compose-kafka-apache.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-version: '3'
-
-services:
-  zookeeper:
-    image: bitnami/zookeeper:3
-    ports:
-      - 2181:2181
-    environment:
-      ZOOKEEPER_CLIENT_PORT: 2181
-      ZOOKEEPER_TICK_TIME: 2000
-      ALLOW_ANONYMOUS_LOGIN: 'yes'
-  kafka:
-    image: bitnami/kafka:2
-    ports:
-      - 9092:9092
-      - 29092:29092
-      - 39092:39092
-    environment:
-      KAFKA_CFG_BROKER_ID: 1
-      KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181
-      ALLOW_PLAINTEXT_LISTENER: 'yes'
-      KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT
-      KAFKA_CFG_LISTENERS: PLAINTEXT://:9092,PLAINTEXT_HOST://:29092,PLAINTEXT_EXTHOST://:39092
-      KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092
-#      KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092
-#      KAFKA_CFG_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-#      KAFKA_CFG_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
-    depends_on:
-      - zookeeper
diff --git a/src/integrationTest/resources/docker-compose-kafka-confluent.yml b/src/integrationTest/resources/docker-compose-kafka-confluent.yml
deleted file mode 100644
index 2982b01..0000000
--- a/src/integrationTest/resources/docker-compose-kafka-confluent.yml
+++ /dev/null
@@ -1,71 +0,0 @@
-version: '3'
-
-services:
-  zookeeper:
-    image: confluentinc/cp-zookeeper:5.4.0
-    ports:
-      - 2181:2181
-    environment:
-      ZOOKEEPER_CLIENT_PORT: 2181
-      ZOOKEEPER_TICK_TIME: 2000
-  kafka:
-    image: confluentinc/cp-kafka:5.4.0
-    ports:
-      - 9092:9092
-      - 29092:29092
-      - 39092:39092
-    environment:
-      KAFKA_BROKER_ID: 1
-      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
-      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT
-      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092
-      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
-      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
-      KAFKA_TOPIC: $KAFKA_TOPIC
-    depends_on:
-      - zookeeper
-  kafka-setup:
-    image: confluentinc/cp-kafka:5.4.0
-    hostname: kafka-setup
-    depends_on:
-      - kafka
-      - zookeeper
-    command: "bash -c 'echo Waiting for Kafka to be ready... && \
-              cub kafka-ready -b kafka:9092 1 30 && \
-              kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $KAFKA_TOPIC && \
-              sleep 30'"
-    environment:
-      # The following settings are listed here only to satisfy the image's requirements.
-      # We override the image's `command` anyways, hence this container will not start a broker.
-      KAFKA_BROKER_ID: ignored
-      KAFKA_ZOOKEEPER_CONNECT: ignored
-
-  schema-registry:
-    image: confluentinc/cp-schema-registry:5.4.0
-    ports:
-      - 8081:8081
-    environment:
-      SCHEMA_REGISTRY_HOST_NAME: localhost
-      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
-      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka:9092
-    depends_on:
-      - kafka
-
-  control-center:
-    image: confluentinc/cp-enterprise-control-center:latest
-    hostname: control-center
-    depends_on:
-      - zookeeper
-      - kafka
-      - schema-registry
-    ports:
-      - "9021:9021"
-    environment:
-      CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092'
-      CONTROL_CENTER_ZOOKEEPER_CONNECT: 'zookeeper:2181'
-      CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
-      CONTROL_CENTER_REPLICATION_FACTOR: 1
-      CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
-      CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
-      CONFLUENT_METRICS_TOPIC_REPLICATION: 1
-      PORT: 9021
\ No newline at end of file
diff --git a/src/integrationTest/resources/docker-compose-solace.yml b/src/integrationTest/resources/docker-compose-solace.yml
deleted file mode 100644
index fad702e..0000000
--- a/src/integrationTest/resources/docker-compose-solace.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-version: '3'
-
-services:
-  solbroker:
-    image: solace/solace-pubsub-standard:$PUBSUB_TAG
-    hostname: $PUBSUB_HOSTNAME
-    env_file:
-      - ./solace.env
-    ports:
-      - "2222:2222"
-      - "8080:8080"
-      - "55003:55003"
-      - "55443:55443"
-      - "55445:55445"
-      - "55555:55555"
-      - "55556:55556"
-      - "5672:5672"
-      - "5550:5550"
-      - "8008:8008"
-    shm_size: 2g
-    ulimits:
-      memlock: -1
-      nofile:
-        soft: 2448
-        hard: 42192
diff --git a/src/integrationTest/resources/log4j2.xml b/src/integrationTest/resources/log4j2.xml
new file mode 100644
index 0000000..e61d599
--- /dev/null
+++ b/src/integrationTest/resources/log4j2.xml
@@ -0,0 +1,14 @@
+<!-- Log4j 2 test configuration; the XML element markup was lost in extraction -->
\ No newline at end of file
diff --git a/src/integrationTest/resources/logback-test.xml b/src/integrationTest/resources/logback-test.xml
deleted file mode 100644
index 985c68e..0000000
--- a/src/integrationTest/resources/logback-test.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<!-- Logback test configuration; the XML element markup was lost in extraction. Its log pattern was: -->
-<!-- %d{HH:mm:ss.SSS} [%thread] %-5level %logger -%msg%n%rEx{full, org} -->
\ No newline at end of file
diff --git a/src/integrationTest/resources/solace.env b/src/integrationTest/resources/solace.env
deleted file mode 100644
index 863a835..0000000
--- a/src/integrationTest/resources/solace.env
+++ /dev/null
@@ -1,4 +0,0 @@
-username_admin_globalaccesslevel=admin
-username_admin_password=admin
-system_scaling_maxconnectioncount=100
-logging_debug_output=all
\ No newline at end of file
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
index 5df1099..ff0b92c 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
 
 public class SolFlowEventCallBackHandler implements FlowEventHandler {
 
-  final Logger log = LoggerFactory.getLogger(SolFlowEventCallBackHandler.class);
+  private static final Logger log = LoggerFactory.getLogger(SolFlowEventCallBackHandler.class);
 
   @Override
   public void handleEvent(Object obj, FlowEventArgs event) {
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
index ec0c2a4..e8f620a 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
@@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory;
 
 public class SolReconnectCallbackHandler implements JCSMPReconnectEventHandler {
 
-  final Logger log = LoggerFactory.getLogger(SolReconnectCallbackHandler.class);
+  private static final Logger log = LoggerFactory.getLogger(SolReconnectCallbackHandler.class);
 
   @Override
   public void postReconnect() throws JCSMPException {
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
index 7b32572..abdbeef 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
@@ -27,7 +27,7 @@ import org.slf4j.LoggerFactory;
 
 public class SolSessionEventCallbackHandler implements SessionEventHandler {
 
-  final Logger log = LoggerFactory.getLogger(SolSessionEventCallbackHandler.class);
+  private static final Logger log = LoggerFactory.getLogger(SolSessionEventCallbackHandler.class);
 
   @Override
   public void handleEvent(SessionEventArgs event) {
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java
index c16b61d..cd29211 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java
@@ -19,7 +19,8 @@
 
 package com.solace.connector.kafka.connect.source;
 
-import com.solacesystems.jcsmp.InvalidPropertiesException;
+import com.solacesystems.jcsmp.Context;
+import com.solacesystems.jcsmp.ContextProperties;
 import com.solacesystems.jcsmp.JCSMPChannelProperties;
 import com.solacesystems.jcsmp.JCSMPException;
 import com.solacesystems.jcsmp.JCSMPFactory;
@@ -27,14 +28,13 @@ import com.solacesystems.jcsmp.JCSMPSession;
 import com.solacesystems.jcsmp.JCSMPSessionStats;
 import com.solacesystems.jcsmp.statistics.StatType;
 
-import com.solacesystems.jcsmp.Context;
-import com.solacesystems.jcsmp.ContextProperties;
-
-import java.util.Enumeration;
-
+import org.apache.kafka.common.config.types.Password;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.util.Enumeration;
+import java.util.Optional;
+
 public class SolSessionHandler {
 
   private static final Logger log = LoggerFactory.getLogger(SolSessionHandler.class);
@@ -57,11 +57,12 @@ public SolSessionHandler(SolaceSourceConnectorConfig connectorConfig) {
    */
   public void configureSession() {
     // Required Properties
-    properties.setProperty(JCSMPProperties.USERNAME, 
+    properties.setProperty(JCSMPProperties.USERNAME,
         connectorConfig.getString(SolaceSourceConstants.SOL_USERNAME));
-    properties.setProperty(JCSMPProperties.PASSWORD, 
-        connectorConfig.getString(SolaceSourceConstants.SOL_PASSWORD));
-    properties.setProperty(JCSMPProperties.VPN_NAME, 
+    properties.setProperty(JCSMPProperties.PASSWORD,
+        Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_PASSWORD))
+            .map(Password::value).orElse(null));
+    properties.setProperty(JCSMPProperties.VPN_NAME,
         connectorConfig.getString(SolaceSourceConstants.SOL_VPN_NAME));
     properties.setProperty(JCSMPProperties.HOST, connectorConfig.getString(SolaceSourceConstants.SOL_HOST));
@@ -141,7 +142,8 @@ public void configureSession() {
     properties.setProperty(JCSMPProperties.SSL_TRUST_STORE,
         connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE));
     properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD,
-        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD));
+        Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD))
+            .map(Password::value).orElse(null));
     properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT,
         connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT));
     properties.setProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST,
@@ -149,13 +151,15 @@ public void configureSession() {
     properties.setProperty(JCSMPProperties
         .SSL_KEY_STORE, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE));
     properties.setProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD,
-        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD));
+        Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD))
+            .map(Password::value).orElse(null));
     properties.setProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT,
         connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT));
     properties.setProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT,
         connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT));
     properties.setProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD,
-        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD));
+        Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD))
+            .map(Password::value).orElse(null));
     //   }
   }
 
@@ -163,15 +167,15 @@ public void configureSession() {
   /**
    * Connect JCSMPSession.
    * @return boolean result
-   * @throws JCSMPException 
+   * @throws JCSMPException
    */
   public void connectSession() throws JCSMPException {
-
+
     System.setProperty("java.security.auth.login.config",
         connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_LOGIN_CONFIG));
     System.setProperty("java.security.krb5.conf",
         connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG));
-
+
     session = JCSMPFactory.onlyInstance().createSession(properties, ctx, new SolSessionEventCallbackHandler());
     session.connect();
   }
@@ -191,13 +195,13 @@ public void printStats() {
       log.info("\n");
     }
   }
-
+
   /**
    * Shutdown the session.
   * @return return shutdown boolean result
   */
  public boolean shutdown() {
-
+
    Context context = JCSMPFactory.onlyInstance().getDefaultContext();
    if ( session != null ) {
      session.closeSession();
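These `getPassword(...)` calls pair with the `Type.PASSWORD` definitions in the next file: Kafka's `Password` wrapper masks the secret in `toString()` output, so it cannot leak through logs or error messages, and the `Optional` chain unwraps it while tolerating absent values. A small self-contained illustration (not from the patch):

```java
import org.apache.kafka.common.config.types.Password;

import java.util.Optional;

public class PasswordExample {
  public static void main(String[] args) {
    Password password = new Password("super-secret");
    System.out.println(password); // prints "[hidden]", not the secret

    // Null-tolerant unwrapping, mirroring configureSession() above:
    String raw = Optional.ofNullable(password).map(Password::value).orElse(null);
    System.out.println(raw);      // "super-secret", only on explicit request
  }
}
```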
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
index 7e66d25..d74a030 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
@@ -31,7 +31,7 @@
 public class SolaceSourceConnectorConfig extends AbstractConfig {
 
   private static final Logger log = LoggerFactory.getLogger(SolaceSourceConnectorConfig.class);
-
+
   /**
    * Constructor to create Solace Configuration details for Source Connector.
    */
@@ -45,17 +45,17 @@ public SolaceSourceConnectorConfig(Map<String, String> properties) {
    * Returns a ConfigDef to be used for Source Task.
    */
   public static ConfigDef solaceConfigDef() {
-
-
+
+
     // TODO: Revise defaults to JCSMP defaults
-
-
+
+
     return new ConfigDef()
         .define(SolaceSourceConstants.KAFKA_TOPIC, Type.STRING, "default", Importance.HIGH,
            "Kafka topic to consume from")
-        .define(SolaceSourceConstants.SOL_USERNAME, Type.STRING, "default", 
+        .define(SolaceSourceConstants.SOL_USERNAME, Type.STRING, "default",
            Importance.HIGH, "Solace username")
-        .define(SolaceSourceConstants.SOL_PASSWORD, Type.STRING, "default", 
+        .define(SolaceSourceConstants.SOL_PASSWORD, Type.PASSWORD, "default",
            Importance.HIGH, "Solace user password")
         .define(SolaceSourceConstants.SOL_HOST, Type.STRING, null, Importance.HIGH,
            "host to connect with, can be comma delimited for HA/DR")
@@ -68,6 +68,9 @@ public static ConfigDef solaceConfigDef() {
         .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, Type.CLASS,
            SolMessageProcessorIF.class, Importance.HIGH,
            "default Solace message processor to use")
+        .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, Type.BOOLEAN, false,
+           Importance.MEDIUM,
+           "If enabled, messages that throw message processor errors will be discarded")
         .define(SolaceSourceConstants.SOL_LOCALHOST, Type.STRING, null, Importance.LOW,
            "The hostname or IP address of the machine on which the application is running. "
              + "On a multihomed machine, it is strongly recommended to provide this parameter "
@@ -81,28 +84,28 @@ public static ConfigDef solaceConfigDef() {
            "If enabled, the API maintains a local cache of subscriptions and "
              + "reapplies them when the subscriber "
              + "connection is reestablished")
-        .define(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS, Type.BOOLEAN, false,
            Importance.LOW, "indicates whether to generate a send timestamp in outgoing messages")
-        .define(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS, Type.BOOLEAN, false,
            Importance.LOW, "Indicates whether to generate a receive timestamp on incoming messages")
-        .define(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS, Type.BOOLEAN, false,
            Importance.LOW, "Indicates whether to generate a sequence number in outgoing messages")
-        .define(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION, Type.BOOLEAN, false,
            Importance.LOW,
            "Indicates whether to calculate message expiration time in outgoing "
              + "messages and incoming messages")
         .define(SolaceSourceConstants.SOL_PUB_MULTI_THREAD, Type.BOOLEAN, true, Importance.LOW,
            "If enabled (default), the XMLMessageProducer is safe to access from multiple threads")
-        .define(SolaceSourceConstants.SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF, Type.BOOLEAN, true, 
+        .define(SolaceSourceConstants.SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF, Type.BOOLEAN, true,
            Importance.LOW,
            "If enabled, during send operations, the XMLMessageProducer "
              + "concatenates all published data. "
              + "This can result in higher throughput for certain send operations. It can, however, "
              + "lead to performance degradation for some scenarios with large messages")
-        .define(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR, Type.BOOLEAN, false,
            Importance.LOW,
            "If enabled, messages delivered asynchronously to an XMLMessageListener "
              + "are delivered directly from the I/O thread "
@@ -111,10 +114,10 @@ public static ConfigDef solaceConfigDef() {
              + "MUST return quickly "
              + "from the onReceive() callback, and MUST NOT call ANY session"
              + " methods from the I/O thread")
-        .define(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR, Type.BOOLEAN, false,
            Importance.LOW, "ignore errors caused by subscriptions being already present")
-        .define(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR, Type.BOOLEAN, false, 
+        .define(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR, Type.BOOLEAN, false,
            Importance.LOW,
            "When removing subscriptions ignore errors caused by subscriptions not being found.")
         .define(SolaceSourceConstants.SOL_NO_LOCAL, Type.BOOLEAN, false, Importance.LOW,
@@ -126,72 +129,72 @@ public static ConfigDef solaceConfigDef() {
         .define(SolaceSourceConstants.SOL_AUTHENTICATION_SCHEME, Type.STRING,
            "AUTHENTICATION_SCHEME_BASIC", Importance.MEDIUM,
            "String property specifying the authentication scheme.")
-        .define(SolaceSourceConstants.SOL_KRB_SERVICE_NAME, Type.STRING, "solace", 
+        .define(SolaceSourceConstants.SOL_KRB_SERVICE_NAME, Type.STRING, "solace",
            Importance.MEDIUM,
            "This property is used to specify the ServiceName portion "
              + "of the Service Principal Name (SPN) "
              + "that has a format of ServiceName/ApplianceName@REALM.")
-        .define(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO, Type.STRING, "", 
+        .define(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO, Type.STRING, "",
            Importance.MEDIUM,
            "Session property specifying a transport protocol that SSL session connection will be "
              + "downgraded to after client authentication. "
              + "Allowed values: TRANSPORT_PROTOCOL_PLAIN_TEXT.")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectTimeoutInMillis, Type.INT, 30000, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectTimeoutInMillis, Type.INT, 30000,
            Importance.MEDIUM, "Timeout value (in ms) for creating an initial connection to Solace")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis, Type.INT, 10000, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis, Type.INT, 10000,
            Importance.MEDIUM, "Timeout value (in ms) for reading a reply from Solace")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetries, Type.INT, 0, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetries, Type.INT, 0,
            Importance.MEDIUM,
            "The number of times to attempt and retry a connection to the host appliance "
              + "(or list of appliances) "
              + "during initial connection setup")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Type.INT, 0, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Type.INT, 0,
            Importance.MEDIUM,
            "The number of times to attempt to reconnect to the appliance (or list of appliances)"
              + " after an initial "
              + "connected session goes down")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetriesPerHost, Type.INT, 0, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetriesPerHost, Type.INT, 0,
            Importance.MEDIUM,
            "This property defines how many times to try to connect or reconnect "
              + "to a single host before"
              + " moving to the next host in the list")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis,
            Type.INT, 3000, Importance.MEDIUM,
            "How much time in (MS) to wait between each attempt to "
              + "connect or reconnect to a host")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis, 
-           Type.INT, 3000, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis,
+           Type.INT, 3000,
            Importance.MEDIUM,
            "The amount of time (in ms) to wait between sending out keep-alive messages")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveLimit, Type.INT, 10, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveLimit, Type.INT, 10,
            Importance.MEDIUM,
            "The maximum number of consecutive keep-alive messages that can be sent without "
              + "receiving a response "
              + "before the connection is closed by the API")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_sendBuffer, Type.INT, 65536, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_sendBuffer, Type.INT, 65536,
            Importance.MEDIUM, "The size (in bytes) of the send socket buffer.")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_receiveBuffer, Type.INT, 65536, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_receiveBuffer, Type.INT, 65536,
            Importance.MEDIUM, "The size (in bytes) of the receive socket buffer.")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_tcpNoDelay, Type.BOOLEAN, true, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_tcpNoDelay, Type.BOOLEAN, true,
            Importance.LOW,
            "Whether to set the TCP_NODELAY option. When enabled, this option "
              + "disables the Nagle's algorithm.")
-        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_compressionLevel, Type.INT, 0, 
+        .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_compressionLevel, Type.INT, 0,
            Importance.MEDIUM,
            "A compressionLevel setting of 1-9 sets the ZLIB compression level to use; "
              + "a setting of 0 disables compression entirely.")
         .define(SolaceSourceConstants.SOL_SUBSCRIBER_LOCAL_PRIORITY, Type.INT, 1, Importance.MEDIUM,
            "Subscriber priority is used to choose a client to receive messages "
              + "sent with the DeliverToOne property set.")
-        .define(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY, Type.INT, 1, 
+        .define(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY, Type.INT, 1,
            Importance.MEDIUM,
            "Subscriber priority is used to choose a client to receive messages s"
              + "ent with the DeliverToOne property set.")
-        .define(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE, Type.BOOLEAN, true, 
+        .define(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE, Type.BOOLEAN, true,
            Importance.LOW,
            "When adding topic subscriptions override DTO processing for any "
              + "messages with DTO flags .")
@@ -204,36 +207,36 @@ public static ConfigDef solaceConfigDef() {
         .define(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES, Type.STRING, "", Importance.LOW,
            "This property is used to specify a comma separated list of cipher suites in order of "
              + "preference used for SSL connections. ")
-        .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE, Type.BOOLEAN, true, 
+        .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE, Type.BOOLEAN, true,
            Importance.LOW,
            "This property is used to specify whether the API should validate server certificates ")
         .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE_DATE, Type.BOOLEAN, true,
            Importance.LOW,
            "This property is used to specify whether the API should validate server "
              + "certificate's expiry")
-        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE, Type.STRING, 
-           "/lib/security/jssecacerts", 
+        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE, Type.STRING,
+           "/lib/security/jssecacerts",
            Importance.LOW,
            "This property is used to specify the truststore file to use in URL or path format.")
-        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD, Type.STRING, "", Importance.LOW,
+        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD, Type.PASSWORD, "", Importance.LOW,
            "This property is used to specify the password of the truststore given "
              + "in SSL_TRUST_STORE")
-        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT, Type.STRING, "JKS", 
+        .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT, Type.STRING, "JKS",
            Importance.LOW,
            "This property is used to specify the format of the truststore given in "
              + "SSL_TRUST_STORE.")
-        .define(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST, Type.STRING, "", 
+        .define(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST, Type.STRING, "",
            Importance.LOW,
            "This property is used to specify a comma separated list of acceptable common names "
              + "for matching with server certificates.")
         .define(SolaceSourceConstants.SOL_SSL_KEY_STORE, Type.STRING, "", Importance.LOW,
            "This property is used to specify the keystore file to use in URL or path format.")
-        .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD, Type.STRING, "", Importance.LOW,
+        .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD, Type.PASSWORD, "", Importance.LOW,
            "This property is used to specify the password of the keystore specified "
              + "by SSL_KEY_STORE.")
         .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT, Type.STRING, "JKS", Importance.LOW,
            "This property is used to specify the format of the keystore given in SSL_KEY_STORE.")
-        .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT, Type.STRING, "JKS", 
+        .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT, Type.STRING, "JKS",
            Importance.LOW,
            "This property is used to specify the format of an internal normalized "
              + "representation of the keystore "
@@ -241,7 +244,7 @@ public static ConfigDef solaceConfigDef() {
         .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_ALIAS, Type.STRING, "", Importance.LOW,
            "This property is used to specify the alias of the private key to use "
              + "for client certificate authentication.")
-        .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD, Type.STRING, "", Importance.LOW,
+        .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD, Type.PASSWORD, "", Importance.LOW,
            "This property is used to specify the password that deciphers the "
              + "private key from the key store.")
         .define(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG, Type.STRING, "", Importance.LOW,
@@ -250,7 +253,7 @@ public static ConfigDef solaceConfigDef() {
            "Location of the Kerberos Login Configuration File")
         .define(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY, Type.STRING, "NONE", Importance.MEDIUM,
            "This property determines if a Kafka key record is created and the key to be used");
-
+
   }
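The new `sol.message_processor.error.ignore` switch defined above is what the reworked `poll()` loop consults later in this patch. A hypothetical configuration excerpt showing how a deployment would opt in (the surrounding code is illustrative only):

```java
import java.util.HashMap;
import java.util.Map;

// Excerpt from a hypothetical connector setup:
Map<String, String> props = new HashMap<>();
props.put("sol.message_processor.error.ignore", "true"); // default is false: fail the task
SolaceSourceConnectorConfig config = new SolaceSourceConnectorConfig(props);

// The task reads the flag back through the typed accessor:
boolean ignore = config.getBoolean(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR);
```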
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
index 35e95be..d22247b 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
@@ -33,14 +33,17 @@ public class SolaceSourceConstants {
   // High Importance Solace Message processor
   public static final String SOL_MESSAGE_PROCESSOR = "sol.message_processor_class";
 
+  // Medium Importance Solace Message processor
+  public static final String SOL_MESSAGE_PROCESSOR_IGNORE_ERROR = "sol.message_processor.error.ignore";
+
   // High Importance Solace
   public static final String SOL_HOST = "sol.host";
   public static final String SOL_USERNAME = "sol.username";
   public static final String SOL_PASSWORD = "sol.password";
-
+
   // TODO: SOL_MESSAGE_ACK_MODE is not used!
   public static final String SOL_MESSAGE_ACK_MODE = "sol.message_ack_mode";
-
+
   public static final String SOL_VPN_NAME = "sol.vpn_name";
   public static final String SOL_TOPICS = "sol.topics";
   public static final String SOL_QUEUE = "sol.queue";
@@ -56,12 +59,12 @@ public class SolaceSourceConstants {
   public static final String SOL_CALCULATE_MESSAGE_EXPIRATION = "sol.calculate_message_expiration";
   public static final String SOL_REAPPLY_SUBSCRIPTIONS = "sol.reapply_subscriptions";
   public static final String SOL_PUB_MULTI_THREAD = "sol.pub_multi_thread";
-  public static final String SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF 
+  public static final String SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF
      = "sol.pub_use_immediate_direct_pub";
   public static final String SOL_MESSAGE_CALLBACK_ON_REACTOR = "sol.message_callback_on_reactor";
-  public static final String SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR 
+  public static final String SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR
      = "sol.ignore_duplicate_subscription_error";
-  public static final String SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR 
+  public static final String SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR
      = "sol.ignore_subscription_not_found_error";
   public static final String SOL_NO_LOCAL = "sol.no_local";
   public static final String SOL_ACK_EVENT_MODE = "sol.ack_event_mode";
@@ -82,34 +85,34 @@ public class SolaceSourceConstants {
   public static final String SOL_SSL_KEY_STORE = "sol.ssl_key_store";
   public static final String SOL_SSL_KEY_STORE_PASSWORD = "sol.ssl_key_store_password";
   public static final String SOL_SSL_KEY_STORE_FORMAT = "sol.ssl_key_store_format";
-  public static final String SOL_SSL_KEY_STORE_NORMALIZED_FORMAT 
+  public static final String SOL_SSL_KEY_STORE_NORMALIZED_FORMAT
      = "sol.ssl_key_store_normalized_format";
   public static final String SOL_SSL_PRIVATE_KEY_ALIAS = "sol.ssl_private_key_alias";
   public static final String SOL_SSL_PRIVATE_KEY_PASSWORD = "sol.ssl_private_key_password";
 
   // Low Importance Solace Channel Properties
-  public static final String SOL_CHANNEL_PROPERTY_connectTimeoutInMillis 
+  public static final String SOL_CHANNEL_PROPERTY_connectTimeoutInMillis
      = "sol.channel_properties.connect_timout_in_millis";
-  public static final String SOL_CHANNEL_PROPERTY_readTimeoutInMillis 
+  public static final String SOL_CHANNEL_PROPERTY_readTimeoutInMillis
      = "sol.channel_properties.read_timeout_in_millis";
-  public static final String SOL_CHANNEL_PROPERTY_connectRetries 
+  public static final String SOL_CHANNEL_PROPERTY_connectRetries
      = "sol.channel_properties.connect_retries";
-  public static final String SOL_CHANNEL_PROPERTY_reconnectRetries 
+  public static final String SOL_CHANNEL_PROPERTY_reconnectRetries
      = "sol.channel_properties.reconnect_retries";
-  public static final String SOL_CHANNEL_PROPERTY_connectRetriesPerHost 
+  public static final String SOL_CHANNEL_PROPERTY_connectRetriesPerHost
      = "sol.channnel_properties.connect_retries_per_host";
-  public static final String SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis 
+  public static final String SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis
      = "sol.channel_properties.reconnect_retry_wait_in_millis";
-  public static final String SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis 
+  public static final String SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis
      = "sol.channel_properties.keep_alive_interval_in_millis";
-  public static final String SOL_CHANNEL_PROPERTY_keepAliveLimit 
+  public static final String SOL_CHANNEL_PROPERTY_keepAliveLimit
      = "sol.channel_properties.keep_alive_limit";
   public static final String SOL_CHANNEL_PROPERTY_sendBuffer = "sol.channel_properties.send_buffer";
-  public static final String SOL_CHANNEL_PROPERTY_receiveBuffer 
+  public static final String SOL_CHANNEL_PROPERTY_receiveBuffer
      = "sol.channel_properties.receive_buffer";
-  public static final String SOL_CHANNEL_PROPERTY_tcpNoDelay 
+  public static final String SOL_CHANNEL_PROPERTY_tcpNoDelay
      = "sol.channel_properties.tcp_no_delay";
-  public static final String SOL_CHANNEL_PROPERTY_compressionLevel 
+  public static final String SOL_CHANNEL_PROPERTY_compressionLevel
      = "sol.channel_properties.compression_level";
 
   // Low Importance Persistent Message Properties
@@ -130,10 +133,10 @@ public class SolaceSourceConstants {
   // Allowable values include: NONE, DESTINATION, CORRELATION_ID,
   // CORRELATION_ID_AS_BYTES
   public static final String SOL_KAFKA_MESSAGE_KEY = "sol.kafka_message_key";
-
+
   //Low importance Kerberos details
   public static final String SOL_KERBEROS_LOGIN_CONFIG = "sol.kerberos.login.conf";
-  public static final String SOL_KERBEROS_KRB5_CONFIG = "sol.kerberos.krb5.conf"; 
+  public static final String SOL_KERBEROS_KRB5_CONFIG = "sol.kerberos.krb5.conf";
-
+
 }
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java
index 643f33b..bfb493f 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java
@@ -46,7 +46,7 @@ public class SolaceSourceQueueConsumer {
     this.solSessionHandler = solSessionHandler;
   }
 
-  public boolean init(BlockingQueue<BytesXMLMessage> squeue) {
+  public void init(BlockingQueue<BytesXMLMessage> squeue) throws JCSMPException {
     solQueue = JCSMPFactory.onlyInstance().createQueue(lconfig.getString(SolaceSourceConstants.SOL_QUEUE));
     final ConsumerFlowProperties flow_prop = new ConsumerFlowProperties();
     flow_prop.setEndpoint(solQueue);
@@ -54,16 +54,10 @@ public boolean init(BlockingQueue<BytesXMLMessage> squeue) {
     flow_prop.setStartState(true);
     EndpointProperties endpointProps = new EndpointProperties();
     endpointProps.setAccessType(EndpointProperties.ACCESSTYPE_NONEXCLUSIVE);
-    try {
-      callbackhandler = new SolMessageQueueCallbackHandler(squeue);
-      recv = solSessionHandler.getSession().createFlow(callbackhandler, flow_prop, endpointProps,
-          new SolFlowEventCallBackHandler());
-      recv.start();
-    } catch (JCSMPException je) {
-      log.info("=========== JCSMP Exception while creating Solace Flow to Queue "
-          + "in SolaceSourceQueueConsumer {} \n", je.getLocalizedMessage());
-    }
-    return true;
+    callbackhandler = new SolMessageQueueCallbackHandler(squeue);
+    recv = solSessionHandler.getSession().createFlow(callbackhandler, flow_prop, endpointProps,
+        new SolFlowEventCallBackHandler());
+    recv.start();
   }
 
   public void stop() {
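`init(...)` now propagates `JCSMPException` instead of logging it and returning `true` unconditionally. As the `SolaceSourceTask` changes below show, the caller converts the exception into Kafka Connect's error model; a sketch of the resulting contract (this mirrors the diff that follows, shown here only to make the pattern explicit):

```java
// Fail fast: a broken flow now fails the task instead of silently running on.
try {
  queueConsumer.init(ingressMessages);
} catch (JCSMPException e) {
  // ConnectException marks the task FAILED and surfaces the root cause
  // through the Connect REST API, rather than burying it in a log line.
  throw new ConnectException("Failed to start queue consumer", e);
}
```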
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java
index 3132569..f76f5d2 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java
@@ -22,32 +22,30 @@
 import com.solacesystems.jcsmp.BytesXMLMessage;
 import com.solacesystems.jcsmp.DeliveryMode;
 import com.solacesystems.jcsmp.JCSMPException;
-import com.solacesystems.jcsmp.JCSMPProperties;
 import com.solacesystems.jcsmp.JCSMPSession;
+import org.apache.kafka.connect.errors.ConnectException;
+import org.apache.kafka.connect.source.SourceRecord;
+import org.apache.kafka.connect.source.SourceTask;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.LinkedBlockingQueue;
-import org.apache.kafka.connect.source.SourceRecord;
-import org.apache.kafka.connect.source.SourceTask;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 
 public class SolaceSourceTask extends SourceTask { // implements XMLMessageListener{
 
   private static final Logger log = LoggerFactory.getLogger(SolaceSourceTask.class);
 
-  final JCSMPProperties properties = new JCSMPProperties();
-
   SolaceSourceConnectorConfig connectorConfig;
   private SolSessionHandler solSessionHandler = null;
-  BlockingQueue<BytesXMLMessage> ingressMessages 
+  BlockingQueue<BytesXMLMessage> ingressMessages
      = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for any incoming message from PS+ topics and queue
-  BlockingQueue<BytesXMLMessage> outstandingAckList 
+  BlockingQueue<BytesXMLMessage> outstandingAckList
      = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for Solace Flow messages
   String skafkaTopic;
   SolaceSourceTopicListener topicListener = null;
@@ -72,10 +70,7 @@ public void start(Map<String, String> props) {
           .getConfiguredInstance(SolaceSourceConstants
           .SOL_MESSAGE_PROCESSOR, SolMessageProcessorIF.class);
     } catch (Exception e) {
-      log.info(
-          "================ Encountered exception in creating the message processor."
-          + " Cause: {}, Stacktrace: {} ",
-          e.getCause(), e.getStackTrace());
+      throw new ConnectException("Encountered exception in creating the message processor.", e);
     }
     skafkaTopic = connectorConfig.getString(SolaceSourceConstants.KAFKA_TOPIC);
     solSessionHandler = new SolSessionHandler(connectorConfig);
@@ -83,24 +78,23 @@ public void start(Map<String, String> props) {
       solSessionHandler.configureSession();
      solSessionHandler.connectSession();
     } catch (JCSMPException e) {
-      log.info("Received Solace exception {}, with the "
-          + "following: {} ", e.getCause(), e.getStackTrace());
-      log.info("================ Failed to create JCSMPSession Session");
-      stop();
+      throw new ConnectException("Failed to create JCSMPSession", e);
     }
     log.info("================ JCSMPSession Connected");
 
     if (connectorConfig.getString(SolaceSourceConstants.SOL_TOPICS) != null) {
       topicListener = new SolaceSourceTopicListener(connectorConfig, solSessionHandler);
-      if (!topicListener.init(ingressMessages)) {
-        log.info("================ Failed to start topic consumer ... shutting down");
-        stop();
+      try {
+        topicListener.init(ingressMessages);
+      } catch (JCSMPException e) {
+        throw new ConnectException("Failed to start topic consumer", e);
       }
     }
 
     if (connectorConfig.getString(SolaceSourceConstants.SOL_QUEUE) != null) {
       queueConsumer = new SolaceSourceQueueConsumer(connectorConfig, solSessionHandler);
-      if (!queueConsumer.init(ingressMessages)) {
-        log.info("================ Failed to start queue consumer ... shutting down");
-        stop();
+      try {
+        queueConsumer.init(ingressMessages);
+      } catch (JCSMPException e) {
+        throw new ConnectException("Failed to start queue consumer", e);
       }
     }
   }
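Note: start() instantiates the processor reflectively via getConfiguredInstance, so any SolMessageProcessorIF implementation with a public no-argument constructor can be plugged in (the task tests later in this diff assert on exactly that requirement). Below is a minimal hypothetical implementation; the interface signatures are inferred from the two calls this diff makes (process(...) and getRecords(...)), not shown in the patch itself.

    import com.solacesystems.jcsmp.BytesXMLMessage;
    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.source.SourceRecord;

    // Hypothetical processor: one Kafka record per Solace message, payload as raw bytes.
    public class PassthroughMessageProcessor implements SolMessageProcessorIF {
      private BytesXMLMessage message;

      @Override
      public SolMessageProcessorIF process(String kafkaMessageKey, BytesXMLMessage message) {
        this.message = message; // key handling omitted for brevity
        return this;
      }

      @Override
      public SourceRecord[] getRecords(String kafkaTopic) {
        return new SourceRecord[] {
            new SourceRecord(null, null, kafkaTopic, Schema.BYTES_SCHEMA, message.getBytes())
        };
      }
    }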
shutting down"); - stop(); + try { + queueConsumer.init(ingressMessages); + } catch (JCSMPException e) { + throw new ConnectException("Failed to start queue consumer", e); } } } @@ -119,31 +113,37 @@ public synchronized List poll() throws InterruptedException { // There is at least one message to process spinTurns = 0; // init spinTurns again List records = new ArrayList<>(); - int processedInIhisBatch = 0; - int count = 0; + int processedInThisBatch; + int discarded = 0; int arraySize = ingressMessages.size(); - while (count < arraySize) { + for (processedInThisBatch = 0; processedInThisBatch < arraySize; processedInThisBatch++) { BytesXMLMessage msg = ingressMessages.take(); try { processor.process(connectorConfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); } catch (Exception e) { - log.info( - "================ Encountered exception in message processing....discarded." - + " Cause: {}, Stacktrace: {} ", - e.getCause(), e.getStackTrace()); + if (connectorConfig.getBoolean(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR)) { + log.warn("================ Encountered exception in message processing....discarded.", e); + scheduleForAck(msg); + discarded++; + continue; + } else { + throw new ConnectException("Encountered exception in message processing", e); + } } Collections.addAll(records, processor.getRecords(skafkaTopic)); - count++; - processedInIhisBatch++; - if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT - || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { - outstandingAckList.add(msg); // enqueue messages received from guaranteed messaging endpoint for later ack - } + scheduleForAck(msg); } - log.debug("Processed {} records in this batch.", processedInIhisBatch); + log.debug("Processed {} records in this batch. Discarded {}", processedInThisBatch - discarded, discarded); return records; } + private synchronized void scheduleForAck(BytesXMLMessage msg) { + if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT + || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { + outstandingAckList.add(msg); // enqueue messages received from guaranteed messaging endpoint for later ack + } + } + /** * Kafka Connect method that write records to disk. 
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java
index 1a4a30e..89624ab 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java
@@ -50,45 +50,30 @@ public SolaceSourceTopicListener(SolaceSourceConnectorConfig lconfig, SolSession
     this.solSessionHandler = solSessionHandler;
   }
 
-  public boolean init(BlockingQueue<BytesXMLMessage> squeue) {
-    boolean topicListenerStarted = true;
+  public void init(BlockingQueue<BytesXMLMessage> squeue) throws JCSMPException {
     solaceTopics = lconfig.getString(SolaceSourceConstants.SOL_TOPICS);
     topics = solaceTopics.split(",");
 
-    try {
-      callbackhandler = new SolMessageTopicCallbackHandler(lconfig, squeue);
-      cons = solSessionHandler.getSession().getMessageConsumer(new SolReconnectCallbackHandler(), callbackhandler);
-    } catch (JCSMPException je) {
-      log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage());
-    }
-    try {
-      Topic topic;
-      int counter = 0;
-      log.info("Number of topics to add: {} ", topics.length);
-      while (topics.length > counter) {
-        log.info("Adding subscription for topic {} ", topics[counter].trim());
-        TopicProperties tproperties = new TopicProperties();
-        tproperties.setName(topics[counter].trim());
-        // Only used for legacy PubSub+ versions
-        tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE));
-        topic = JCSMPFactory.onlyInstance().createTopic(tproperties);
-        solSessionHandler.getSession().addSubscription(topic, true);
-        counter++;
-      }
-    } catch (JCSMPException je) {
-      log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage());
+    callbackhandler = new SolMessageTopicCallbackHandler(lconfig, squeue);
+    cons = solSessionHandler.getSession().getMessageConsumer(new SolReconnectCallbackHandler(), callbackhandler);
+
+    Topic topic;
+    int counter = 0;
+    log.info("Number of topics to add: {} ", topics.length);
+    while (topics.length > counter) {
+      log.info("Adding subscription for topic {} ", topics[counter].trim());
+      TopicProperties tproperties = new TopicProperties();
+      tproperties.setName(topics[counter].trim());
+      // Only used for legacy PubSub+ versions
+      tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE));
+      topic = JCSMPFactory.onlyInstance().createTopic(tproperties);
+      solSessionHandler.getSession().addSubscription(topic, true);
+      counter++;
     }
-    try {
-      cons.start();
-    } catch (JCSMPException je) {
-      log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage());
-      topicListenerStarted = false;
-    }
+    cons.start();
 
     log.info("================Session is Connected");
-
-    return topicListenerStarted;
-
   }
 
   public void shutdown() {
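Note: the patch keeps the original while loop over the comma-split topic list. For readers, it is behaviorally identical to this for-each restatement (same calls, no new logic):

    // Equivalent restatement of the subscription loop retained by this patch.
    for (String name : solaceTopics.split(",")) {
      TopicProperties tproperties = new TopicProperties();
      tproperties.setName(name.trim());
      // Only used for legacy PubSub+ versions
      tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE));
      solSessionHandler.getSession().addSubscription(JCSMPFactory.onlyInstance().createTopic(tproperties), true);
    }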
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java b/src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java
similarity index 88%
rename from src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java
rename to src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java
index 258df9b..839a546 100644
--- a/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java
+++ b/src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java
@@ -1,14 +1,12 @@
 package com.solace.connector.kafka.connect.source;
 
 public class VersionUtil {
-  
+
   /**
   * Returns the projects version number for the connector.
   */
   public static String getVersion() {
-
-    return "2.0.2";
-
+    return "${version}";
   }
 
 }
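Note: moving VersionUtil under src/template means the hard-coded "2.0.2" is gone and ${version} is expected to be expanded at build time (presumably by a Gradle copy/expand step, which is not part of this diff). At runtime the value would surface through Kafka Connect's standard version() hook; a purely hypothetical illustration:

    import org.apache.kafka.connect.source.SourceConnector;

    // Hypothetical sketch of how a connector typically exposes VersionUtil.
    public abstract class VersionReportingConnector extends SourceConnector {
      @Override
      public String version() {
        return VersionUtil.getVersion(); // "${version}" until the build expands the template
      }
    }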
diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java
new file mode 100644
index 0000000..643a0cc
--- /dev/null
+++ b/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java
@@ -0,0 +1,43 @@
+package com.solace.connector.kafka.connect.source;
+
+import com.solacesystems.jcsmp.JCSMPProperties;
+import org.apache.commons.lang.RandomStringUtils;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.CsvSource;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class SolSessionHandlerTest {
+  @ParameterizedTest
+  @CsvSource({
+      SolaceSourceConstants.SOL_PASSWORD + ',' + JCSMPProperties.PASSWORD,
+      SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD + ',' + JCSMPProperties.SSL_KEY_STORE_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD + ',' + JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD + ',' + JCSMPProperties.SSL_TRUST_STORE_PASSWORD
+  })
+  public void testConfigurePasswords(String connectorProperty, String jcsmpProperty) {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(connectorProperty, RandomStringUtils.randomAlphanumeric(30));
+    SolSessionHandler sessionHandler = new SolSessionHandler(new SolaceSourceConnectorConfig(properties));
+    sessionHandler.configureSession();
+    assertEquals(properties.get(connectorProperty), sessionHandler.properties.getStringProperty(jcsmpProperty));
+  }
+
+  @ParameterizedTest
+  @CsvSource({
+      SolaceSourceConstants.SOL_PASSWORD + ',' + JCSMPProperties.PASSWORD,
+      SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD + ',' + JCSMPProperties.SSL_KEY_STORE_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD + ',' + JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD + ',' + JCSMPProperties.SSL_TRUST_STORE_PASSWORD
+  })
+  public void testConfigureNullPasswords(String connectorProperty, String jcsmpProperty) {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(connectorProperty, null);
+    SolSessionHandler sessionHandler = new SolSessionHandler(new SolaceSourceConnectorConfig(properties));
+    sessionHandler.configureSession();
+    assertEquals(properties.get(connectorProperty), sessionHandler.properties.getStringProperty(jcsmpProperty));
+  }
+}
diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java
new file mode 100644
index 0000000..ddd00ec
--- /dev/null
+++ b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java
@@ -0,0 +1,29 @@
+package com.solace.connector.kafka.connect.source;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.kafka.common.config.types.Password;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class SolaceSourceConnectorConfigTest {
+  @ParameterizedTest
+  @ValueSource(strings = {
+      SolaceSourceConstants.SOL_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD,
+      SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD
+  })
+  public void testPasswordsObfuscation(String property) {
+    Map<String, String> properties = new HashMap<>();
+    properties.put(property, RandomStringUtils.randomAlphanumeric(30));
+    SolaceSourceConnectorConfig config = new SolaceSourceConnectorConfig(properties);
+    Password password = config.getPassword(property);
+    assertEquals(Password.HIDDEN, password.toString());
+    assertEquals(properties.get(property), password.value());
+  }
+}
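Note: both password tests above lean on the same contract of Kafka's Password type, an obfuscated toString() plus a retrievable value(); a self-contained illustration:

    import org.apache.kafka.common.config.types.Password;

    class PasswordContractDemo {
      public static void main(String[] args) {
        Password secret = new Password("not-a-real-secret");
        System.out.println(secret);         // prints "[hidden]" (Password.HIDDEN)
        System.out.println(secret.value()); // prints the raw value
      }
    }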
diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java
new file mode 100644
index 0000000..71667bf
--- /dev/null
+++ b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java
@@ -0,0 +1,53 @@
+package com.solace.connector.kafka.connect.source;
+
+import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor;
+import com.solacesystems.jcsmp.JCSMPException;
+import org.apache.kafka.common.KafkaException;
+import org.apache.kafka.connect.errors.ConnectException;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class SolaceSourceTaskTest {
+  private SolaceSourceTask solaceSourceTask;
+
+  @BeforeEach
+  void setUp() {
+    solaceSourceTask = new SolaceSourceTask();
+  }
+
+  @AfterEach
+  void tearDown() {
+    solaceSourceTask.stop();
+  }
+
+  @Test
+  public void testNoProvidedMessageProcessor() {
+    Map<String, String> props = Collections.emptyMap();
+    ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(props));
+    assertThat(thrown.getMessage(), containsString("Encountered exception in creating the message processor."));
+    assertThat(thrown.getCause(), instanceOf(KafkaException.class));
+    assertThat(thrown.getCause().getMessage(), containsString(
+        "Could not find a public no-argument constructor for " + SolMessageProcessorIF.class.getName()));
+  }
+
+  @Test
+  public void testFailSessionConnect() {
+    Map<String, String> props = new HashMap<>();
+    props.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, SolSampleSimpleMessageProcessor.class.getName());
+
+    ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(props));
+    assertThat(thrown.getMessage(), containsString("Failed to create JCSMPSession"));
+    assertThat(thrown.getCause(), instanceOf(JCSMPException.class));
+    assertThat(thrown.getCause().getMessage(), containsString("Null value was passed in for property (host)"));
+  }
+}
diff --git a/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java b/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java
new file mode 100644
index 0000000..ede8998
--- /dev/null
+++ b/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java
@@ -0,0 +1,16 @@
+package com.solace.connector.kafka.connect.source;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Collections;
+import java.util.regex.Pattern;
+
+import static org.junit.jupiter.api.Assertions.assertLinesMatch;
+
+public class VersionUtilTest {
+  @Test
+  public void testGetVersion() {
+    assertLinesMatch(Collections.singletonList(Pattern.compile("^[0-9]+\\.[0-9]+\\.[0-9]+$").pattern()),
+        Collections.singletonList(VersionUtil.getVersion()));
+  }
+}
diff --git a/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml
new file mode 100644
index 0000000..e61d599
--- /dev/null
+++ b/src/test/resources/log4j2.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Configuration status="WARN">
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
+        </Console>
+    </Appenders>
+    <Loggers>
+        <Logger name="com.solace.connector.kafka.connect.source" level="info"/>
+        <Root level="info">
+            <AppenderRef ref="Console"/>
+        </Root>
+    </Loggers>
+</Configuration>
\ No newline at end of file
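Note: testFailSessionConnect above stops exactly at session connect because no host is configured. For orientation, a hypothetical minimal property set that would get start() past that step; the host/username key literals are assumptions, defined in a part of SolaceSourceConstants not shown in this diff.

    import java.util.HashMap;
    import java.util.Map;

    class MinimalTaskConfigSketch {
      static Map<String, String> props() {
        Map<String, String> props = new HashMap<>();
        props.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR,
            "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor");
        props.put("sol.host", "tcp://localhost:55555"); // assumed key literal
        props.put("sol.username", "default");           // assumed key literal
        props.put(SolaceSourceConstants.SOL_VPN_NAME, "default");
        return props;
      }
    }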