From 45bfdf260fae29fa7abdc8feca2ae3122db6eb52 Mon Sep 17 00:00:00 2001 From: Jeffrey D <11084623+Nephery@users.noreply.github.com> Date: Thu, 21 Oct 2021 12:44:33 -0400 Subject: [PATCH] 2.1.0 Release (#35) # Improvements * Upgrade Kafka to 2.8.1 * Upgrade JCSMP to 10.12.0 # Bug fixes * Use ephemeral ports for test infrastructure setup (#29) * Obfuscate passwords (#30) * Fix start error propagation (#32) * Add `sol.message_processor.error.ignore` config option to discard messages that produce message processor errors (#33) * Fix Gradle `integrationTest` task and add `test` task # Misc * Add Dependabot * Add Jacoco code coverage * Add static code analysis tools (CodeQL, PMD, SpotBugs) * Upload test results to GitHub Actions --- .github/dependabot.yml | 9 + .github/workflows/build-test.yml | 87 +++- .github/workflows/codeql-analysis.yml | 96 ++++ .github/workflows/pmd-analysis.yml | 71 +++ .github/workflows/spotbugs-analysis.yml | 71 +++ .gitignore | 6 + .gitmodules | 3 + README.md | 28 +- build.gradle | 163 +++++-- etc/solace_source.properties | 3 + gradle.properties | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 58702 -> 59203 bytes gradle/wrapper/gradle-wrapper.properties | 2 +- gradlew | 2 + gradlew.bat | 25 +- solace-integration-test-support | 1 + .../it/DockerizedPlatformSetupApache.java | 63 --- .../it/DockerizedPlatformSetupConfluent.java | 75 ---- .../MessagingServiceFullLocalSetupApache.java | 46 -- ...ssagingServiceFullLocalSetupConfluent.java | 44 -- .../connect/source/it/ParameterTesting.java | 74 --- .../source/it/SolaceConnectorDeployment.java | 181 +++++--- .../connect/source/it/SolaceSourceTaskIT.java | 188 ++++++++ .../connect/source/it/SourceConnectorIT.java | 421 +++++++++++------- .../source/it/TestConfigProperties.java | 64 --- .../connect/source/it/TestConstants.java | 32 +- .../connect/source/it/TestKafkaConsumer.java | 117 ----- .../connect/source/it/TestSolaceProducer.java | 74 ++- .../kafka/connect/source/it/Tools.java | 26 +- .../source/it/util/KafkaConnection.java | 52 +++ .../extensions/KafkaArgumentsProvider.java | 406 +++++++++++++++++ .../NetworkPubSubPlusExtension.java | 20 + .../BitnamiKafkaConnectContainer.java | 145 ++++++ .../ConfluentKafkaConnectContainer.java | 63 +++ .../ConfluentKafkaControlCenterContainer.java | 40 ++ ...ConfluentKafkaSchemaRegistryContainer.java | 40 ++ .../resources/docker-compose-kafka-apache.yml | 29 -- .../docker-compose-kafka-confluent.yml | 71 --- .../resources/docker-compose-solace.yml | 25 -- src/integrationTest/resources/log4j2.xml | 14 + .../resources/logback-test.xml | 14 - src/integrationTest/resources/solace.env | 4 - .../source/SolFlowEventCallBackHandler.java | 2 +- .../source/SolReconnectCallbackHandler.java | 2 +- .../SolSessionEventCallbackHandler.java | 2 +- .../connect/source/SolSessionHandler.java | 40 +- .../source/SolaceSourceConnectorConfig.java | 87 ++-- .../connect/source/SolaceSourceConstants.java | 43 +- .../source/SolaceSourceQueueConsumer.java | 16 +- .../connect/source/SolaceSourceTask.java | 78 ++-- .../source/SolaceSourceTopicListener.java | 49 +- .../kafka/connect/source/VersionUtil.java | 6 +- .../connect/source/SolSessionHandlerTest.java | 43 ++ .../SolaceSourceConnectorConfigTest.java | 29 ++ .../connect/source/SolaceSourceTaskTest.java | 53 +++ .../kafka/connect/source/VersionUtilTest.java | 16 + src/test/resources/log4j2.xml | 14 + 57 files changed, 2196 insertions(+), 1181 deletions(-) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/codeql-analysis.yml create
mode 100644 .github/workflows/pmd-analysis.yml create mode 100644 .github/workflows/spotbugs-analysis.yml create mode 100644 .gitmodules create mode 160000 solace-integration-test-support delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java delete mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaConnectContainer.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java delete mode 100644 src/integrationTest/resources/docker-compose-kafka-apache.yml delete mode 100644 src/integrationTest/resources/docker-compose-kafka-confluent.yml delete mode 100644 src/integrationTest/resources/docker-compose-solace.yml create mode 100644 src/integrationTest/resources/log4j2.xml delete mode 100644 src/integrationTest/resources/logback-test.xml delete mode 100644 src/integrationTest/resources/solace.env rename src/{main => template}/java/com/solace/connector/kafka/connect/source/VersionUtil.java (88%) create mode 100644 src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java create mode 100644 src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java create mode 100644 src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java create mode 100644 src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java create mode 100644 src/test/resources/log4j2.xml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..29071a8 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,9 @@ +version: 2 +updates: + - package-ecosystem: "gradle" + directory: "/" + schedule: + interval: "daily" + allow: + - dependency-name: "org.apache.kafka:*" + - dependency-name: "com.solacesystems:*" diff --git a/.github/workflows/build-test.yml 
b/.github/workflows/build-test.yml index f06d7b1..e314943 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -7,20 +7,69 @@ on: push: jobs: + dupe_check: + name: Check for Duplicate Workflow Run + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v3.4.0 + with: + concurrent_skipping: same_content + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + build: + needs: + - dupe_check + if: needs.dupe_check.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/master' && github.repository_owner == 'SolaceProducts') runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Setup JDK 1.8 - uses: actions/setup-java@v1 with: - java-version: 1.8 + submodules: recursive + + - name: Cache Gradle + uses: actions/cache@v2 + with: + path: | + ~/.gradle/caches + ~/.gradle/wrapper + key: ${{ runner.os }}-gradle-build-test-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + restore-keys: | + ${{ runner.os }}-gradle-build-test- + + - name: Setup JDK 8 + uses: actions/setup-java@v2 + with: + distribution: zulu + java-version: 8 + + - name: Validate Gradle wrapper + uses: gradle/wrapper-validation-action@v1 + + - name: Install Test Support + working-directory: solace-integration-test-support + run: ./mvnw clean install -DskipTests + - name: Build and test with Gradle - run: ./gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT + run: ./gradlew clean test integrationTest jacocoFullReport --info + + - name: Upload Test Artifacts + if: always() + uses: actions/upload-artifact@v2 + with: + name: Test Results + path: | + **/build/jacoco/*.exec + **/build/reports/ + **/build/test-results/**/*.xml + - name: Publish artifacts - if: github.event_name == 'push' + # Security Measure: Do not publish artifacts from dependabot builds + if: github.event_name == 'push' && (github.actor != 'dependabot[bot]' || !contains(github.ref, 'dependabot')) run: | if [ ${{ github.ref }} == 'refs/heads/master' ] && [ ${{ github.repository_owner }} == 'SolaceProducts' ] ; then echo "Using master on SolaceProducts" @@ -56,4 +105,30 @@ jobs: git remote add origin-pages https://${{ secrets.GH_TOKEN }}@github.com/${{ github.repository }}.git > /dev/null 2>&1; git push --quiet --set-upstream origin-pages gh-pages; echo "Updated and pushed GH pages!"; - fi + fi + + - name: Cleanup Gradle Cache + # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions. + # Restoring these files from a GitHub Actions cache might cause problems for future builds. 
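+        # modules-2.lock is Gradle's per-invocation dependency-cache lock and gc.properties records cache garbage-collection state, so stale restored copies can break dependency resolution in later builds (a note based on Gradle's general caching guidance, not specific to this repo).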
+ run: | + rm -f ~/.gradle/caches/modules-2/modules-2.lock + rm -f ~/.gradle/caches/modules-2/gc.properties + + - name: Publish Unit Test Results + if: github.actor != 'dependabot[bot]' || (github.event_name == 'push' && !contains(github.ref, 'dependabot')) + uses: EnricoMi/publish-unit-test-result-action@v1 + continue-on-error: true + with: + check_name: Unit Test Results + comment_mode: create new + fail_on: nothing + hide_comments: orphaned commits + files: | + **/build/test-results/**/*.xml + + - name: Publish Test Coverage Results + if: github.event_name == 'pull_request' && github.actor != 'dependabot[bot]' && github.event.pull_request.head.repo.full_name == github.repository + uses: madrapps/jacoco-report@v1.2 + with: + paths: build/reports/jacoco/jacocoFullReport/jacocoFullReport.xml + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000..dc2420a --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,96 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "Code Analysis (CodeQL)" + +on: + push: + pull_request: + schedule: + - cron: '38 15 * * 0' + workflow_dispatch: + +jobs: + dupe_check: + name: Check for Duplicate Workflow Run + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v3.4.0 + with: + concurrent_skipping: same_content + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + + analyze: + name: Analyze + needs: + - dupe_check + if: needs.dupe_check.outputs.should_skip != 'true' + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Cache Gradle + uses: actions/cache@v2 + with: + path: | + ~/.gradle/caches + ~/.gradle/wrapper + key: ${{ runner.os }}-gradle-codeql-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + restore-keys: | + ${{ runner.os }}-gradle-codeql- + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: java + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + - name: Validate Gradle wrapper + uses: gradle/wrapper-validation-action@v1 + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹī¸ Command-line programs to run using the OS shell. 
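+    # A hedged sketch for this Gradle project: if Autobuild fails, a manual build step using the repo's wrapper could be:
+    #- run: ./gradlew assemble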
+ # 📚 https://git.io/JvXDl + + # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 + + - name: Cleanup Gradle Cache + # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions. + # Restoring these files from a GitHub Actions cache might cause problems for future builds. + run: | + rm -f ~/.gradle/caches/modules-2/modules-2.lock + rm -f ~/.gradle/caches/modules-2/gc.properties diff --git a/.github/workflows/pmd-analysis.yml b/.github/workflows/pmd-analysis.yml new file mode 100644 index 0000000..84ecd6e --- /dev/null +++ b/.github/workflows/pmd-analysis.yml @@ -0,0 +1,71 @@ +name: Code Analysis (PMD) + +on: + pull_request: + push: + workflow_dispatch: + +jobs: + dupe_check: + name: Check for Duplicate Workflow Run + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v3.4.0 + with: + concurrent_skipping: same_content + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + + run-analysis: + name: Run PMD Static Code Analysis + needs: + - dupe_check + if: needs.dupe_check.outputs.should_skip != 'true' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Cache Gradle + uses: actions/cache@v2 + with: + path: | + ~/.gradle/caches + ~/.gradle/wrapper + key: ${{ runner.os }}-gradle-pmd-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + restore-keys: | + ${{ runner.os }}-gradle-pmd- + + - name: Setup JDK 8 + uses: actions/setup-java@v2 + with: + distribution: zulu + java-version: 8 + + - name: Validate Gradle wrapper + uses: gradle/wrapper-validation-action@v1 + + - name: Run static code analysis + run: ./gradlew clean pmdMainSarif --info + + - name: Upload Test Artifacts + if: always() + uses: actions/upload-artifact@v2 + with: + name: Static Code Analysis Results (PMD) + path: | + **/build/reports/ + + - name: Upload SARIF file + if: success() || failure() + uses: github/codeql-action/upload-sarif@v1 + with: + sarif_file: build/reports/pmd/main.sarif + + - name: Cleanup Gradle Cache + # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions. + # Restoring these files from a GitHub Actions cache might cause problems for future builds. 
+ run: | + rm -f ~/.gradle/caches/modules-2/modules-2.lock + rm -f ~/.gradle/caches/modules-2/gc.properties \ No newline at end of file diff --git a/.github/workflows/spotbugs-analysis.yml b/.github/workflows/spotbugs-analysis.yml new file mode 100644 index 0000000..f1cd934 --- /dev/null +++ b/.github/workflows/spotbugs-analysis.yml @@ -0,0 +1,71 @@ +name: Code Analysis (SpotBugs) + +on: + pull_request: + push: + workflow_dispatch: + +jobs: + dupe_check: + name: Check for Duplicate Workflow Run + runs-on: ubuntu-latest + outputs: + should_skip: ${{ steps.skip_check.outputs.should_skip }} + steps: + - id: skip_check + uses: fkirc/skip-duplicate-actions@v3.4.0 + with: + concurrent_skipping: same_content + do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]' + + run-analysis: + name: Run SpotBugs Static Code Analysis + needs: + - dupe_check + if: needs.dupe_check.outputs.should_skip != 'true' + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - name: Cache Gradle + uses: actions/cache@v2 + with: + path: | + ~/.gradle/caches + ~/.gradle/wrapper + key: ${{ runner.os }}-gradle-spotbugs-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} + restore-keys: | + ${{ runner.os }}-gradle-spotbugs- + + - name: Setup JDK 8 + uses: actions/setup-java@v2 + with: + distribution: zulu + java-version: 8 + + - name: Validate Gradle wrapper + uses: gradle/wrapper-validation-action@v1 + + - name: Run static code analysis + run: ./gradlew clean spotbugsMain --info + + - name: Upload Test Artifacts + if: always() + uses: actions/upload-artifact@v2 + with: + name: Static Code Analysis Results (SpotBugs) + path: | + **/build/reports/ + + - name: Upload SARIF file + if: success() || failure() + uses: github/codeql-action/upload-sarif@v1 + with: + sarif_file: build/reports/spotbugs/main.sarif + + - name: Cleanup Gradle Cache + # Remove some files from the Gradle cache, so they aren't cached by GitHub Actions. + # Restoring these files from a GitHub Actions cache might cause problems for future builds. 
+ run: | + rm -f ~/.gradle/caches/modules-2/modules-2.lock + rm -f ~/.gradle/caches/modules-2/gc.properties \ No newline at end of file diff --git a/.gitignore b/.gitignore index 6705f1c..5014111 100644 --- a/.gitignore +++ b/.gitignore @@ -36,5 +36,11 @@ local.properties *.launch /build/ +### IntelliJ +.idea +*.iws +*.iml +*.ipr + # Unzipped test connector src/integrationTest/resources/pubsubplus-connector-kafka*/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..846409f --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "solace-integration-test-support"] + path = solace-integration-test-support + url = ../../SolaceDev/solace-integration-test-support.git diff --git a/README.md b/README.md index 64ac1c4..52ca58a 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ [![Actions Status](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/workflows/build/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions?query=workflow%3Abuild+branch%3Amaster) +[![Code Analysis (CodeQL)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/codeql-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/codeql-analysis.yml) +[![Code Analysis (PMD)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/pmd-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/pmd-analysis.yml) +[![Code Analysis (SpotBugs)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/spotbugs-analysis.yml/badge.svg?branch=master)](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source/actions/workflows/spotbugs-analysis.yml) # Solace PubSub+ Connector for Kafka: Source @@ -153,7 +156,7 @@ In this case the IP address is one of the nodes running the distributed mode wor { "class": "com.solace.connector.kafka.connect.source.SolaceSourceConnector", "type": "source", - "version": "2.0.0" + "version": "2.1.0" }, ``` @@ -312,27 +315,38 @@ Kerberos has some very specific requirements to operate correctly. Some addition ## Developers Guide -### Build and Test the Project +### Build the Project JDK 8 or higher is required for this project. First, clone this GitHub repo: -``` +```shell git clone https://github.com/SolaceProducts/pubsubplus-connector-kafka-source.git cd pubsubplus-connector-kafka-source ``` Then run the build script: -``` +```shell gradlew clean build ``` This script creates artifacts in the `build` directory, including the deployable packaged PubSub+ Source Connector archives under `build\distributions`. +### Test the Project + An integration test suite is also included, which spins up a Docker-based deployment environment that includes a PubSub+ event broker, Zookeeper, Kafka broker, Kafka Connect. It deploys the connector to Kafka Connect and runs end-to-end tests. -``` -gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT -``` + +1. Install the test support module: + ```shell + git submodule update --init --recursive + cd solace-integration-test-support + ./mvnw clean install -DskipTests + cd .. + ``` +2. 
Run the tests: + ```shell + ./gradlew clean test integrationTest + ``` ### Build a New Message Processor diff --git a/build.gradle b/build.gradle index 3623633..e827971 100644 --- a/build.gradle +++ b/build.gradle @@ -1,10 +1,17 @@ -apply plugin: 'java' -apply plugin: 'distribution' -apply plugin: 'org.unbroken-dome.test-sets' +import com.github.spotbugs.snom.SpotBugsTask + +plugins { + id 'java' + id 'distribution' + id 'jacoco' + id 'pmd' + id 'com.github.spotbugs' version '4.7.6' + id 'org.unbroken-dome.test-sets' version '2.2.1' +} ext { - kafkaVersion = '2.4.1' - solaceJavaAPIVersion = '10.6.0' + kafkaVersion = '2.8.1' + solaceJavaAPIVersion = '10.12.0' } repositories { @@ -12,16 +19,12 @@ repositories { mavenCentral() } -buildscript { - repositories { - maven { - url "https://plugins.gradle.org/m2/" +sourceSets { + main { + java { + srcDir "${buildDir}/generated/java" // add generated sources as additional source directory } } - dependencies { - classpath "com.github.spotbugs:spotbugs-gradle-plugin:3.0.0" - classpath "org.unbroken-dome.test-sets:org.unbroken-dome.test-sets.gradle.plugin:2.2.1" - } } testSets { @@ -29,37 +32,62 @@ testSets { } dependencies { - integrationTestImplementation 'junit:junit:4.12' - integrationTestImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.1' - integrationTestImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.1' - integrationTestImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.1' - integrationTestImplementation 'org.junit.platform:junit-platform-engine:1.7.1' - integrationTestImplementation 'org.mockito:mockito-core:3.7.7' - integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.7.7' - integrationTestImplementation 'org.testcontainers:testcontainers:1.15.1' - integrationTestImplementation 'org.testcontainers:junit-jupiter:1.15.1' - integrationTestImplementation 'org.slf4j:slf4j-api:1.7.28' - integrationTestImplementation 'org.slf4j:slf4j-simple:1.7.28' + integrationTestImplementation 'org.junit.jupiter:junit-jupiter:5.8.1' + integrationTestImplementation 'org.junit-pioneer:junit-pioneer:1.4.2' + integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.12.4' + integrationTestImplementation 'org.testcontainers:testcontainers:1.16.0' + integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0' + integrationTestImplementation 'org.testcontainers:kafka:1.16.0' + integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.5.0' + integrationTestImplementation 'org.slf4j:slf4j-api:1.7.32' + integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6' integrationTestImplementation 'commons-beanutils:commons-beanutils:1.9.4' integrationTestImplementation 'com.google.code.gson:gson:2.3.1' integrationTestImplementation 'commons-io:commons-io:2.4' integrationTestImplementation 'com.squareup.okhttp3:okhttp:4.9.1' - integrationTestImplementation 'org.apache.kafka:kafka-clients:$kafkaVersion' + integrationTestImplementation "org.apache.kafka:kafka-clients:$kafkaVersion" + testImplementation 'org.junit.jupiter:junit-jupiter:5.8.1' + testImplementation 'org.hamcrest:hamcrest-all:1.3' + testImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' compile "org.apache.kafka:connect-api:$kafkaVersion" compile "com.solacesystems:sol-jcsmp:$solaceJavaAPIVersion" } +pmd { + consoleOutput = true + rulesMinimumPriority = 2 + toolVersion = '6.38.0' +} + +spotbugs { + effort 'max' + 
reportLevel 'high' // Decrease to medium once medium errors are fixed +} + +task('jacocoFullReport', type: JacocoReport) { + description 'Generates code coverage report for all tests.' + executionData tasks.withType(Test) + sourceSets sourceSets.main + reports { + xml.required = true + } +} + task('prepDistForIntegrationTesting') { dependsOn assembleDist doLast { - copy { - from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0]) - into (file('src/integrationTest/resources')) + copy { + from zipTree(file(distsDirectory).listFiles().findAll { + it.name.endsWith("-${project.version}.zip") + }[0]) + into sourceSets.integrationTest.resources.srcDirs[0] } copy { - from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0]) - into (file('build/resources/integrationTest')) + from zipTree(file(distsDirectory).listFiles().findAll { + it.name.endsWith("-${project.version}.zip") + }[0]) + into sourceSets.integrationTest.output.resourcesDir } } } @@ -78,6 +106,81 @@ project.integrationTest { } } +project.test { + useJUnitPlatform() +} + +tasks.withType(SpotBugsTask) { + reports { + sarif { + enabled = true + } + } +} + +// Workaround to generate Sarif report +// Based off https://github.com/gradle/gradle/blob/v6.9.1/subprojects/code-quality/src/main/groovy/org/gradle/api/plugins/quality/internal/PmdInvoker.groovy +task('pmdMainSarif') { + PmdExtension extension = project.extensions.getByType(PmdExtension) + dependsOn classes + outputs.dir extension.getReportsDir() + doLast { + ant.taskdef(name: 'pmd', + classname: 'net.sourceforge.pmd.ant.PMDTask', + classpath: project.configurations.pmd.asPath) + ant.pmd(failOnRuleViolation: false, + failuresPropertyName: "pmdFailureCount", + minimumPriority: extension.rulesMinimumPriority.get()) { + sourceSets.main.allJava.srcDirs.each { + fileset(dir: it) + } + + extension.ruleSets.each { + ruleset(it) + } + + extension.ruleSetFiles.each { + ruleset(it) + } + + if (extension.ruleSetConfig != null) { + ruleset(extension.ruleSetConfig.asFile()) + } + + Provider reportsDir = project.getLayout() + .file(project.getProviders().provider({a -> extension.getReportsDir()}) as Provider) + formatter(type: 'sarif', toFile: new File(reportsDir.get().getAsFile(), 'main.sarif')) + formatter(type: 'html', toFile: new File(reportsDir.get().getAsFile(), 'main.html')) + + if (extension.consoleOutput) { + formatter(type: 'textcolor', toConsole: true) + } + } + + def failureCount = ant.project.properties["pmdFailureCount"] + if (failureCount) { + def message = "$failureCount PMD rule violations were found." 
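+        // Mirror PmdInvoker's failure handling: ignoreFailures or a violation count within maxFailures downgrades the result to a warning; anything else fails the build.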
+ if (extension.ignoreFailures || ((failureCount as Integer) <= extension.maxFailures.get())) { + logger.warn(message) + } else { + throw new GradleException(message) + } + } + } +} + +task('generateJava', type: Copy) { + def templateContext = [version: project.version] + inputs.properties templateContext // Register context as input so that task doesn't skip when props are updated + from 'src/template/java' + into "${buildDir}/generated/java" + expand templateContext +} + +project.compileJava { + dependsOn generateJava +} + distributions { main { contents { diff --git a/etc/solace_source.properties b/etc/solace_source.properties index 73b7d48..0290118 100644 --- a/etc/solace_source.properties +++ b/etc/solace_source.properties @@ -32,6 +32,9 @@ sol.topics=sourcetest # Refer to https://github.com/SolaceProducts/pubsubplus-connector-kafka-source sol.message_processor_class=com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor +# If enabled, messages that throw message processor errors will be discarded. +#sol.message_processor.error.ignore=false + # When using SolaceSampleKeyedMessageProcessor, defines which part of a # PubSub+ message shall be converted to a Kafka record key # Allowable values include: NONE, DESTINATION, CORRELATION_ID, CORRELATION_ID_AS_BYTES diff --git a/gradle.properties b/gradle.properties index fb7cb53..16cc23c 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1 @@ -version=2.0.2 \ No newline at end of file +version=2.1.0 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index cc4fdc293d0e50b0ad9b65c16e7ddd1db2f6025b..e708b1c023ec8b20f512888fe07c5bd3ff77bb8f 100644 GIT binary patch delta 18978 zcmY&fV{@PlkPJ5)+qP}nwr$&ZV%xTD+uGRNVB>7e4eq`Bc2)Barh0m&r+aQ1d}I#1 zF#sCA44g!o7Xk!i0}2F0IK{~rBc=Z*bV@a}0ia;$cOLt5HS1a4@*VT_shE|gj|F)G_Q`4pn)*lCWG0!y+Cgg6 zvR4x*bZ%013&osqCFlqib0Yg~auk(8d~R0Id*-CETJ*z@aX39Dra2_%UKj0GIDGGU zMxbIwfRUS|@Ax4M@}S~jI@7q47FsgZn+Pl3!P99&*9AWzgW=s^6EjU_uO4Dpz1|no zugS`Hb$T5805c4isS)p>pjDrj>z9*27i`c(@Ku({PegC>!>!Z~edi(`LD~;#fy=~l z?i=(j)-iuwf%Vh_qq%w}H-s44-U|p`aloR{g`rLqp+OCTJ9jH?z34kqqD(r~rd?z( z#KznzzQfsQg%&2PcHVV4iz+lYI0gP0MffDN0+}^u;HxPn3n>0KL|Q=st|C z1qWk79y&a6!KMq&+vqT8G&;O-MxzvaVczkxFz^1die0bg~rDxSUs<~=wa6^}~)iHbAcu zcs20JA|-eCu!q;xPBA3QEn`j^dFdfZ-jN2xxAq-!Asv%>)mQ@}d5S0PJoWfQ6|Wvc zk)|$IlDX}5D-@@Gj?&m>HgP%O%nT7CIj8G?;QqL{gtvO$j0 zj4nK_(?x`@CFbJljF{~wfRiCEEz_N|>l{^~H>%yRh*b5PTT$C%AMj4sq?pmVB^RXJ zU{+3KSdD6iKC&MK1xyp}a+TI^cMhA|P$r4CR&3TD^dGg(HRte80B^=63KLHexQ)C&jZ;BbfresuW5r&E0;Ps#PQPwJxdOiXPlq9QyjQRqTs^KRCO0wV(Fd!g5e}eosBS03r6|U&OK|lt7q!1$T z0_wY}D9UKx`eK;t;!#Sf52WniYD)i>*i@OPMaY$wugt^ncZT(K(pP{=%S+I_-1DMx zM&C0;oAUD=bZjxnxCq}l{!`Dn&%BmX1B9=D4+x{5jN)Dkl*Rds1dp1efkf*+#NbS% z2IY~#UZ6Yw4B9i?M0Um)LrLIuPOK_}0;VK?P4z-y5xuIRK~=#-;M7g4%d?N*&<*xc zLikH5(ZjnVtEpYP;@HQu64#f^oFyMaa}AQ=({5lTdb;W@rZCwb>#nDAyXvjVvlHca zjS?-`&tNP0ur>w?2wpMqeIcILC9$Y}lXcO&Qm%Pk1~!6xGs zDG5d(n>mCEt%!`wL~D%j02lX19gzF8;-L;bre)V$WohLNnbM0Aiw^!Il^wW#P_E{o)4)~AWnBHd^zCpWq5ZP+!Oqt51@ zE)UAYk|&|4%0mxhuxVxN_(_H)mJdUAM<}^}+L%_TTv_d;2vwEaywb=-FTfR?Px^63 zTz5#ncoy^ItJtk5f%YR!2Ea+ZnD9?P7|eU&=iEz9^p^$4al}yjvalDZB%DAvYc;B1}qVALh`w z+_UKPlj`^&J44~OaRBUoj7IXW(LqtTdXysq1Xu2}{VIZksB+-{qlpY;D&Pna1VoDn z1cdni_&0tP0jSe~_QIWT4@iJ$l6CSHMj<~7HOEIoTu63LPPa=cCX?zFOH8HKPSzYu z4uv#17!yIuh?)g5%0~VS-N0OR`q{Z~WM{R`0@{RWymVXUiUx(Pkzv4IW zu&)JChj&uu*Y@A+$HfmtZnbZrGD4p-NwyNEs#$`qQF3##uN5S9C+Z%)M{x$#&1>T@b*bMj!ql1@ zB~D=x?Ta01Yq!TTi)vMR^!e-}Ds{sKnZIJhTC~_?#Z)P?%Zjxra0-TJ&<%?2We~4V 
zb?iblt9rLYDZS&S1JWE-yz2`0;41EU5aEG)pEORdv&&xPKR#ic&?+I*aWtINkJw>3 z{CK}H(j1XlIB0vt4|LV{s_!|{I%;}F|Ktsi>x=l-Oxx4!m)|?2y;S=3OnX-LYSP(h zc8VVO%jqx?aMs3W=F8zom$UziMdn9udC5Y(%dC=or*mClNuH{=TH+X-Jcc~qIo<o<`%}#F9`{9T3`T=#n0~?V}6cOU1esENCscJXrcz4Ni!hRiot3# zp=I}_J1)(bRR9HFUVD7!D%pXt4pT0TSQoM2d<}KZVI{HzBTBg-377LKedw#Ogxse6 zDq$S1N_deJ*%(KqCJ~()i#geY?eAGNjb04I86PGGiU~@LZ8J}Z^LFE*4;-wD^vD*k z4VQM@&jN+G*vJ(s?mdLDyA{A|fH=0cQC>H}9s5q%mrgMmc`Er8i6rY+BLXYA8j;m4 z{FP3+qX#AowL@r_a${oAWBKNJBDR3(2Kb`$h}8a%bv#Ni0nS>^IoS1_Q(y8b~E9Z4j` zSyxcg$WM=jw@zVVuSn%OKehRE8D5rAVXF%*L2M(xH^&AD}H}OjG!Kwh4v?% zyl(Uun0j(=y*Gm7SYF^BYnP>FK|8xcgv2i+S7@?3(*_jrKuxYAZYY;>4I@G0puOz$ z5CwM?B2*z0!{dk^iE<{$+Sy+jnm`zreME7`Jjl;X+f=sdcm+H+uzf~ad5uMJ0!x@ zGi@K464}S(anF0ETUOoHYZRYYsQ_zm-g{ztKxR^qBKrtue~gl?Th2OeuaNUV)7Ys6 zu5_I84NYMd(hgXEby#B;pwDJ9wYrTQFX1b9Qakc}jU8t;emTWHQ4@n~)~*V*7ekA; zwQ~u!Rz~phaXpV~;j0-EETa7Mh6NwK3LSp2v2#{2%|X^sd958s&0ToSjhxN)1QlPI z-1)Fm6gO@5v1!Q`eoW9h=`=i&?pI{H5xx5->BhynbOCg*vakmT_EFn)6Q<=GisJwv zs2_n=0#v>Cg=hR^*%7R?0ucKQfx&@k+NA%SX@rdTj*CB+kOu7EQM=RFK13+>E7r57 z&AzPt@(GQ305?ZKvGFtzV50VLb22KNXJlSA{8=6FP;315#Fd-JHq4d$+0sd!YBo z5-z?f2fz_vKskHl}OEMZ-|He66T6yyq(Amoj2@Z=qRQE?*X@<*on{g zyH$UMjw?7$7y&}S|K)hve4q*H4`%3hSNPW^^nUMn(K+$pBtSf$w>v0Oz~M>W z*77aeq9Pet%Gpk=j%1thBo#P{e`G%@id{EB9%9J9AC+G#mZLzU+K>b+P=LmNiHh#t z_He3pgI2h)dZ}qjUAf-sk-f3v03skk5NCthQ1$GV$AeDp7F+_d%Clxft~_1~v_zLE zK%-`@VtcLP`{g?2_VK>Fw|<=9Q~_8nx^VXI{vCVARS}|Y?3W_lX7V#30)=7|PXfDv zS9fB<*hBBDH}_q2q^}_6ncPV|*$gy(w=9J3f!*pca|%IRN@2{#=wT4X^sSV8I>QBd zmFcUm)7}N|(2|CT3PSSN!`8L%A9POd~{nrw|rFGn|`9WWXF@x_t zCmg)(?XR>&On#Y!*9T)jJ(*YL?LghD6XQ!ZO49D}EDsU@BhpU3k2`&l&=F${$P0H* zRmuErx%KnV^E#MT8uRsJ!_MXBeqem_2a9k?r=t-G4}jLtQok~Z?c@By>Vl=-d;oWJ z^`O~g{$V{_F7Em_KLP=I(;QV}MM8!8R#?er%X5Szc>C5Rf&Cye3+=|(gau*o*Uh=a z4c>>n>uVnDo5)9X9bIBeNBv;1lfG+(Fc&#KpKXZ*4CssJI{O{4 zP4`_uN3~L{ zZ^A;vVgN*mFbyeY{vY@_U}uF-+)p~%Ta6{X(4IKWp=e^kc9Lo7=u!DCuS1WU6Q>F(4XdjQ zXRh*#F0TRNHHWU=n(wJ>b@yF1oWl$3bIUibW#{E<&09V`;`ZueR@WYiZ}HLh_-Xwt z1=-~}5gi#Be2U}fIvM3p1?A+7i$8A0*;W;o%Z(XJISp!n8ZDIogMDRXP3nR)_q3QQ z>)wy7`Q&iw`8Uh_l0V57SCw|JG*xkhtLnXSpq%XgR+h;BSlf6TpIFPK`8Y~1TNrM- ztN5E+Z!P)d`Q$9SUBbFGsw2ZxSa08=*}wdiT2^S9*Wc54Tv|C%EkA3wty>)nqY{a1 zc;6!T>W|(7iaX4=`o;ua-|P*D3$nysrMDuViTDD3QhQ zV~b$&MO)#u#$pFJ@hg~i#Yx@3+yRs}KNxEt&L!qpD~U7jS-&s|>+JC}cph+Elt=vT zjDA@Re?;2;1c-o~@pJs>k!*;D5?^mrZ9Jl!4y9=oKbg8MPQY`*w3=$Y*Kkx$)-L0B-%fc*I${@wZ=+iZi^jny9^87)bD z(@$)EO7fBAO>3C<{Nd464F-2ZTX%f>4D#LEt13F@4D$>#DX&Bh!tK>Gv!`pYN*GUZsVNko?vMn4y{ zeFe?b=_?TO7aWH-DZck=W%Rrx!r%^~AS{+me);YQy!FA7#kD-m>$%SZPVpEu{i|5>W>aOWt5nsrPCxf>YJn*x z96K}st(zdkmoS1H(~7!r6%p10{L?})j-MN8I|aOnpDhf6dgvn0O`PO0PqyaWm%AyTC^X z+PJW>m?e~5zFT7{n9&TMG94+4+v{0!1B06wkf3BV`Lkii1+ZKrCho9rzmz zl^$f8Z4eX+0&}DOs8>i=vVMj`!u5mD(hBkL>J8@;#pe?w$52A?crq-vfl+GU5CJ0c z4gFnxgNkoQikwx+kY*VR^3h!xyuu=pza9@7Li)ePK6a-}%^Q;icWvU1BX0uNafR2` z-3qpWxwN@{LvQUZKeTMwEbsr2G}|7VFqmE(d;24U_CjnMiv042bn@nAROL9W-KT58 z#$?d?u)Pl`0!HaPY{?72<>%xAByGO*@nAw{45@$Xe3bM=cdc@fjebLF!83usd@!~+5poeg zY``UxrPT8C0{V)m!f4_9)*brYyCU=fM8$rxx-_~l*+^qp(yqWtvQPiw$o6Kve-H@fdSW39yq;A!%e!$R#iZ|84d^v~C{lAzW) zdH2~|xAIjOl&Sl4Mi2R^zv%9`cOZoM*DiJ6w(I@}%d>CN6-{6POHP2M2m4h+|pE4!wnkX{eprZ6FM3vd(Vao;`R~+@!{=$8nmtj|6X1HmD)~o2kt{sLSV=_w# z`RUCO$mAZ6)EPDTLv)zITp4Gg@#M!a?8-ZWuL1F>HMz$G2CrV|XY zsl$6|OvWT#Z%7-?AMxj6S&&DJx@Ha#QLug@+BJHC#(@3QDFEg}$YaM!9U7e22qDGzmS#Vt? 
znH9~}mX2U#&X)aPn=LClZd|0JK^NCGaOS3Tdb^@x| zPKl?tJ4E=uTS~JkoShTPZ~cl43!S9{WkcLBGpzLpO3(F>OMALhd@!bU(|YNbyqpt< z@nG&z3~)+$3@8d_%Cw1pP2ai2&LiB?#MPL4xhL+;*p@!zf;6O^b_oYStwVGI8WI;` z@9rX6gry_~gufq-yM&LziRzBya0&=Z)wK}v;+E`^%cWoK(yMgAJ`~P3tDRfMx50Y( z2YE|PEfa0t^p{#8&b{}_?i}Lw=^HR1&iw)u6FKg~a17B(e+~PS;(|J)C2&}4*z6FL zUfbF1ARjCIO{PF8&0BJHPV{gEs0MwHvC}Nf6HW0Dok%8k{ie9uxu|J2K^3MRRFEqr zkM@Y%qttsk9r^m^e9OEW#&2L%VslEp2ap1Uj%L{U`J~bp42B6)p$SpU1@THbN8$CC zdWSTvK9J)Y%$9HurMDv`F1u&j`7c-dK^h+N*ZDNnWQm<hJ`rRQ@x*1lSSrqINhZd?dYc@LDaB$I3DE1O=5ys-jyu0UZJtVg~a_edMhNACA zIp#R+T@1;sSsGn7jmzG{Mfgc`spcC=Vt(#bM~_m)GIk{ORx|j3B@Ms1#zH0jjBe6e zs-I&=u^cM?v-5KOR(~5PSy&g@Fa7c4KeXAZ%;g`ImHiBsN=G#dyIkEl)|=9Ge2%kp zwY5{zmn0e*mBxAQZLjrG9eyL}gHfCG%gH!~@Et~t^yp#Ut`PaUEXlG}x{pkwe|{L% zu&*u>H_mSB&m5lvEWV3`L5{pD!QRXeQ*;r+OLnr_$UO}tPT92|mwigy_ST9y>swNf2NrjEX&!R;yd zUswqm>=LQi!3dY|QCM)C?q-9J?v}w6d^U&KNhS^Yb$1gHt8x(yD74gb`UWf-O(`_- zlWQjN%fBH?ekoupL#8=TiA~$Estd((`u6(n#^%ei41ucXlVGp{hmnH#V$ihs?l^|9DH6%Nq|SDx z3#+Os-LN*~$@1YKj(R=V?Bu&T49}oN>2g)qkSd3?3u}<>bf|*@rX9NZw;+FUK5-Zn z#$F@}@3QKh(=R~_usiDdYf)2!nA>2qYYimG>W08K@rZ|#jhneE3|UGVqklF>yJW6T zs4}wxIa~ZPbdEjL|6;DQR3l`l`g08lUc^WE|o9JjAW)JO{yB7Xv@kYz! zQ)V_^P%6*suv#Q3S6HuOXkq*UC~A85#FyyOEv6dLX2f7#;5kXdE@?J-s7I{2m~>p< zV_hyQ;Tq5>Pu-X7QV!Zwywvj%8uUj}=F_Ku8@{lqiX7_)pA3YCW*H4_|1GIW_=-Dg zjTp?l_U=r4I}Pl;s3wYJ4cT`3>%#SJA4hziXBgR z#YCgA9XUE8(Tiw?(Y_v9+7fo~{%8Q;wxb>(nB6yS#@KUjVH)I?UFt^Cm}+(Ys>3YN zJ?(iT$V+eYxmolYBFXyIJ;592jf8JI0B*gpg@j1IeI7?Cgy$Lw7Df*sz~07Jd*XL>wKX~T=u zC@1b_;ztMxF9U;-Ojg=Pxg7d)%|dRcg+!kR6~xe{=i}2kA~R}4xGh6IhN{5au7n1A ziqUDjV!3*7Xy(k9qBNqSKb;6H=RKM0b}Q6s7=Y$D#nC*E!<~KL_ai_3V(M=%KlC88 zu((8m?v7AGhqK(rnPGV+gS`CE_fbB*xf|gD4EG3vz|SG0qKs_G^5ad7Z^6|-DXR-5 z3VB^=Zrj|)P^i55Xfqhs-y$N}oxIgfC$J9wg@OcB`?fsOAE*NeK-no^Q2Divf4!io1EYfN-Gxp(U9&X8a_x6?*ytrOK)& z8ZWgga!4)@_b&m1+Xy~%4G-PhDJG6!#lyoP7JZT)mgHE)9W2klB%}35^IXGi4bOKn z5OzRNA+Ykc{(+kZ%m4$T(ib|&=Z}_OeRBYkgB{|gZ^Kc1VD+5vQ8Dj(+z<>#G4~@2Bi95!w3(ZgZYVa{dFq^SoK--xe%dF+ zdR!MT(A7CvSK-8IYt5Y8C--Q7G-(yi=8^f1d03gdjbRzB15i=R11Y^?u(nF9$Qjyk z5LW{8%zl=-rp!o<@5jjEh^9I%HkOIHTorjW^RVo`b*m5BTg#IfqH0?8rmESyg_K2d z+q_Jp-Sa5oShA_8&Gm&m=Uk#}#aVAjABObM3`BV)R&wJq`8L-Z4P*|&f+t$;r<`Ap0dmT>ShRK6B`LQVZiVanmm05M&v(R-km#5@2JZ;(MA|N}SJIhhFJJ2#4kddvQb8_*p+SEE` zaPoD1maNPTTBXThr|?N!gP#Jj*j=pk))$b1EN)KXQH{EVcSR`h;kpcF!em_f*zZN` zR5FrwQX6OAE3rd#kzkLGEE@8@@U9E&z!r@dPGJ@aw~zE(BEpl{Pws1IG}n>WeJO(* zfd;7U7-?54#x#_q1d&J3?$}I*2~7h3M400_&>Kh>(4u_JC4r~j==$*oxCH}ILh*mh ze)IXeytlj$&;D-zh^sLfFd6=#CNJXr^hQDjN&R1*F71xw@gt(~BkSh|s)pNt%^{lgu?0Snf` zQ_FML!faXAbBx}%bB)<&lN0;QkfY8L#QnH_5Q%3@IbN9}`4G0gGQU6zHg65D2g>CR zu+@GM$GtkX7MA17iY(gRusz-IFCFB2ZZ^)PDCvgh8QIBiA#uSx?Q8r} zrx}o6RnOiUudtuQH%;5t$^!6VKyTPr86lU9YfW3?nxdt*W62%y{;~Kco1^6QhrEa-e0&eJ1R%XOhhE51?IP-zQ2cO)`}=V0!uCAJ zjq(**DmqCFt)*+#8&Q6Fdb7tXHeBgHR+hQ*{ofQoP*C+v;u*Lvk&`D~`)v`@Cg z@Dn7jcd;)PuZ49rZ$^3}fJaCbMyB^g)jFo4_>C$JzwDluStV%Jl{TD464Co_S(rCO zBusXK%Ib@7s}))j99_Ta6j6Vs;?Y6?gpVO~+zavV`X5LQMXOl+h#0$YzIvfZ-5TdL zWV2hfqC=^x+sYdphcx&mFE;VuYBZUrJE5o*Sqn;{EI%kL1VpDO~nC9jV z*h=*9YQM0~Cdxhh!X_f#+~NG9w_tit(m3f^`QQR^w{(#Ob-pDyYun&t8;#lLE5cGo zQ32u#?K!Vk%A;xq&?bt{U#e2tE8Tfg)p1kZC{UYB^$iQ>%KkyXpX=_IAAJxP(h8%j zNpEfa1gaA@IXxP;Tj?x`aMpNQ0!BNazFGRI8+&nFXzZq0LWBxw1XTkUtreZNpG!d zUjDyDQkaYs%b(J6GZ5pYtpm|X~o6c*LM;Y?2|N#loZt8P{hO!=Lq}RSYy@WvlmEq zIOv2aFYZfN%m4G=ZbIGz78L;m#DM&2pfbgc9(ddL@b?V5OtkURF8o{a9fwUTes z-rJ+Mvj2cjd> zk5y~68es7O$9^SI>5S`_L;=W9j;z>!uqNZB3>|ug^kLQ)b32eVvGECVPSv?7ZV#|= zq+?|IS4DagbNoPXk2gK;5rp!B9r@*&`Jz4e$!#86g%Ht-!y{jsp*9;m2`b`~Pl0kg zbwhiuo-mA*e4~+G!=xK2c}g+ArH#IUaGFXj1Zd3afQca_YPPTCOrCPPg+UR&qDW=H 
z$FOORy~s9x%xJx$kFM3+jxi=#0AY21(RDXn z!P?S&D#1uF)X4|O?8-$)@3kDGWXb`|GTG3I#!j)Nr;ru$G2v0+`?2Pct0a7i;0!8o zz=N66;t;A*Dzzpt0b=E{!Xb2cIm8Y6aMA{wnL}s_v{{)xtam95<_+phC8{bvgl#CQ zK0on!aHvRes<>|nTgkBqk8X`SX9W_buxqVnaNxAIucNk`R3(RP|1y z|FHfppTg;j=i*gR#~BMx)M&%Yo%z|#?h~R!hv%I#{s(^OvY##rMj0C?_|&fiurP_5 z6==pc`2eSIdPd%~fvg{U+i(Gw|5JHp(O!GICx=Q)qu#gf>2$^p;>yviI)7_WDWipu zSTLa~aLJZJS)`LC_rN7+InOw!Q;hZ8FRERPr9NkCGw*3bhe)I=_FZvNKSDEpA1`a~ zHt$SxY+OVM^}FuKy6BQwmaZ-hNHcXrgwtFLXd8ti7ug^V3c*Kadsp1T#=)aEm-MjA z!!rtA&I73@(NGhZn*Uqt9x-SfH|2fhoOLDGHf10*lckrZpcSA#+cocjk}O!0w!aAD z_i@_?OL$YXe2n_r*_zcc$eoP-hml3DDPM#>H+My~P`WBeZf-LL2rsX>xO`v;&GM;^ zAD_|D$sFH{`QBDu!HhFy<1ngMaSroF-V1cBu(uHVSKT={{(F z$h6}Q0Q$e~ycB~CoD^4M)Re3ZxRyY5e=r0YnFh)@P!NzCFc1(fkd(y@9w1G5TV_NE zi9e?_uI>)w#uJjz1|58iQ#=tvn2LrfxZi%e%pp#CvpgCoGeY*e1^=LO_Rt-zChDJ^ z&B|hRwRpMy{DUq8I-_06lou3&ZEvAJ+oNUVWZRskH%&3()HUSjO?32IDBPq%lSXN9tU8W?9EVp2&@;Rq+lFuWBn8z5pMf zPiClklDoL`{-WEmBH2)g9JJh7S6vMI(NC_MeM%f6jtK6=UwY831(;BM7!p7;(})nP zh8)R=s-gbituS={(_pPtrYN^KuXEe>BifHXvKLe0)Bq;H{kW5UZ(UdUiw<&9Dcv0!AGqH0#C zVS530lIf0i^Gl)sn=Z`Gcxi(=HhH~*CzDh^3-SNkDqBz8yg2`hG$bK_fKdEz!$<%I zQ#)f<*EEfD0CyFg-&7XIo6n$l1DeA_DObo$x;-02J|{DEgC%OPS>JKxzZd6{SiV>` za|@Y=wXF5B$V+;?^|^>+tZ~zloK-eau;gyLsuS^&Ssa2` zi&BC`85-eG3O4uj?#A-e|a_BB%Kd!n6_b`ot&v{TAf8^ z^fi9o_j-+l0n*=iEgueOp!CK%IA}<7)ag7mPDCS+!B3+K%S@B-%ep|+2UMNkd2&0k z5ySJ`RUe|Du9<<`?2`DH4rP?dnem@y!WbC#CA=`7im& z$uCeFy-1MLOaum;_$*EDlxRRvJMIA*8`Vo{96J~NjuhMaLu)+6OJJ_qq;#L-Wzyvr~?YPc$mnEOb4z3PwzyZ=a2iNRDX>$AN}PUygIdF<`i zn%IG+?QJ`TcL%@$_RRio9?RY^el_X<^-`XDC1N}#o9gJk`9}yUdk~Yl6zgXVQjq$##znQ*36&Zu|QvaqS8)d7dI)36))vXm1viLo-rRTHZj((`^`#KnbI z{zZvyc?)0wNIp!I3!GDvS3^(LUp5zIePR7(npc}KN4=K|V>fmGA!!$J&QbC21$w~! z4&j0aB`j9QJ#78Dt#?ljSYee$SSDyw!kJ&Z2*`%a4!Oe#UDsPpSfE;jgCh+j^~xmM zR2}w~Tbcz?6&NHfo$5vbA4PR6tP5I_Zw;g>!UM#iB=kx}^V<_fQI<_mtmXb?I;M~( zo#fn!pd2;_z6LYc8Unmm3oaiK6qfKjuUK zJtNpC!QCvml$jbYTHrvaM}UA5VSiM=*n=YxGllz5`db7-C=pzoI7%_DH_zacdzrsm zx&i1BCGgzReR^gq`TQmlRpHH}U+%bI6Ss(dBrN#}0WCQWCr!LmdXq^A8>0c5tvDKG z^w0iL)44Kyqy^rP@7oYVu*NN3 zF<$a-cU%x1HeRlLa$8Rab1M#!$_P8&IX_@9#-Db@5nnGML8UqIw{udG`doR~HuQ!s zF6E4@!7uFlbcKu6{P4aDVKgfb% z%|Wpc8IzBSyR2B}Ifoz<-F7UL@7$T$T*rQHURK|LudjRbK_U$@Hqs?2Q8f`*5)>1x zN0Xl~T;be*VSiOZ#52c3;T@s$W~LGy5KoZCQ~yYKlc9$wh68q`cPGVN!gGJoCdnDZ zkv-PYYtK-q%WJ2{1)Gdc`RJ{xxh5O8uWx=h!pi2-H^scvF?XW+Xc$`|2bbm5wVLpy zEPE%nl0U`f8m7RjU&V-Y6wWv~UzAm0u~sWHX?iY+Oa0jIEvJ?sVZ?-i(T?MCfm0hU z;_AyXoq^F*B>-pUQW^~&DL9YUUC0=1yC$oxD|HwQ z7|vZS48rB{F3d8zB}U|EqJa1vq#jT z8Oy6ovD_%$_4YRZQ6w=TPfe9yXS8kb1_^m{Nk|yYE&eLnCp$%Eo{*YOnXa&{G#qvK z0Z`qfW75p8a6KLF!cuM%J=B5;|EJ#-BQG1G9r;gI?i5v+h!LBNHjkR9@#Cg>w`uj; zPxZ9QMgSFz<}}xYW{jtMxUf4sB|7_ic8tqzfOV_Z;7@^Ec!DA~k>^ADxPg1IUA z%YUtJRneqH^aQc~1lfC1%53ld5*1(07uQ&@LN&5OuCs%_rV9hn zZK{N@r<7EqaDv?x6Rvewo)!m@T zA5DyKw-Jr(e#a7HymdVc_Dj0s5vnCG5)>RhHbx%x<3DVF)e2TK^#O~uYc zV_EnLIl%Wq!+Mr-Fj-ePX39I5@4DJ-`FCBxU-S7;U>E?-6cNFqV0s4uFUu0%IGL0} zY^*JTkulO4T!Tj#{R?KITB(OZ6c~4p$0go5?j%3r5J+YYbdT?irQ+a%!Slp^&vT6R z@e4PmbKFdm8`J2h@sX&($zre3Q2lk?Ykc9U-c!HLy=1^4H_|oiVnUU$HF18}d{pyIS= zq3V!V?O3n2>8nrKDrTJY(iBCr*5XyV7E!RIg9A_3c2nD?=JSA?IerZ_UeNXJKU*Ny z)lGSmyy&ngug*@BTWo@FNxn>#dlS8Bl*bCL)<<4(-zagk?0OeV?zly;(zV*Q@Nw^T z^lA*N7FnK9-aJ14IsCJ&9dMQOzY5TcCmET;oud6lve?~$<;z1Jsl__MI_%pqO^neP zS);NP6G8AilY{+>o;wy9Y@B=0Igu!UQE_VS0^N{?X`l%@D|=0ASE=iix{d;gMs<2>wE^8c(7 z_sh0I@}!_%k|Mrhv~f>W{NhP@s=ECA$;Zz*oM7D#@y2`lVbv+tCOkTb3eFy^+6H~s zm}?(qc_vlUQ&$}7Cy)*id}n}U=ZsWvz~M1)IX;a`#a{>6qp3jJKw9LYP^GOJm0rfc zMTqxnCE%Rk!bS7--$+60^FScb^*vB2i$6hQvcmgc6m^UQS-IC5T})`jg{fD*N4FJm zsg^8RkT_AQ+ynU|=oz$E^hnxy{A6(g1IYsOmSHOVJob@U253L1uS`#lJpj*j)cJ(P 
z^JlB^O_Z5J+ZNxKIFy9QFA;oV^#Wl;o|Yk9y2*SRrR z88}p<&SVgfG{X#Gd`6-#lPuNLapZReM^V}*%`lRMV2x2uAHmX@W-v>ujqt^u^jhJhI;?eHfNV;h~f4 z{g^I!1!4ZDY7&xILGO=6NK0p^;)1zDQ|drQt1p%0Z&1jV%Y*?i7{4o(xrHD99k$3R zM3s~&+sSyqFRgP=RCToPK8|0d%b>DJG~&);XUT3s!Mqx_;Ro6D7|N~|5L@4heP$faa~OqVKB%vipVgQA|Y#{ zB))dpMWy&k$d)YANV284lA*DL>`dl|k`fKd&e)e>ELpOq5|ZyVPs`8w=X^fz`JDGT z_dd^k?m6#y&#BZqg_956#TFiOx=%V2u>GD8((?Ac^>Pm{nSZBhN^?k8c@GVv58ZlEXhd+laRG_GN41Eh{;7gn4&z^e2^F zX%<2~rmp3srEX-!!m=!T(e6%XW!d3Jt0+A)Z>pl4gWBY+>@8wv^JRTU|NUfJ*QD@4 zanJOSa0jV{Jlt3;fw0UG3}kRhY4s56B-CeW?pFpEl*^rdm@g|;oV~6vIJ(HR?*_}> z8x~(tRFqtBk$Aw?=P@`^{BYZc#Qy6T>Yd6NxSeg&0>_1JE8Y5V**a(VQ$3(LW$1<83=;wuO? zX0idj*M|xl^vI8w;RO}?H3fY#q|#yYuQek6wuuHp9cbV8<>B|kQjL|Y%Tbzlr?0S; zNcP<^f!R-+lLXw5Rk6o;XoweCxfQQC8ZzuYuiV8MaN5*Vg?jb`9Q2xh-l}5)+gN@U zR)MU#?~kmC9)@#{s>2q&#Re*=lD+Aurc%Ey?%|%CmLI|z#!Di$TihC{6Ta1U$e>!z zEo7SBce|4^=hfhxbkf`Gt9`sMn;h#FFybTf_VJcn8CHeXtY@%)b9(c0Ii~Upsom}^ zW!G@1?{(}pP}X-*%ObkV)0M-lkmdMn^<#k~u}`m_@gq4N1I!m4G9)R@W|agV7By>P z*9xY3FKgjp9<$tI7hU+xJ0e_+vJ2zp64K+ag00x%5$dx?9yxE!G;$PoJ}RxLsu_Y z1Z)3fCyDPbFz_ZI$PsZZ^jSR$VMu&W=k1S6fuTCdG(W#!?GPIhl{a!IQ%mG|fxDFY z|5`^CQjSFimbC6Bi|>k{^sp4suVu@eu3*-P)VIsmrYrTIw>#TH8@C`ta#L zx`@bDjN_Wq1Ks^a>>|4^9%-Z;Fpuh$GpfR)o^uIAY+4FLL zs+VVxGLU!+)1#!bD)d6OeL*I^)=IqtC-*+^`RG3EdO=F$(Yh|H z7~a_Y^~n!6mB)4UCKjQ-6qf~&F{@c?6K&e@z{kD$MRmHwT#Y&!#GblXd0N4nF|Bk49NH7!S8OXiT+V#vQ&w zUK{f^kJdJ>6G(NeyNYCWn5V-Z-TvakmQ~i-dN%Il0H(h$Owcg&M$DmIqBUHwGzXW7 zK}3xq$wSvfNy}m+Zi?SWESeoRX?@1jtum+-_9YedE`a8wJ$oy3wl|5r+Ky=O-a4!8BWU;04KlSVo*rKSKn7tLC>Fj|f4e6BXtb-olm zru;F=1Hn6DW{!KSKBgNJkw()05o5B<{yIM7o!7Ez2oG-5(yMmGG9jom`g3{8m|fKP z#LEMX=B;hKc&RPR zFWvuVMO#;d(}=B4Div_oIIlQ)d59l*Kjuxk%jH7$s5ABR4joH*2mK$$@D#3or$vgl zaX3*%)8iH_+e5q}w(>pO4M$!#zmY$sw;;^%Q;AYT7Fp9IZxE9BIS`rZEc`r7OxGl# zKOZzD7A3zU&2sZ?%6?NnK&&FZh^cNMS3lY0o2%CPK(4KAA5Y?+HLdi2>7l52CkI{DYu<1wrVK4&_t- zDOva~O@?Tx7mMDJas5ZKJ+2h!kTCQB+X>jFeMf;HO9mzMq0u`xE&g`O!b{jd5%F{A zok8;d7qHuHhs;6i{-Z$nCCH&X__`kpB)Wk3D@Q=~egr5nfCA#&Ac_VZ2b6(1A24~q z2r%#kKMhy_k9@&XgQ@_<7sL(P14cK&5^q6pbv_e_{@7OiZf3TcJ7&kGTg4M8U>k9iSk=3uXtD*yj$nbM6b2n0 zISnkPGN4BK9#E6W549pd?@<)+CF7TwA6%y+z&^;dJDXuL%oG4+=~!S_4ujI@d%;Nh zQD8Ig7d->a6*8dSm@06xgaM<94s3fy@AwLK4DePdm_d>N_m5+NE(*9&D*<{yklqZY uj9US)c2K%ijq#ebCQv|sM|`^~gXKc3kP1>J^nssLkb6>Tk9OC#+y4Mph6Viq delta 18570 zcmY(K18*e^)U9iGYTLGL+qP}v)b^=u+qR9Vt!X>O)W&_ko12?_$^HpDPxgAU_Q*W= z{4{uDAS@ycg&8L|1PI6mGzf?Yu*eJ(*bf5>>{+7#F2M2t5}5kkCBy#4<-PNSvhkK2 zhY}r6!~r6YyE1NXQh97qv5e_2bm=i%n^IdNnz%vly*u?Q0^f3j+ zr1lyU2?b#bzf9d%N%byMCh{gW#UP)Zx*PgqMzSgArYWLZJPjz=sbte4zqtdp=Dc`{3; z6Ie6xhAc*@Nzk3*?!tA)VW(Dits$9WLo!ZN3-k3SY#B5I3g{D`<#`)Mwhei?1YKLG zF~PP0ZqX8W8tvL1*0O{yYA#i~h{0ISR-u;mu}2Zp^xhixQ(Gw(?qrGs5UtM084Rta z1L2&ldfY+3;3t-pYTpriO-;Zc;iX(=i(mfefCDk>ji(1&TYF_Kc7r)bK+8NTA1|V- z@irbzSEx~}5PB^SJ@ngdq3UtHfg}$?-HI{*r^EOKLoA7E#eZgTT%qZy1h)H64n-NZ zeLVPtCH#7hr23K+c5Z8E_ zRrPDPaPBD`|A=L$mEBIvuveBpIZRyv7Zk?=za@xuY+_bTu*uiYn}5<9`%A9+sY;0! 
zBZ*w#glIfzj^wR#uAuJFQ(|_BeJ62Mdkj756Q6loy=%jNKL)N+H>^@ynORo1_(49g ztYqOWqP-K~lUW{yeG~qYnTHL)pj=8I=!=X%E?0xit9}K|hI&#$$+KD4TJ-(_)PoTb zhiEdyQz=W`Thaby`kiUy0ydERrNn#3$NCCR_9|EAnlj0O`o>OC4IC>4D%FgebFIu& zmu7BFm9nR)6SM}yL)Ah}$2vyo8lt79Jtj=Z>b}&?>lxxJaP8@g^=q&LIyc{=VxC_Z z>jZVgVvl*kF-iX{Klt!hefwOH-ize3vj0FT%Yj3{fq=lkfcysukcBolK`by35c&VG z!T$e|LA#C(NK?CZL={07Xi>)OkQ>t&()}>eyHrgp3zP_Dv0dau0yZW*ko?X6ORGk z(Ud)|i+kzoH!%!#qbohg(PVQ(DE2p;=>0E+*f7HtfQ6h+#eiAA{=LY1orfR!UO)(j z`bfP57wNh4@yQ2WzPfzBd?z6;!FQWr6}{-*PG2HrC=e||R1<8^5q>zeI@#VEaq2So zwmYRmSJbzm*b!U2w?ELZaF%%=**5ZgS9)#2H4vS(wk!NE)pwR(k@B9o^5$qjgoc;x zVO{eLV2hiUh299nywS5Hp*DKG|3<(qh1QmclQokp9-g0NmLts;L$7-u$$>649w+UGpf(L~}tx zGE7|{0?k`iC%wRYrbBpxrFf?E9fnui z8inh_#e0_I4%gzy^U{w^1OC{EYfcFXqko z2_BWA;u(zty3Ud;2k7GtzPr+%AtRmxuY%^qQqB`)&JxRDGlxE<-+AiBC$Vy_=)4fv zu-}$@a&MCvZTo_18lA;PnlFU@_X;WH6%Y;m0s%oI1Qw!D19V`$^|d^{ z6D@s|N-D<8^qx1O*%Hj8)^Pa}2xg+uW-d4GrPs7sn~%AbmVxZg){H( zaN(`t!Z66vL5o?ko%j=b~7$7um~w?nPy-5KjYMnDM32FkiNMFS}khT~U<&f5hBP&NF%P>38;RQLD^3k5W3qr5WyfJjTu0Bgb>#q z7HBUKpeka3^|smf+XF=DSP%3Q@mmuSprzmda5^FadzJbG1_L+&ch?}CherO&0Nfzl zPr7f0po-gnl>KOffCHhluz$4EUWlI=&BuPWuTC7FiMag@5n8`*!8vA^F@tGPg$vZP zj#$R*qL_kqI~`*RlvvPXt1MR$fDm9&((yE!AUstZW%EhC61L11Su`Tx|Cr5vgf%B+ z)o{~(Yedv?lABKhaMjjuT|;TOpqPm-<-4X*8&i7n(B6L3xwB<*l%`-S0^F8~a|Am^@uZ#3)FM5F!RnMhipzUrifXeUa4LC8%V#RH|lX z(@|}^Rgc;e+FOJ&+yq$5&nKn8oAb3;b+cOmTP5xg5G!y+UNj{NNJL<4u1yX3*{Xkq z`H#V!`OYqY;oMetIApAzsob6r+54qj(%L-0i~mBr1x-xxy;C67S^ZXZ#M5+8abndk z_}pXScy=*fE=wLwO-}c@r1L{}x6_0>4L?57Vr71KdC}yBou+&%(+;VC&sGVumqiNB zR>w`jr%Pty`utxkdh4hf0rU~9lpXU`GKvN6rKuo*?$T;>s`JiVL~Hl6k3Q%ND>6@} zZE_A#A}_i0vJ~hUGH$UF4XN7GRmYg)vg|jWD_X{SC$_nyqQRn3(QU27%C~ezy=}LX zYDd+bcaq6xSU!kDlXAk0rcdA7S5><_tIulA?)AjqXP;2YR%>4dnXkTv!JhtkG1U#! z$xs1!UBNPHjdcZB*l?ue>amyZCjAm~WHXN?(L?OYgR;6(sSqN-s?|fjnu7~SQbIF4pOoj)?zCNizsAs5RV8YpA(|Ay zGBM{QOXRV0hpyIG9>;VYM_E}%HXs=|vos5kFeLlQ&QDYl5VPu%DKl8QIhXm@u@c>R zq^mQoQC&)UoWLqMFwaX>Gwo@ue!T`iJ53N`arO7me%LebTb10gJ~!#j@p6)aqcjVM z?n{zZ&Nbw7?10=Lmfi?wH z+52v&2#n7&f=TzJn3=#isgLB)n;+;-9_5BVa8qrxLA>tVi_H_rre0$*9u$X9+)`Q^ z?|4vcQ(+uXpCN9VOyzkSF7~IUk6C4)_O2u;%6U#1r0+2FFmy@XwB2&*H{X)*n@tg# z%t+FXsx2lQ zan6O@6T|HFaQ?J5IC7E}^3E5d`Tf@9zRWuXCeaZ{1VVdwYu;%2NZg0NeW(c9f$Xfu z2oI^S3nsD~3gNMtcz?=oMF=`>VP+@^h^A@tpPikCNwz8-vbgisY2%L(25_5@0&h_vT4&`e^~<7xC}JAhB&?Y#8d4ETrF7`+TF zVRjL16{dEhT@EMO}BCb+?}l z%i*;2ya43Pl&AC4CjyW~Y1uu^jV^HAk5vcT!nvZiV*<2-a7+Wrwn29r{E0KfM4IFJ zdb4`YFedNr-2aMx_qVtA9-FDjtoLiD|4qX{5*SMU>6oePmQU4sK)-11i-?ynbo8ir z)af=yZcFVtr+`Z|thTkZd#X6NC~rIWwL0dGFvBMo(-QSe}#zJAgrysLZIukq$; zB&BqwF;mT|HPevy)`O$LX5@JHEVobAdtLryu(|GDE!W36OUK-&E~l@xMBl$KFnUt; z_k$%rdNxn}6bo=*7_LAem##&uHUPzZQ|-Q4TEieDDxfNRw{sX7pEGwwP{D4oau4~^ zH4>R-zaQ?tV^M@1CqiD#_E{Z2)Og32;TM5c71tfYQr~pwB2|<)^j4%kKDXct8ylWg zH|*f*NqN42oIWEbTYJuC z{0UdnHi+0==BdPA3o1G4_c>hk)9Uv>BT=s)`xVP#f6BF;17yU%)$oyP-o;SV245xr~(nr@fw96S%sfJtg+!vEA$5l=>_D za{{okR|aq`n|8j@n-I#DRob`#!=e?h4uOYctexN$Cvi3=4r}3_@BtIXplpPaDV6;P z^anIv3B(t^lOOU|_RBs#_#aLo!3%<)^1r>;+1(+nb682jYR!~lH^Z3F z%(NaU$>li1x%2H6`N3_{r9%}^tFC^Or+Ki(a01X*dP8ee8u2^Y=1Xe0&0u(EZ`TiQ zm)b?B70Hsk*7!}M$G?@%+xMe5Zc3GI zmH}9EIDNrzAJBQ2c0~msWK@z886@s$?`Mkz{6eJ4POF*u<3BsItI+l09N3VKH}6FA zrk+SNp=53F^eWRE&bL3^m63{l=0r#Pz&_Xj2t5Z@y1(nzlV*F znrX0~ESPvybaYXikO3m`MECF=uO1ofhUF|Ps`ebW2rKtg)cQ&| zPvEn7!k{~}v;+{(XxGl1AN(5c**1`SDI(+9HrTOyJ#ZD)aqToUjhelFv#&f6?X_X6A#>> zSS;dszxrcT{1MGwd5>NxKVB@B`vD4DUeX_gHVp^0jb>wVHi@=5e_vl{1K|?cqV?_| zXwJ01QR@5i5Y3N3Z@zJ!eK@Xp61`k83l(sBV|zJTtx<($bQtp0 zQjx9RP^(dq_Tpa#$*Pm?`TWhjQU6(}tFr)LgLUBQIU45&F~R?_-KGY=-~qh12B8ri zapHj9RaTe^a<0*TwidbN#J*zRqU2lLE%)Hi4G{dD)4x_5t;=2`5%4W96eYSW0!*v- 
zUD31eVL~grZN3oN5jf8pHMn=?3miVXLWQ>9TFh|03>n+1jGVZQ)}Al#OeEbE$D}6T z==13+U(G=^ytq2#&8%9lX8^bT$XE77_4u9wuyX4+2DQ6u2fJ(=0^IR_AuoR#E3#mA z#;kt%ZN|CX0waebttUnhR>=@fE}LSy5YZ-YC{yQ#9&S+WuNa5M9sN_^ERtZ1iB^*5rLXct!N8M)$1n zg$55Dz=mrz^hly9JP)!^wkdt!(v4PTB?sT6d6{y26z6?40u5Z^R(B_KiLBZNf}g&y z>Ga1g1tpC+cm#*+eE^evpan;>a?`f{xEWbPF5}!vesMYg(1OF-#Eo0{T$-rBUi8Ze z4}G5pVr&4WmUT&tGkmpW)n17mFfU1sf*|MAi`^|uTMlzz<-G3=@vQ?Pd?9hZGemO{B#5h&pc}s>WgGPE=h>fx_uN1@3Rg+u6;uz zZq~!3A6`(A&!C_pyx<`I53rXYAjaUy)wn5Xe_;STDoDM} zz9>IreH%B4h*njsJhr1P2NPgyH{66X7jrAc8OkU{`5H8GF2-#eDAr}kXig4tGZ-)(G0(tsJkf#CjS#}#)C z06&3|crh&KwNuX2E`fZDTQ?~|r+>3rD|^mo7_WJ8M+^0kh84Es!G#nSP^zl*n!|_$ zhQe%*&R89*hd5vBA4x}t;+Sb!~Ho%Im$W!Io+ISdxV_^2}G7SVW}?Zv9{ior4aunMUFmsz^ zQhI$ocT?0T7IF7u_M1|JMa)S(!mt;@bs2Qdb@^93&h!+)d6;TDRJ>)-9ohwf<~!6^ zi>g6IX1dn~wWN)sWR0^J3&N&3lW6o4aayS~kiA6j`-B4K%ri$^&4r4jF~IV+cZ{aZ$BVT2$T-I5}5)QiPSMBcKSNZZMU z@WnR@sD-xgbnUJ8Nf13VszZ_QD7 ziVgxI%m4yH{QuSz5x6M|516{$03$)T41y5F98Q2J6@@`X0EI;c*YF`{nwq4)o5M?~ ztZSEWu`y8I4)Lgqf!9!x8KmCQU9VoPZmZM7x~^~2tkGoO|LNso%OXbt1sDZR_5IuR z`%lQc6}T<*`22~5MnYYZ|DIJkb539%DaBz_sAnf@UOJIsQ#Aw+1kfI+z$JR34187= zeDX{F8JA_7UpVHWo`VolIywGqjir(SHr$#EA)e8Q8WrBE6Jfny1Tw02ZYY&>jw&JD zvg6RCS#}SzeX{3Ez4=fs+^g{{q_6WV&!$=Kf$&aFe0cW{Td?*GDxKYn;_wem#FqYK z7bu;K2^x~Joszz)0!V6a)d-m$?qrsk7XHW@9{%UGrxPpE;Iuw$1U<)gXl*p*++rH_ z$nb6bdo?B3XnG9~b%_f89`oXC98^&Bs$@5OTzCj79IjvpiVHn+H z7!H8sdezRzkhJ+jvvX46u~X|jxnOmVX)_#Hot7L`z$% zpQEG_Z>bY>OzT&Gu+@6C7vhLD$a%{JeIzNMo8vX4;4?a{U3O?UsO(ZB+1+&#TEWrR zX}5bDtZuQT12ora4b-~KcekcBR;_9cdp&v76j!shH0#)FC30l#QOaff|vYKq!g&n0crIHJ?~Bed<<@mFv&$9YgR z85QPd7#XrRd$CC+IU~a##48|c*lZfDQ};B96i3DM08bW0js2bdhKUnewkmTeb-$y< z#!Rq+^1Mu_aaplg@(gb&d;_{Dr7->e`ciuN)QVW)>SM(GU5%c!+|{lzQRPRjSs%XS zAS3@bCDO5pA96O%ePjQDyZmv9Xu$SVR@6DgzONFh+Y>2rWF=^Ztu%POvzQ>Pas=4`b8EW>Ok{wWP(?p`YY8>Ayw_=i-jZ=FZ{$a6M+7 z0+^9r955yN^Er!Zwa3ovCr-*=hYhIt_N^OM5&&dWGl%<5!vbbeOTP+ZJinH>U%D8rN`PTSrB9PPpGKoQxu^4vvv89BO9M%~zAi>{?NNsgQ=)|N1WI2?Ssw)k%a9;E)-$Hx5?DCX zKNU8do^)-OyQ|!g?-&9Vm#{X}rT`cu>nBT5cs=f^gb7s1K1YcWkiI&J2CgS4yK$^` zP*{!7dE;sIU9tnvi-Y(+84O>zoZn&D#enQA=Ek>2vfIB_h=t_!IT#K>#WYa*b$H3j z{EW9!AkU{z5-kwfWGly<#g7pj-D}2H%blCZa+x--x_V5pI4E=M3}9Z^SxEl6oMf?9 z*lukoz}#MP6u9kN=iKGSZwS_smzV^NizI`j=Yh6j))4p85PQZDu#9?by+3HV9LX2-0PW9U}Bcka9FtZ40F8(VW&k zf(HV@AYuCwiwwvOTpu_VRk2lM;hpyc_snCERARQI-xh3oa8oNfO|7UUdBqU;MJmZq zifJ>gnf`KdBOGk=J!__hI2wvT9$?)dUHb;g;1|~Em3sc{)M3w z7Mm;9N-Nkz?jQR6cuwtki^C}lOuBcDnQmaYQo;Q16Yt+;^J6~?{%Nb62H7~vvscNc z?b3(-2j!*CI}`>~W!Q5llJJRnDwYIkM;tGcy$PAls=a?;8&b>;+ls92?f}iNsU_O} zwNsom4w?&$4;C4a6LDd)QMgIw9A@tMe6toUhzkl|%-k?9l(!A~& z!yD2vh^qVvj2i9{{RM0%nE)am2qvB-(_D9R$|P!HVg7UXV-(c>>fbE;A)O1kb1<&J?eWSLMm^L{ZglXJ`L6JsX zq9oiDLV^?<$ScP~*0Tu3+8k}l?3EJRB9;fv@iMGAn6=XRts-og_5g80q`wZ_sUz1z zI5yqSHkB;CVR~(E@(4A(iG1oeCttKg2Y#7~e6`3^ea9Q5m~s9bul;^l1Pd!^dA)v zdU4p8+1DcHON@S-=kzkgZo007P>8Z$Xb59P?IdfVGkE4Q%522CtdL*^Yo1f5Ly}~e z!2$AQC|F7xWgF}&^qD$)P%3$8kPpXiOX`e>vJNzA&(2#JPk<@frrCSA4G1i~GQ$Db zGyo&ZYv;7mEf7wq?0)4lZhGLrN(JiiWNR#QRa?9v(w zmOt%h7NGLlK>HK_Q-3e?T99eiRwI(OfBK^SjWv)62Ke3dt#Tpo-7!rRh4nK+R6e`i zAg%UBCgbsgL6mrL8LZJ)-I1Jy!{sI$_tW%xgq9{)uB85*+MxLc@aKcO9HH_nr;*Zm zDO6Qdkb@|Z&Goq=+NWvIo0t6n+B|0XgISSMwD{xy?okv0-DAS^eWNHzvcSDjH-AoX zbZ9}b1GEj0syxb-87S+}^Z|!*9YvJyIWH`ot|QUqul*2Nn~_w;$A)jgCThYClCS37 z$D8y&{LA=Ka|KQH3WRN*d>Pe03+0R|>_4W%b)0Gl+Fa)`O0?txQyDxzdz%+S$S{mY zJF<0je;q==^f7u~;ex{b3JOVwTJp8#9yuI02K+@~^xhdX3_Y+rPN_;uVq97-%u=;8 z8H~v7Ai;sX38``&V{7#aY^~>k&TKjr=n&|FOiS3fJL;*LvTtW*@Qa>1squ0b@d3O) zLaN*ywz-nNWHqJY%TgPn*s$CM|8;1c2fKu?B5H~MVz{IZ?B*;WX?ufBFeUi%%H<>E z1?Uaq5nfp}wF2^n17zqe!QOReWYrBg$}KaGf`?SMHEg1(X+g}wBa&U_v6uyxlGm&( 
z@3)(f-j*(pRu@F?NN^ekhtgfGOpG)(bZqYac+=-Hdqh9%rHS%Qla&%;{o0+iOIu}j zTu>Ndjx>4gme?C!5@a=p$k%g%*P(7#0mO4%5>KUHHds8Dttk`t7Nm*Zl~wWL6x$$z zT)9Z~2ajTk720)S#!IN2ML1YUp>yl^X}<|DCZHd(xh%RZl(yQEpg|%k64ITpCB*l} z^{#*TdX_#AL|$>#TG9F#QI-hGKxwR%Z48^ir3Px*rTA!--;l8B$A!*KbAbcj01Qui zAFWMNSd-UU)nF}x5s9=&rHt2Lk$u-mc)8lZJOM2#Izai9)EGcSmf+(mV??GNd409? z$0d!WX>dADqe&AVQCI)pCK+(t1qpJgwxiCLA0d~nZf2$v5qo00S6HOn&Ue2A(Q)mF z&Md+ZWsU3>G)RcJx+809y?13m{$)hE2UCCJ!EK3=OE~bopo-4QQlKlJ^=L_|mlF|B zh3JH+Gh;+81$%b6$o)k+Aa~|^ciEM(xaI;^`lRUXG2k7v^VNp)3t7|E#DV0xlt`py zY4L%#n@}Ci24OddifK1$)v>l0*6voo>2l=EbT%;y-c|CZVsM%fS3~Q!Oi7J_+pbvT zqF6txQa#@#6s{WTt@~nT*Ny(m)9ybkX#RSAY93Fom+dhE_IdxQJqhunh(F%xutjWPNYS_zDL0@L^{4u!IWC%#lJMwW8f*sX{8qswQvjRwaSi z8E8<3p8jVh{2~K0K$WwukjI^FS5i=g*I^Cbnkz1Baj<&(;up3RGxx5Hmq< ze-tI1`Zo>Y)hY1TUwPA7JUnmlThY#8H3QcmN;Z5xeyf-dfC+7-8^-!oMb$^ut>G*> zN!m_O)lt7%J-B*`#9;QItB`^ps(4vIl)349Upj*6;}mn^Xbx5~FSZM=xH$SSJ3KNZ z6$z&n2`?83<5*>j1MCWvee3P`+DhQBhXU1(&1;E%&{@-A-hNEA zv9f$VKA4BYBR?gZ*J+ReF2m=hq$}jeXYu{D&!!SX_jLda@1_tm>ftb|5yaG-V%gIC7-W2+$lD)5<{$IA@_do&t&+r9VBo(aHX6aU}&n zMP&I3Dk_!k7FZOyNnp)MDW}P#>ofu}i}kb-cdgQ*yXv*oY41UFPYO zOC#qs0)HO`xdvfcs9bY0nO@Ev^)v1ves_#qc?oaw*^ny>g2NOb*)t72xJG5V-XV+@ ziAR6|rZ*qS1t1}bb##|7WI8??Rde&wJevgWnI**!NnJh2sA#G>xhoe}UM(`Ma|d_R z@$EN@GAu7M;)tXm* z6|4ZuhXzSsbzhe8)HO_KMcCK6DYYIg?!tZ$42rxGQxk$A5S6oLTEZ3s ziZJgQVUxDOqVk}B>w}OPKrHBk?du1h)tuV|J>zs^LiNJ_=}F6neu7Vz(gRu7#b;bt zVX8x(op_DisLL8(ZU9jv^3)Y$TAC4-!^x9xSZp}VkrV!){em)EA8ct@c4BB0N*^CI z({wJOmLUAhgM8LEA75WKZXhmb!25Uys0xldb&1NM`x+Esz%9cO6uV%f5qGjhOGm8* zmwFl_+wT{00Xan2_Q=a6Ppv;}9s<<&MG!S4czmHbEO$i-UkKOp-TlwBkNP2&j3RuD zKzEGV;#h|NB&my)i9Dubw2(F@CY4IXK6sHYNT&_3&h{E(kF6#)8DzqMbaA<#fq+D(uD$^5fW0|&?_AZnP zRgf_gsRK%dM50BgrBD82^PIVF&R!FGX2$afe{Hv5)u6CzfMO{6JAC37x_tAAk<=90 zkAyvAa%)L!_GvBBrnI2id=>-1bw2DC*>aHHbVA;p>NMJQ>>f4Lm*97ZvUgSNyeK}7 zN8cY_nmrlHn1Ud^2vkrfDu=X8+g%0l>vLN~>cp#?eqBxnph&?J`mI1SmTZ26*j|u3 z)GzmoFOWCS$8DEydnLR^Tw}6#Xv_UFl1HYK%NLVeB;($nMc7A@`=<%O!6S`#fFw)d zjPQQKxBff+XSWR={DfmS!dIHKsOc3);|w`(+V`)1!dgP59|$j|*N$bs4KdXHgw`f6 zW2ukcZIPdLVF1ORJ+BbhJ-GByyM9sfJrAPFx!C9?39Sb8a(rvG`GLMAbM2y3nC{^~ zfvTq_SMQAsP@9BSZTA+CTd^Q(pUf+@CK8u;0X*SxB^ZaOe-m@LeXov{RUTZO^DvSN zjghOpuM6;c=>hq(IjLADa8{&|+?><9;Y|Hujet~0T*Dp>ZJGP|W+QhpGVi66K3VZI zWa@gK*fC9xMVtdYv!C7?CaV)!%D&|l<=Q26P9o_2=b>~FLz@DSt_c_GJpADcXVK6$ zW-S+1s32-YE}?uNyNv?})bKA=S|bs(oR@`irtvMhmk#u_tlNU%w4?WI+BRu=Imh(d z$kEqNm)AC|DRnb+pdxqDrE93ec%^m@1t)sRH#-5&eV1lwjqs*m1~9V;%IW(N@N5#v z^s`eDac^40BgKmvti6IDO+98|YJp- z-}9C&H5Z!4)qR_C#e$MDO75C9(_h zqAZ#)0hw%P&w2lHMn2!aj;VqI>}N}&uqeE>+4khj4BxzuOGIzCZIb|60mGstRPz$G z&9BlF_g|&%mFz<^J+_T+>43()Nq4Fn>m@~V|00P2CXY{vlihb?;)U$!-*FFumFlNN zuuof8&o;{!)|X)igys%1+V+*|gh&h7ssJv}h`sbr=fz%`K9d^=h73 zjdauyB^%l&5z@?Ty}h~A_O}J(K)sFRx7}0&rLJ}lfEUHhab0#)D4=0ex#NTzrb7A| zfl(MX5e%XdDsw^~gvd>B4ff-wA$e7sCR&>B?#xoD-Bq;6Qd1|315x7}hwvv0Otd7p zZxIG;+JbCJ(;MhL>ls|ah~MeoAihne><&-0g;Rulop502wl`FqXX1;l^q1u?`i!*l zA9?n1(`ckxEAC0(2({Iw{3R1@tvmB!+y{s5m?cCwEr1Pw&Tmh5X2;ApO8x-kt37BV zK@iW|*{!o$yxOGB`|N#j0LcID6B5X79AW;~VAuPvQ%L!LwKcy!2EfJ)PYXl9Ob%Cv za3aB!s?7{*jHQl#6BCwQcn7MNL&a`$NY7KS6j*VY8oNq&%h$CgBhb#b5P1_Be}K5M!v~ zE(^xpU$fLo!(Z{wf|N6=QPN?1%$Yhr(Kr@a)S;Y-zfS4^>4?_Y;=1^IcZ^fmodU`L z#yGr*KVQhdgPxEdQ|x{C;}6$Y2@ED0Ze9cJl?R~ZRMXsQGr$a!9#VI!8NrXVAwyPD zW+*z=%?!ArMi={_=0=xc$)-m40Pl8@6$@9qvi_9gOWUB=^_w42yGsSq0S&U|FCk|--%yE2bArRB+fPn#a+xokMX+S_tg^sfn<)1vsf$O>y7JY4p1s2m&^ zS~&gcR)xY$CVoKzyD>0&Iu$K44 z;@c*okRvR0{80p1Ct$_>aomLJWPZuwRNk>ZtG~G>;pKkopaQ*z>KwhfWk?5yn{5MA zN!(kV5r<;{hBLohOVz#=4z}CamqrV4K9$p&Ee_dy1?aWBFn-!6`Lx-eF z?XH8A9UitLmZ&egjhAMz$tHvdDpi@+ra7!^%33^sD|{XNO($F3bobB?7A5L=*3R3~ 
zXNw8+7p1@ofq6*i_arO9`VzJ2h>ssuFtc4@sZGIq4my>TGjUI4H#^q~!5RtNN1Jh0 zjdi;a1BenI8niyN74NN<-;vG;k0GIpTYKfoxS=fryo3H?krMO#8F~}lG-NXejtw-_ zN}hy8sPI=Fj&L^|MdDK4Hb+d{g+(NqX^P>l^ltkx*uA0{E;Yt*?A?dbkIz^N+HK$l z((?)pDdD81Miflhq6@9x!Y2ntYoX@`yht!{0s_m9)^RPFXJYfS{DsGAa05g4ZC?oj z8xE0POF|SgR-qiq#^?06T;p<;-+|O|rh4bS+u{``)j>MM=t{- zA2TC&L!pqqSHj15UUv0tdyR`ow6QS`y1r# z%!wr|s99*39rSa<3<56q2kM zd|Ir|Zc&%<=ig3dIE#Q*W&Z1H_pud3HlX$I^-DhZOV7h}cH#9UO~HHe0uD~AhUh7% zoeO|mRd4sUMB>Csyi3{HhlVk~!P-L^_;YVF0YPu`fNO_rLenASU9*94b#9#)Ad3DF zeIRB2KDHKT*Bj)l@E}y5!zAev=~{>C%?zy+{|})b=|p6wuaH%^8?#FIeRhp+uvr+} zVkF4Tm=v{5KB{00%e<-a4%aO+hXfZX6zUY8b+sOksP2K6Mi9H+;y+#98=H@Gaa3JV zQ!rrv+z+bEsXt$GZDCi8FeQsRK*E`lCAXH;U9b_{L+2Y#YpFSO(iNjl-v*;qBGLK$ zmvd!dUrfTokil-rhx>nHU{e`L-5P2(6MbXsRT(pXoy{iz;}W-$Celh4j28D7BBC+c zWW7y}@FLr;gJtZtQS%mokrhjU-vg+7hMkVq`1+n}W=)AbErlIIeBBB-RFNgIy|=gz zRXL#@^dT0Bn8R?_aPcp{iIxi7AR7oE4z|kdY+>vOvnm;P7(6ZNTu#jqM|>eZcC|}^ zo|FQlot>OWKaLvg07;BrW|5bCJ)K>7geChl9z77$o&R}SLN}eB(^yvjD6HSplz$4e zY9;PsPq?%$pZ%ijDVPLQL^FH}3jc_mk6HM~z1GDo;Q#S?WG~5*+DkB+1;BXMa<7(6@c#fTF0f-XfLNW>ajA{iTy3 zH_N9db~)M{TdsNxSC2Y1@rAYZ+D5UDCV11rPs8blUGc#xmXZw!h7W)s+9Lc5&#y7` zTXzw1G{6+%BwAML=lG8P-4S}qtIRRs|#b zH!c>TVEJadQ}Kr~%Wcss)Ha4Exm)0Iv;5=lwwbm0@UXD%1hSl&u^1#yz#sz~FD)%i zLLkg^jUKl-_hhl^ z&&Y%og*J~HqpzK+ppYvkU*Mc^X!<9uEc-ZHrxlYkLoNwnh{ix`t|>&HzPfbv-(Ry6 zYWx_sLVRn&WI*<`H1=P_fT0uxx(*YYh&Be8IlX%Y+2j0Vto{Sm? zgm%;Q=nLDW+N!29Y~s~j6T}SIIpxI(cE$d>wC=v*^?(qVV^L%hb{Bp`Q+zahposh- zj#!SKEjE+^flpDx#dvz4xIil*72FYRJ?w;KWs(7+JQ4W5Yi?oBG?*J)ly1!@^-ea1 zTtd-5c>zrY`Gm3Abo=T^GhW3c!GIOz>mIG8Xo&-AmAt8#V$9g&U~&x`Cye-|)7Fdf zG7Qw;8i3-FUo4e*57lFfDuOJoGd0zw*Sic>F0)HJyy#m!C6;qMt!8hFRqZ|%HQ9cv z#b3N52RvQFN~%opgFk8AuQtj{k^`TTzW9yG(g^Iuy|+JUZ8DV0YkQH<5BNetnqq=_<|Apwmv+47@ior-Jtzh7=gr2Ht~QSujV z#|8?=L;qFgU4IAPGQO(Dk$+Qfi|jr#mQoaT#15*`E7lsnbP=CsRoZ+BQ%C0 z{FZ~G<}|d7`ImEaPU>u_rosmu5<}v6*10`?K4h^aHDwPjFs>Wz7W$kc2f| zdLHQ=UdAkb4|0t}vgbq-_s*YLE^rC_25Xs#0R=}aotQWP{qI*Q2Ob23AJ`N~05m>? z27bj80#vsp(ZAO)Ysq3qJ2S)DOMh{I`VZ}-80U~fX6v~)>r%B+_DzYpVA%7J%_tZa zB`$VITWyihtd#!nu5_I5_lxlf(D4|XhRRd2eUk;X&NIiOpbQ zsm_y=QHDsR6eGeYaCG#49l_PR`jcbM;M0LU1DX(U*17AUokNeyydyLw+D>cJ-5y#DT2rX=>l|&}MY<8%9shOUW{`E3h<-DC z0lG~%u1s5Y?xzZOSQU14>#E};6dNU!Qkv=$&Pzoch_)nh=UbQ&e$N%x>3SX}JN#Bi zSipwb?ZS)YbhvNh$pTacHk8*?>&Rd^%rMh+DbA#FJ^k7r9cs|UATfv(diV5zRwC^} z^9oH52cL0o2!6pwYM0Tl9^NRn@@U6c3U~-JWW_;%<{W!q=&d+qt*7CNaGV4Z&tJ0b=w&E6<^fxbFaJN031%G zt7SVYbApzl@5V>j#jJ&N;7=pFE#3TUge7#ue`7QH5ZYWPfg^hDBL`M#>-)DHEy9y< zo!pzPE+e7x+MC_HcH}W;d36(JaWh7l)K$d%zAQ`8P*}l>-#9`rsDC58c}T^pvB6tq z#{gHs{)Jsxt>KK3 zi%3xAdgzw1&T)B5sRUK&6U>M|c_;XXO84WIP(Ue0fHP6BJ&3p`=w*si9^e-?LH!oN zR$?p18G+=|fBM7(u_#s24_}ok-AwRUHvx$Fhw~0x7z%hL2(D!({R-Au_dk*f^CHPW zZABP|ZQ$P>6Q=Q3eMhX2{Z_xj5#_`7?}~Mn=}pE>HYC~i`HL^RC;+eM&L++gtCBu~ zttXmdAemrL_Qz$cXy5>K8&J|<1_l(@n=QBgBZiz4MJ6LCC6kX{Uge>{2mSOFiXSxOd2>GT{B((xu$ha-eIxn|=4P9!#23u-hX8WS9uzB=$$jb01(s;B zQ`GjCN{4?sJqzvx#gc+ma$&iTWG5UXarcErlp#;-c3AAbAUNSUv2pK)8u=WdIN4k*Ibr*^bsa80RBSLLLpXOE9c=ZBU1|7NcoQw%mniM~$fUjqNVcnmD`e5|emzVLz+0AbS<4?x5EKxkXlcx@ z$irWYdl%(s-J?FUR@=S#=>?zaiwoh{^{Zw=?DuICJu6ad4XI#kvbLu<+mctlhWZI5{pzVgo1u?c{6v z-SjfTqvv6%NQ<3l1=j2(#uD&L3tWCYbNf*O)^}tK>;86yi?B`B|5L@8$5WO5aa>P! 
z;#e=om0hy55OI+ui4-A&B1^K2rsT>t=_WB=$kp#b;aVE&gsi!;L_)<_GG)2wy0#_} zEhA%{+xf}NJ^wu4@8@}c&vRbSIp_5{=Q-#5-TB8V|KOMmRr#IEM?!Vq$x8=SF56d$ zJq;2)YFM=U{=Gbr##=N`otGgWjB?tZSJKC&buO8<3*p5EM}AVFEyNO4o)#qhR-@^e zmK*L8uSQNw6(-fk%;@ku^J<8n& zbv}NDujGu=ncURO(#o?(t4=+u_C1Ds@MC05%TK-Ya<3?U8u?Cw#jlDFPNflkR`|>p z$7!48VV82rvtl97TahB?=(xA*mq%s7ro|tZ3_et&=~5{Ntm~j9QC-W{clB6ZWAyv4 zb$(N$_U`k5Lb|P=h?{8+)E58aLjA?Hc7d68L%#kL$#-D}dK-3nlBR-&fu{*JJ6}vJ zwb-lql)L#i^yNP5&vwrgjoY>{=R7o+ys})<)v7w`;@zfRGyQnx1evP3|CX##F^7u zmbq^&UG<0(S@(u&SjQtbH0Z+}@645aJ?_YQ?maD|VqyjcuHEuk`9Yr!)1=M!dt6Tq z_^cS$;)%8~;<@>)bG?M~8L5^QjDAb@r`nO0$rBUEE=LzFN;1L zm*|fph1l**%yp(TH=0!mDXGgC4uV=_*mctMLRbY~#oRWW=X+E(L9O@b7ZS;)7a)St$w4aX$_*>8-bRy$g@(i^l1k0JI~Rtn1K5uE4@o*(W`mWVfQpGm9%?i*xL#ciN0zrE-}s<- zLS)jMdAREdhB9str<)o4Bt7V4|FmESzpF%qhE{Fb>mP&y;+qR=wP_=S=sGR>>VSy7 z(c#-K`bLuq{9`Lk%xIz085@X12jL;SC~%oat@V7zGn?J_vl+G@_1ar;UrudsDYxFy zRbG#_8mBLrUX3QjsXb_FnS|8cd?B`!cxI-aK}b{fZ;n@Gf*B605^LAQOA3Cki<1@( zb~}=GlWUz!D^7A9E8tarGx1C3ujLl_qyVCo|B0LeFLZphA*pdHy}T>8Q!z6k`{due zIP?pjMSIj^XU)ZB4#f#M>UD{533Bb!dQq{549(?Evfb@lxngo7=Jet1t6Uwr*UUtP zx0e*j$2wG#z^Y7_?peN@k2$IqGVY4Ad!5dBRh3XFE-Xf=AN(2`^eK2;vzzQg(pym* zBt^ZiqPBJjabUGhU`+pyc$N|d%%TS0x4cL$0T`(+h800k9ZNebQ_JOtEKnWj1{97Uk$5DL%( z7<^<%53p-v=C|Ac%8&$}C2N3X_;kZ9aCS%m&+^|k3+Q3I2_6K89WXPIhR6T=6@-mV zmxZ5q#2^Eg9q><@3Z7+UodsNR-ULOU1OtIy!zgg@z$EH9X-vxVMfksm zxL~)h2;4EE3f%oRB_mKD2+1QhK#B;*MjU__5gr`X2axGq(Spbu1X4#)urrwHO2XMu zYtR!88OeC)^u`37zlxdN*90-J{|yS-A}~pSt^@dPGED{ON5|o}mN|IX*z~_27qiWa zT>L-OL^lJOu^8V(=YWFoJ0M{U!Lz)-vBV|CWANH7DL6z&VD%UZ-jgxu^O!cCQ!v-KkP{xr7RM;xaTIK1Z^@NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init +if "%ERRORLEVEL%" == "0" goto execute echo. echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. @@ -51,7 +54,7 @@ goto fail set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe -if exist "%JAVA_EXE%" goto init +if exist "%JAVA_EXE%" goto execute echo. echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% @@ -61,28 +64,14 @@ echo location of your Java installation. goto fail -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. 
-set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell diff --git a/solace-integration-test-support b/solace-integration-test-support new file mode 160000 index 0000000..c411ac2 --- /dev/null +++ b/solace-integration-test-support @@ -0,0 +1 @@ +Subproject commit c411ac2e0f82af25ece2994691352cb0d6235142 diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java deleted file mode 100644 index e2a1294..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java +++ /dev/null @@ -1,63 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.BindMode; -import org.testcontainers.containers.FixedHostPortGenericContainer; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.junit.jupiter.Container; - -public class DockerizedPlatformSetupApache implements MessagingServiceFullLocalSetupApache { - - @Container - public final static GenericContainer KAFKA_CONNECT_REST = new FixedHostPortGenericContainer<>("bitnami/kafka:2") - .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", dockerIpAddress + ":2181") - .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") - .withCommand("/bin/sh", "-c", //"sleep 10000") - "sed -i 's/bootstrap.servers=.*/bootstrap.servers=" + dockerIpAddress - + ":39092/g' /opt/bitnami/kafka/config/connect-distributed.properties; " - + "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties; " - + "echo 'rest.port=28083' >> /opt/bitnami/kafka/config/connect-distributed.properties; " - + "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties") - .withFixedExposedPort(28083,28083) - .withExposedPorts(28083) -//// -// // Enable remote debug session at default port 5005 -// .withEnv("KAFKA_DEBUG", "y") -// .withEnv("DEBUG_SUSPEND_FLAG", "y") -//// - .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", - "/opt/bitnami/kafka/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY) -// .withStartupTimeout(Duration.ofSeconds(120)) - .waitingFor( Wait.forLogMessage(".*Finished starting connectors and tasks.*", 1) ) - ; - - @BeforeAll - static void setUp() { - assert(KAFKA_CONNECT_REST != null); // Required to instantiate - } - - @DisplayName("Local MessagingService connection tests") - @Nested - class MessagingServiceConnectionTests { - @DisplayName("Setup the dockerized platform") - @Test - @Disabled - void 
setupDockerizedPlatformTest() { - String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - assertNotNull(host); - try { - Thread.sleep(36000000l); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - } - } -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java deleted file mode 100644 index 10a2703..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java +++ /dev/null @@ -1,75 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; -import org.testcontainers.containers.BindMode; -import org.testcontainers.containers.FixedHostPortGenericContainer; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.Wait; -import org.testcontainers.junit.jupiter.Container; - -public class DockerizedPlatformSetupConfluent implements MessagingServiceFullLocalSetupConfluent { - - @Container - public final static GenericContainer connector = new FixedHostPortGenericContainer<>( - "confluentinc/cp-kafka-connect-base:5.4.0") - .withEnv("CONNECT_BOOTSTRAP_SERVERS", COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092) + ":39092") - .withFixedExposedPort(28083, 28083).withFixedExposedPort(5005, 5005).withExposedPorts(28083, 5005) - .withEnv("CONNECT_REST_PORT", "28083") -// -// // Enable remote debug session at default port 5005 -// .withEnv("KAFKA_DEBUG", "y") -// .withEnv("DEBUG_SUSPEND_FLAG", "y") -// - .withEnv("CONNECT_GROUP_ID", "testconnect-avro") - .withEnv("CONNECT_CONFIG_STORAGE_TOPIC", "testconnect-avro-config") - .withEnv("CONNECT_OFFSET_STORAGE_TOPIC", "testconnect-avro-offsets") - .withEnv("CONNECT_STATUS_STORAGE_TOPIC", "testconnect-avro-status") - .withEnv("CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR", "1") - .withEnv("CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR", "1") - .withEnv("CONNECT_STATUS_STORAGE_REPLICATION_FACTOR", "1") -// .withEnv("CONNECT_OFFSET_FLUSH_INTERVAL_MS", "100") - .withEnv("CONNECT_KEY_CONVERTER", "io.confluent.connect.avro.AvroConverter") - .withEnv("CONNECT_VALUE_CONVERTER", "io.confluent.connect.avro.AvroConverter") - .withEnv("CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL", - "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081") - .withEnv("CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL", - "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081") - .withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter") - .withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter") -// - .withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost").withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO") - .withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars") - .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", - "/etc/kafka-connect/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY) -// .waitingFor( Wait.forHealthcheck() ); - .waitingFor(Wait.forLogMessage(".*Kafka Connect started.*", 1)); - - @BeforeAll - static void setUp() { - 
assert(connector != null); - } - - @DisplayName("Local MessagingService connection tests") - @Nested - class MessagingServiceConnectionTests { - @DisplayName("Setup the dockerized platform") - @Test -// @Disabled - void setupDockerizedPlatformTest() { - String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - assertNotNull(host); - try { - Thread.sleep(36000000l); - } catch (InterruptedException e) { - // TODO Auto-generated catch block - e.printStackTrace(); - } - - } - } -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java deleted file mode 100644 index 6e300cd..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java +++ /dev/null @@ -1,46 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import java.io.File; -import org.junit.jupiter.api.BeforeAll; -import org.testcontainers.containers.DockerComposeContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; -import org.testcontainers.containers.wait.strategy.Wait; - -@Testcontainers -public interface MessagingServiceFullLocalSetupApache extends TestConstants { - - @Container - public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS = - new DockerComposeContainer( - new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml")) - .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME) - .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME) - .withEnv("PUBSUB_TAG", PUBSUB_TAG) - .withServices(SERVICES) - .withLocalCompose(true) - .withPull(false) - .waitingFor("solbroker_1", - Wait.forLogMessage(".*System startup complete.*", 1) ); - - public static final String dockerReportedAddress = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - public static final String dockerIpAddress = (dockerReportedAddress == "localhost" || dockerReportedAddress == "127.0.0.1" ? 
- Tools.getIpAddress() : dockerReportedAddress); - - @Container - public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA = - new DockerComposeContainer( - new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-apache.yml")) - .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC) - .withEnv("KAFKA_HOST", dockerIpAddress) - .withLocalCompose(true); - - @BeforeAll - static void checkContainer() { - String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - assertNotNull(host); - } -} - diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java deleted file mode 100644 index 727deb4..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import java.io.File; -import org.junit.jupiter.api.BeforeAll; -import org.testcontainers.containers.DockerComposeContainer; -import org.testcontainers.junit.jupiter.Container; -import org.testcontainers.junit.jupiter.Testcontainers; -import org.testcontainers.containers.wait.strategy.Wait; - -@Testcontainers -public interface MessagingServiceFullLocalSetupConfluent extends TestConstants { - - @Container - public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS = - new DockerComposeContainer( - new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml")) - .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME) - .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME) - .withEnv("PUBSUB_TAG", PUBSUB_TAG) - .withServices(SERVICES) - .withLocalCompose(true) - .withPull(false) - .waitingFor("solbroker_1", - Wait.forLogMessage(".*System startup complete.*", 1) ); - - @Container - public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA = - new DockerComposeContainer( - new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-confluent.yml")) - .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC) - .withEnv("KAFKA_HOST", COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080)) - .withLocalCompose(true) - .waitingFor("schema-registry_1", - Wait.forHttp("/subjects").forStatusCode(200)); - - @BeforeAll - static void checkContainer() { - String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); - assertNotNull(host); - } -} - diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java deleted file mode 100644 index a0c64a9..0000000 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java +++ /dev/null @@ -1,74 +0,0 @@ -package com.solace.connector.kafka.connect.source.it; - -import java.util.HashMap; -import java.util.Map; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import com.solace.connector.kafka.connect.source.SolaceSourceTask; -import com.solacesystems.jcsmp.JCSMPChannelProperties; -import com.solacesystems.jcsmp.JCSMPProperties; -import com.solacesystems.jcsmp.JCSMPSession; - -public class ParameterTesting implements TestConstants { - - - @DisplayName("Default Parameter test") - @Test - void CheckDefaultParams() { - SolaceSourceTask testSourceTask = new 
SolaceSourceTask(); - - Map props = new HashMap(); - /* ("sol.host", "tcp://" + MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_PUBSUBPLUS - .getServiceHost("solbroker_1", 55555) + ":55555"); - jobject.addProperty("sol.username", SOL_ADMINUSER_NAME); - jobject.addProperty("sol.password", SOL_ADMINUSER_PW); - jobject.addProperty("sol.vpn_name", SOL_VPN); */ - props.put("sol.host", "tcp://" + MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_PUBSUBPLUS - .getServiceHost("solbroker_1", 55555) + ":55555"); - props.put("sol.username", SOL_ADMINUSER_NAME); - props.put("sol.password", SOL_ADMINUSER_PW); - props.put("sol.vpn_name", SOL_VPN); - - testSourceTask.start(props); - JCSMPSession solSession = testSourceTask.getSolSession(); - assert(!solSession.isClosed()); - JCSMPChannelProperties chanProperties = - (JCSMPChannelProperties) solSession.getProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES); - boolean GENERATE_SEND_TIMESTAMPS = (boolean) solSession.getProperty(JCSMPProperties.GENERATE_SEND_TIMESTAMPS); - solSession.getProperty(JCSMPProperties.GENERATE_RCV_TIMESTAMPS); - solSession.getProperty(JCSMPProperties.GENERATE_SEQUENCE_NUMBERS); - solSession.getProperty(JCSMPProperties.CALCULATE_MESSAGE_EXPIRATION); - solSession.getProperty(JCSMPProperties.PUB_MULTI_THREAD); - solSession.getProperty(JCSMPProperties.MESSAGE_CALLBACK_ON_REACTOR); - solSession.getProperty(JCSMPProperties.IGNORE_DUPLICATE_SUBSCRIPTION_ERROR); - solSession.getProperty(JCSMPProperties.IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR); - solSession.getProperty(JCSMPProperties.NO_LOCAL); - solSession.getProperty(JCSMPProperties.SUB_ACK_WINDOW_SIZE); - solSession.getProperty(JCSMPProperties.SUBSCRIBER_LOCAL_PRIORITY); - solSession.getProperty(JCSMPProperties.SUBSCRIBER_NETWORK_PRIORITY); - solSession.getProperty(JCSMPProperties.REAPPLY_SUBSCRIPTIONS); - solSession.getProperty(JCSMPProperties.AUTHENTICATION_SCHEME); - solSession.getProperty(JCSMPProperties.KRB_SERVICE_NAME); - solSession.getProperty(JCSMPProperties.SSL_CONNECTION_DOWNGRADE_TO); - solSession.getProperty(JCSMPProperties.SSL_CIPHER_SUITES); - solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE); - solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE_DATE); - solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE); - solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD); - solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT); - solSession.getProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST); - solSession.getProperty(JCSMPProperties.SSL_KEY_STORE); - solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD); - solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT); - solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT); - solSession.getProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD); - - - - - - testSourceTask.stop(); - } - -} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java index 1e936cc..1c5871c 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java @@ -1,44 +1,56 @@ package com.solace.connector.kafka.connect.source.it; -import java.io.File; -import java.io.IOException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.List; 
-import java.util.Properties; - -import org.apache.commons.io.FileUtils; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.NewTopic; -import org.apache.kafka.clients.producer.ProducerConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; - +import com.solace.connector.kafka.connect.source.SolaceSourceConnector; +import com.solace.connector.kafka.connect.source.VersionUtil; +import com.solace.connector.kafka.connect.source.it.util.KafkaConnection; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; +import okhttp3.ResponseBody; +import org.apache.commons.io.FileUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.time.Duration; +import java.util.Iterator; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.atomic.AtomicReference; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; +import static org.junit.jupiter.api.Assertions.assertTrue; public class SolaceConnectorDeployment implements TestConstants { - static Logger logger = LoggerFactory.getLogger(SolaceConnectorDeployment.class.getName()); + static Logger logger = LoggerFactory.getLogger(SolaceConnectorDeployment.class); - static String kafkaTestTopic = KAFKA_SOURCE_TOPIC + "-" + Instant.now().getEpochSecond(); - OkHttpClient client = new OkHttpClient(); - String connectorAddress = new TestConfigProperties().getProperty("kafka.connect_rest_url"); + private final OkHttpClient client = new OkHttpClient(); + private final KafkaConnection kafkaConnection; + private final String kafkaTopic; + + public SolaceConnectorDeployment(KafkaConnection kafkaConnection, String kafkaTopic) { + this.kafkaConnection = kafkaConnection; + this.kafkaTopic = kafkaTopic; + } public void waitForConnectorRestIFUp() { - Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build(); + Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connector-plugins").build(); Response response = null; do { try { - Thread.sleep(1000l); + Thread.sleep(1000L); response = client.newCall(request).execute(); } catch (IOException | InterruptedException e) { // Continue looping @@ -46,25 +58,12 @@ public void waitForConnectorRestIFUp() { } while (response == null || !response.isSuccessful()); } - public void provisionKafkaTestTopic() { - // Create a new kafka test topic to use - String bootstrapServers = new TestConfigProperties().getProperty("kafka.bootstrap_servers"); - Properties properties = new Properties(); - properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); - AdminClient adminClient = AdminClient.create(properties); - NewTopic newTopic = new NewTopic(kafkaTestTopic, 5, (short) 1); // new NewTopic(topicName, numPartitions, - // replicationFactor) - List newTopics = new ArrayList(); - newTopics.add(newTopic); - adminClient.createTopics(newTopics); - adminClient.close(); - } - - void startConnector() { - startConnector(null); // Defaults only, no override + void 
startConnector(Properties props) { + startConnector(props, false); } - void startConnector(Properties props) { + void startConnector(Properties props, boolean expectStartFail) { + Gson gson = new GsonBuilder().setPrettyPrinting().create(); String configJson = null; // Prep config files try { @@ -76,11 +75,7 @@ void startConnector(Properties props) { JsonElement jconfig = jtree.getAsJsonObject().get("config"); JsonObject jobject = jconfig.getAsJsonObject(); // Set properties defaults - jobject.addProperty("sol.host", "tcp://" + new TestConfigProperties().getProperty("sol.host") + ":55555"); - jobject.addProperty("sol.username", SOL_ADMINUSER_NAME); - jobject.addProperty("sol.password", SOL_ADMINUSER_PW); - jobject.addProperty("sol.vpn_name", SOL_VPN); - jobject.addProperty("kafka.topic", kafkaTestTopic); + jobject.addProperty("kafka.topic", kafkaTopic); jobject.addProperty("sol.topics", SOL_TOPICS); jobject.addProperty("sol.queue", SOL_QUEUE); jobject.addProperty("sol.message_processor_class", CONN_MSGPROC_CLASS); @@ -89,12 +84,7 @@ void startConnector(Properties props) { jobject.addProperty("key.converter", "org.apache.kafka.connect.storage.StringConverter"); jobject.addProperty("tasks.max", "1"); // Override properties if provided - if (props != null) { - props.forEach((key, value) -> { - jobject.addProperty((String) key, (String) value); - }); - } - Gson gson = new Gson(); + props.forEach((key, value) -> jobject.addProperty((String) key, (String) value)); configJson = gson.toJson(jtree); } catch (IOException e) { e.printStackTrace(); @@ -104,38 +94,52 @@ void startConnector(Properties props) { try { // check presence of Solace plugin: curl // http://18.218.82.209:8083/connector-plugins | jq - Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build(); - Response response = client.newCall(request).execute(); - assert (response.isSuccessful()); - String results = response.body().string(); - logger.info("Available connector plugins: " + results); - assert (results.contains("solace")); + Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connector-plugins").build(); + try (Response response = client.newCall(request).execute()) { + assertTrue(response.isSuccessful()); + JsonArray results = responseBodyToJson(response.body()).getAsJsonArray(); + logger.info("Available connector plugins: " + gson.toJson(results)); + boolean hasConnector = false; + for (Iterator resultsIter = results.iterator(); !hasConnector && resultsIter.hasNext();) { + JsonObject connectorPlugin = resultsIter.next().getAsJsonObject(); + if (connectorPlugin.get("class").getAsString().equals(SolaceSourceConnector.class.getName())) { + hasConnector = true; + assertEquals("source", connectorPlugin.get("type").getAsString()); + assertEquals(VersionUtil.getVersion(), connectorPlugin.get("version").getAsString()); + } + } + assertTrue(hasConnector, String.format("Could not find connector %s : %s", + SolaceSourceConnector.class.getName(), gson.toJson(results))); + } // Delete a running connector, if any - Request deleterequest = new Request.Builder().url("http://" + connectorAddress + "/connectors/solaceSourceConnector") - .delete().build(); - Response deleteresponse = client.newCall(deleterequest).execute(); - logger.info("Delete response: " + deleteresponse); + deleteConnector(); // configure plugin: curl -X POST -H "Content-Type: application/json" -d // @solace_source_properties.json http://18.218.82.209:8083/connectors - Request configrequest = new 
Request.Builder().url("http://" + connectorAddress + "/connectors") + Request configrequest = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connectors") .post(RequestBody.create(configJson, MediaType.parse("application/json"))).build(); - Response configresponse = client.newCall(configrequest).execute(); - // if (!configresponse.isSuccessful()) throw new IOException("Unexpected code " - // + configresponse); - String configresults = configresponse.body().string(); - logger.info("Connector config results: " + configresults); + try (ResponseBody configresponse = client.newCall(configrequest).execute().body()) { + assertNotNull(configresponse); + String configresults = configresponse.string(); + logger.info("Connector config results: " + configresults); + } // check success - Request statusrequest = new Request.Builder() - .url("http://" + connectorAddress + "/connectors/solaceSourceConnector/status").build(); - Response statusresponse; - long starttime = System.currentTimeMillis(); - do { - statusresponse = client.newCall(statusrequest).execute(); - assert (System.currentTimeMillis() - starttime < 10000l); // don't wait forever - } while (!statusresponse.body().string().contains("state\":\"RUNNING")); + AtomicReference statusResponse = new AtomicReference<>(new JsonObject()); + assertTimeoutPreemptively(Duration.ofSeconds(10), () -> { + JsonObject connectorStatus; + do { + connectorStatus = getConnectorStatus(); + statusResponse.set(connectorStatus); + } while (!(expectStartFail ? "FAILED" : "RUNNING").equals(Optional.ofNullable(connectorStatus) + .map(a -> a.getAsJsonArray("tasks")) + .map(a -> a.size() > 0 ? a.get(0) : null) + .map(JsonElement::getAsJsonObject) + .map(a -> a.get("state")) + .map(JsonElement::getAsString) + .orElse(""))); + }, () -> "Timed out while waiting for connector to start: " + gson.toJson(statusResponse.get())); Thread.sleep(10000); // Give some extra time to start logger.info("Connector is now RUNNING"); } catch (IOException e) { @@ -145,4 +149,35 @@ void startConnector(Properties props) { e.printStackTrace(); } } + + public void deleteConnector() throws IOException { + Request request = new Request.Builder().url(kafkaConnection.getConnectUrl() + "/connectors/solaceSourceConnector") + .delete().build(); + try (Response response = client.newCall(request).execute()) { + logger.info("Delete response: " + response); + } + } + + public JsonObject getConnectorStatus() { + Request request = new Request.Builder() + .url(kafkaConnection.getConnectUrl() + "/connectors/solaceSourceConnector/status").build(); + return assertTimeoutPreemptively(Duration.ofSeconds(30), () -> { + while (true) { + try (Response response = client.newCall(request).execute()) { + if (!response.isSuccessful()) { + continue; + } + + return responseBodyToJson(response.body()).getAsJsonObject(); + } + } + }); + } + + private JsonElement responseBodyToJson(ResponseBody responseBody) { + return Optional.ofNullable(responseBody) + .map(ResponseBody::charStream) + .map(s -> new JsonParser().parse(s)) + .orElseGet(JsonObject::new); + } } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java new file mode 100644 index 0000000..edf45c8 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceSourceTaskIT.java @@ -0,0 +1,188 @@ +package com.solace.connector.kafka.connect.source.it; + +import 
com.solace.connector.kafka.connect.source.SolMessageProcessorIF; +import com.solace.connector.kafka.connect.source.SolaceSourceConstants; +import com.solace.connector.kafka.connect.source.SolaceSourceTask; +import com.solace.connector.kafka.connect.source.it.util.extensions.NetworkPubSubPlusExtension; +import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor; +import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension; +import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; +import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension; +import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension.LogCaptor; +import com.solace.test.integration.semp.v2.SempV2Api; +import com.solacesystems.jcsmp.BytesXMLMessage; +import com.solacesystems.jcsmp.JCSMPErrorResponseException; +import com.solacesystems.jcsmp.JCSMPErrorResponseSubcodeEx; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPFactory; +import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; +import com.solacesystems.jcsmp.JCSMPStreamingPublishCorrelatingEventHandler; +import com.solacesystems.jcsmp.Queue; +import com.solacesystems.jcsmp.TextMessage; +import com.solacesystems.jcsmp.XMLMessageProducer; +import org.apache.commons.lang3.RandomStringUtils; +import org.apache.kafka.connect.errors.ConnectException; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.IOException; +import java.time.Duration; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.instanceOf; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; + +@ExtendWith(ExecutorServiceExtension.class) +@ExtendWith(LogCaptorExtension.class) +@ExtendWith(NetworkPubSubPlusExtension.class) +public class SolaceSourceTaskIT { + private SolaceSourceTask solaceSourceTask; + private Map connectorProperties; + + private static final Logger logger = LoggerFactory.getLogger(SolaceSourceTaskIT.class); + + @BeforeEach + void setUp(JCSMPProperties jcsmpProperties) { + solaceSourceTask = new SolaceSourceTask(); + + connectorProperties = new HashMap<>(); + connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, TestConstants.CONN_MSGPROC_CLASS); + connectorProperties.put(SolaceSourceConstants.SOL_HOST, jcsmpProperties.getStringProperty(JCSMPProperties.HOST)); + connectorProperties.put(SolaceSourceConstants.SOL_VPN_NAME, jcsmpProperties.getStringProperty(JCSMPProperties.VPN_NAME)); + connectorProperties.put(SolaceSourceConstants.SOL_USERNAME, jcsmpProperties.getStringProperty(JCSMPProperties.USERNAME)); + 
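+    // The password completes the minimal connection config; host, VPN, username,
+    // and password are all read from the JCSMPProperties supplied by the
+    // NetworkPubSubPlusExtension, so the task under test talks to the same
+    // dockerized PubSub+ broker as the test itself.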
connectorProperties.put(SolaceSourceConstants.SOL_PASSWORD, jcsmpProperties.getStringProperty(JCSMPProperties.PASSWORD)); + } + + @AfterEach + void tearDown() { + solaceSourceTask.stop(); + } + + @Test + public void testFailTopicListenerInit() { + String topicName = RandomStringUtils.randomAlphanumeric(100); + connectorProperties.put(SolaceSourceConstants.SOL_TOPICS, String.join(",", topicName, topicName)); + + ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(connectorProperties)); + assertThat(thrown.getMessage(), containsString("Failed to start topic consumer")); + assertThat(thrown.getCause(), instanceOf(JCSMPErrorResponseException.class)); + assertEquals(JCSMPErrorResponseSubcodeEx.SUBSCRIPTION_ALREADY_PRESENT, + ((JCSMPErrorResponseException)thrown.getCause()).getSubcodeEx()); + } + + @Test + public void testFailQueueConsumerInit() { + connectorProperties.put(SolaceSourceConstants.SOL_QUEUE, RandomStringUtils.randomAlphanumeric(10)); + + ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(connectorProperties)); + assertThat(thrown.getMessage(), containsString("Failed to start queue consumer")); + assertThat(thrown.getCause(), instanceOf(JCSMPErrorResponseException.class)); + assertEquals(JCSMPErrorResponseSubcodeEx.UNKNOWN_QUEUE_NAME, + ((JCSMPErrorResponseException)thrown.getCause()).getSubcodeEx()); + } + + @ParameterizedTest(name = "[{index}] ignoreMessageProcessorError={0}") + @ValueSource(booleans = { true, false }) + public void testMessageProcessorError(boolean ignoreMessageProcessorError, + JCSMPSession jcsmpSession, + SempV2Api sempV2Api, + Queue queue, + @ExecSvc ExecutorService executorService, + @LogCaptor(SolaceSourceTask.class) BufferedReader logReader) throws Exception { + String vpnName = connectorProperties.get(SolaceSourceConstants.SOL_VPN_NAME); + + connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, BadMessageProcessor.class.getName()); + connectorProperties.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, Boolean.toString(ignoreMessageProcessorError)); + connectorProperties.put(SolaceSourceConstants.SOL_QUEUE, queue.getName()); + solaceSourceTask.start(connectorProperties); + + XMLMessageProducer messageProducer = jcsmpSession.getMessageProducer(new JCSMPStreamingPublishCorrelatingEventHandler() { + @Override + public void responseReceivedEx(Object o) { + + } + + @Override + public void handleErrorEx(Object o, JCSMPException e, long l) { + + } + }); + + try { + TextMessage message = JCSMPFactory.onlyInstance().createMessage(TextMessage.class); + message.setText("Test payload"); + messageProducer.send(message, queue); + } finally { + messageProducer.close(); + } + + assertTimeoutPreemptively(Duration.ofSeconds(30), () -> { + while (sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) + .getData().getTxUnackedMsgCount() == 0) { + logger.info("Waiting for queue {} to deliver messages", queue.getName()); + Thread.sleep(Duration.ofSeconds(1).toMillis()); + } + }, String.format("Timed out while waiting for queue %s to deliver its messages", queue.getName())); + + if (ignoreMessageProcessorError) { + Future future = executorService.submit(() -> { + String logLine; + do { + try { + logger.info("Waiting for error log message"); + logLine = logReader.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } while (!logLine.contains("Encountered exception in message processing")); + }); + 
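+      // With sol.message_processor.error.ignore=true the task should log the
+      // processor failure and drop the message: poll() must come back empty while
+      // the captured log reports "Encountered exception in message processing".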
assertThat(solaceSourceTask.poll(), empty()); + future.get(30, TimeUnit.SECONDS); + solaceSourceTask.commit(); + assertTimeoutPreemptively(Duration.ofSeconds(30), () -> { + while (!sempV2Api.monitor() + .getMsgVpnQueueMsgs(vpnName, queue.getName(), 1, null, null, null) + .getData() + .isEmpty()) { + logger.info("Waiting for queue {} to be empty", queue.getName()); + Thread.sleep(Duration.ofSeconds(1).toMillis()); + } + }); + } else { + ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSourceTask.poll()); + assertThat(thrown.getMessage(), containsString("Encountered exception in message processing")); + assertEquals(BadMessageProcessor.TEST_EXCEPTION, thrown.getCause()); + solaceSourceTask.commit(); + Thread.sleep(Duration.ofSeconds(5).toMillis()); + assertEquals(1, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) + .getData().getTxUnackedMsgCount()); + } + } + + public static class BadMessageProcessor extends SolSampleSimpleMessageProcessor { + static final RuntimeException TEST_EXCEPTION = new RuntimeException("Some processing failure"); + + @Override + public SolMessageProcessorIF process(String skey, BytesXMLMessage message) { + throw TEST_EXCEPTION; + } + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java index 515da08..d8f9836 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java @@ -1,67 +1,85 @@ package com.solace.connector.kafka.connect.source.it; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonObject; +import com.solace.connector.kafka.connect.source.SolaceSourceConstants; +import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider; +import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaArgumentSource; +import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaContext; +import com.solace.connector.kafka.connect.source.it.util.extensions.NetworkPubSubPlusExtension; +import com.solacesystems.jcsmp.BytesMessage; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; +import com.solacesystems.jcsmp.Message; +import com.solacesystems.jcsmp.Queue; +import com.solacesystems.jcsmp.TextMessage; +import com.solacesystems.jcsmp.Topic; +import com.solacesystems.jcsmp.impl.AbstractDestination; +import org.apache.commons.lang3.RandomStringUtils; import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; -import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.solacesystems.jcsmp.BytesMessage; -import com.solacesystems.jcsmp.JCSMPException; -import com.solacesystems.jcsmp.Message; -import 
com.solacesystems.jcsmp.Queue; -import com.solacesystems.jcsmp.TextMessage; -import com.solacesystems.jcsmp.Topic; -import com.solacesystems.jcsmp.impl.AbstractDestination; import java.nio.ByteBuffer; +import java.time.Duration; import java.util.Arrays; import java.util.Properties; -import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; -public class SourceConnectorIT extends DockerizedPlatformSetupApache implements TestConstants { +@ExtendWith(NetworkPubSubPlusExtension.class) +@ExtendWith(KafkaArgumentsProvider.AutoDeleteSolaceConnectorDeploymentAfterEach.class) +public class SourceConnectorIT implements TestConstants { + + private Properties connectorProps; + private static final Logger LOG = LoggerFactory.getLogger(SourceConnectorIT.class); + static TestSolaceProducer solaceProducer; - static Logger logger = LoggerFactory.getLogger(SourceConnectorIT.class.getName()); - // Connectordeployment creates a Kafka topic "kafkaTestTopic", which is used - // next - static SolaceConnectorDeployment connectorDeployment = new SolaceConnectorDeployment(); - static TestKafkaConsumer kafkaConsumer = new TestKafkaConsumer(SolaceConnectorDeployment.kafkaTestTopic); - static TestSolaceProducer solaceProducer = new TestSolaceProducer(); - //////////////////////////////////////////////////// // Main setup/teardown @BeforeAll - static void setUp() { - connectorDeployment.waitForConnectorRestIFUp(); - connectorDeployment.provisionKafkaTestTopic(); - solaceProducer.setup(); - kafkaConsumer.run(); - try { - Thread.sleep(1000l); - } catch (InterruptedException e) { - e.printStackTrace(); - } + static void setUp(JCSMPSession jcsmpSession) throws Exception { + solaceProducer = new TestSolaceProducer(jcsmpSession); + solaceProducer.start(); + } + + @BeforeEach + public void beforeEach(JCSMPProperties jcsmpProperties) { + connectorProps = new Properties(); + connectorProps.setProperty(SolaceSourceConstants.SOL_HOST, String.format("tcp://%s:55555", NetworkPubSubPlusExtension.DOCKER_NET_PUBSUB_ALIAS)); + connectorProps.setProperty(SolaceSourceConstants.SOL_USERNAME, jcsmpProperties.getStringProperty(JCSMPProperties.USERNAME)); + connectorProps.setProperty(SolaceSourceConstants.SOL_PASSWORD, jcsmpProperties.getStringProperty(JCSMPProperties.PASSWORD)); + connectorProps.setProperty(SolaceSourceConstants.SOL_VPN_NAME, jcsmpProperties.getStringProperty(JCSMPProperties.VPN_NAME)); } @AfterAll static void cleanUp() { - kafkaConsumer.stop(); solaceProducer.close(); } //////////////////////////////////////////////////// // Test types - void messageToKafkaTest(Message msg, AbstractDestination destination, String expectedValue, Object expectedKey) { + void messageToKafkaTest(Message msg, AbstractDestination destination, String expectedValue, Object expectedKey, KafkaContext kafkaContext) { try { - // Clean catch queue first - // TODO: fix possible concurrency issue with cleaning/wring the queue later - TestKafkaConsumer.kafkaReceivedMessages.clear(); // Send Solace message if (destination instanceof Topic) { solaceProducer.sendMessageToTopic((Topic) destination, msg); @@ -69,11 +87,13 @@ void messageToKafkaTest(Message msg, AbstractDestination destination, String exp 
solaceProducer.sendMessageToQueue((Queue) destination, msg); } // Wait for Kafka to report message - ConsumerRecord record = TestKafkaConsumer.kafkaReceivedMessages.poll(5, TimeUnit.SECONDS); + ConsumerRecords records = kafkaContext.getConsumer().poll(Duration.ofSeconds(5)); + assertEquals(1, records.count()); + ConsumerRecord record = records.iterator().next(); // Evaluate message - assert (record != null); - logger.info("Kafka message received - Key=" + record.key() + ", Value=" + record.value()); - assert record.value().equals(expectedValue); + assertNotNull(record); + LOG.info("Kafka message received - Key=" + record.key() + ", Value=" + record.value()); + assertEquals(expectedValue, record.value()); // Check key if (expectedKey == null) { assert (record.key() == null); @@ -90,8 +110,6 @@ void messageToKafkaTest(Message msg, AbstractDestination destination, String exp } } catch (JCSMPException e1) { e1.printStackTrace(); - } catch (InterruptedException e) { - e.printStackTrace(); } } @@ -106,76 +124,86 @@ class SolaceConnectorSimpleMessageProcessorTests { //////////////////////////////////////////////////// // Scenarios - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic"); - prop.setProperty("sol.username", "test"); - prop.setProperty("sol.password", "test"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic"); + connectorProps.setProperty("sol.username", "test"); + connectorProps.setProperty("sol.password", "test"); } @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("1-Hello TextMessageToTopicTest world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "1-Hello TextMessageToTopicTest world!", null); + "1-Hello TextMessageToTopicTest world!", null, kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '2', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "2-Hello Topic world!", null); + "2-Hello Topic world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { '3', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "3-Hello attached world!", null); + "3-Hello attached world!", null, kafkaContext); } @DisplayName("TextMessage-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerTextmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("4-Hello TextmessageToKafkaTest world!"); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "4-Hello TextmessageToKafkaTest world!", null); + "4-Hello TextmessageToKafkaTest world!", null, kafkaContext); } @DisplayName("BytesMessage-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerBytesmessageToKafkaTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '5', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "5-Hello Queue world!", null); + "5-Hello Queue world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleSimpleMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { '6', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), // expected value & key: - "6-Hello attached world!", null); + "6-Hello attached world!", null, kafkaContext); } } @@ -187,75 +215,85 @@ void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { @TestInstance(Lifecycle.PER_CLASS) class SolaceConnectorNoneKeyedMessageProcessorTests { - @BeforeAll + @BeforeEach void setUp() { solaceProducer.resetQueue(SOL_QUEUE); - Properties prop = new Properties(); - prop.setProperty("sol.message_processor_class", + connectorProps.setProperty("sol.message_processor_class", "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); - prop.setProperty("sol.kafka_message_key", "NONE"); - prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); - connectorDeployment.startConnector(prop); + connectorProps.setProperty("sol.kafka_message_key", "NONE"); + connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); } @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerTextMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello TextMessageToTopicTest1 world!", null); + "Hello TextMessageToTopicTest1 world!", null, kafkaContext); } @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage( new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), // expected value & key: - "Hello Topic world!", null); + "Hello Topic world!", null, kafkaContext); } @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleKeyedMessageProcessor") - @Test - void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + @ParameterizedTest + @KafkaArgumentSource + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) { + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); BytesMessage msg = solaceProducer.createBytesMessage(null); msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
       });
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello attached world!", null);
+          "Hello attached world!", null, kafkaContext);
     }
 
     @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello TextmessageToKafkaTest world!", null);
+          "Hello TextmessageToKafkaTest world!", null, kafkaContext);
     }
 
     @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerBytesmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello Queue world!", null);
+          "Hello Queue world!", null, kafkaContext);
     }
 
     @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(null);
       msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ',
           'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello attached world!", null);
+          "Hello attached world!", null, kafkaContext);
     }
   }
 
@@ -267,71 +305,81 @@ void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() {
   @TestInstance(Lifecycle.PER_CLASS)
   class SolaceConnectorDestinationKeyedMessageProcessorTests {
 
-    @BeforeAll
+    @BeforeEach
     void setUp() {
       solaceProducer.resetQueue(SOL_QUEUE);
-      Properties prop = new Properties();
-      prop.setProperty("sol.message_processor_class",
+      connectorProps.setProperty("sol.message_processor_class",
           "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor");
-      prop.setProperty("sol.kafka_message_key", "DESTINATION");
-      prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
-      connectorDeployment.startConnector(prop);
+      connectorProps.setProperty("sol.kafka_message_key", "DESTINATION");
+      connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
     }
 
     @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest1 world!", "TestTopic1/SubTopic");
+          "Hello TextMessageToTopicTest1 world!", "TestTopic1/SubTopic", kafkaContext);
     }
 
     @DisplayName("TextMessage-Topic-wildcard-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest2() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest2 world!");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic2/SubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest2 world!", "TestTopic2/SubTopic");
+          "Hello TextMessageToTopicTest2 world!", "TestTopic2/SubTopic", kafkaContext);
     }
 
     @DisplayName("TextMessage-Topic-multi-level-wildcard-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest3() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest3(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest3 world!");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic3/SubTopic/SubSubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest3 world!", "TestTopic3/SubTopic/SubSubTopic");
+          "Hello TextMessageToTopicTest3 world!", "TestTopic3/SubTopic/SubSubTopic", kafkaContext);
     }
 
     @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello Topic world!", "TestTopic1/SubTopic");
+          "Hello Topic world!", "TestTopic1/SubTopic", kafkaContext);
     }
 
     @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello TextmessageToKafkaTest world!", SOL_QUEUE);
+          "Hello TextmessageToKafkaTest world!", SOL_QUEUE, kafkaContext);
     }
 
     @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerBytesmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello Queue world!", SOL_QUEUE);
+          "Hello Queue world!", SOL_QUEUE, kafkaContext);
     }
   }
 
@@ -342,57 +390,63 @@ void kafkaConsumerBytesmessageToKafkaTest() {
   @TestInstance(Lifecycle.PER_CLASS)
   class SolaceConnectorCorrelationIdKeyedMessageProcessorTests {
 
-    @BeforeAll
+    @BeforeEach
     void setUp() {
       solaceProducer.resetQueue(SOL_QUEUE);
-      Properties prop = new Properties();
-      prop.setProperty("sol.message_processor_class",
+      connectorProps.setProperty("sol.message_processor_class",
           "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor");
-      prop.setProperty("sol.kafka_message_key", "CORRELATION_ID");
-      prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
-      connectorDeployment.startConnector(prop);
+      connectorProps.setProperty("sol.kafka_message_key", "CORRELATION_ID");
+      connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
     }
 
     @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!");
       msg.setCorrelationId("test");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest1 world!", "test");
+          "Hello TextMessageToTopicTest1 world!", "test", kafkaContext);
     }
 
     @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       msg.setCorrelationId("test2");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello Topic world!", "test2");
+          "Hello Topic world!", "test2", kafkaContext);
     }
 
     @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!");
       msg.setCorrelationId("test3");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello TextmessageToKafkaTest world!", "test3");
+          "Hello TextmessageToKafkaTest world!", "test3", kafkaContext);
     }
 
     @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerBytesmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       msg.setCorrelationId("test4");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello Queue world!", "test4");
+          "Hello Queue world!", "test4", kafkaContext);
    }
   }
 
@@ -403,58 +457,64 @@ void kafkaConsumerBytesmessageToKafkaTest() {
   @TestInstance(Lifecycle.PER_CLASS)
   class SolaceConnectorCorrelationIdAsBytesKeyedMessageProcessorTests {
 
-    @BeforeAll
+    @BeforeEach
     void setUp() {
       solaceProducer.resetQueue(SOL_QUEUE);
-      Properties prop = new Properties();
-      prop.setProperty("sol.message_processor_class",
+      connectorProps.setProperty("sol.message_processor_class",
           "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor");
-      prop.setProperty("sol.kafka_message_key", "CORRELATION_ID_AS_BYTES");
-      prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
-      prop.setProperty("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter");
-      connectorDeployment.startConnector(prop);
+      connectorProps.setProperty("sol.kafka_message_key", "CORRELATION_ID_AS_BYTES");
+      connectorProps.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>");
+      connectorProps.setProperty("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter");
     }
 
     @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!");
       msg.setCorrelationId(new String(new byte[] { 1, 2, 3, 4 }));
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest1 world!", new String(new byte[] { 1, 2, 3, 4 }));
+          "Hello TextMessageToTopicTest1 world!", new String(new byte[] { 1, 2, 3, 4 }), kafkaContext);
     }
 
     @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       msg.setCorrelationId("test2");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello Topic world!", "test2");
+          "Hello Topic world!", "test2", kafkaContext);
     }
 
     @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerTextmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!");
       msg.setCorrelationId("test3");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello TextmessageToKafkaTest world!", "test3");
+          "Hello TextmessageToKafkaTest world!", "test3", kafkaContext);
     }
 
     @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor")
-    @Test
-    void kafkaConsumerBytesmessageToKafkaTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerBytesmessageToKafkaTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
          new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       msg.setCorrelationId("test4");
       messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE),
           // expected value & key:
-          "Hello Queue world!", "test4");
+          "Hello Queue world!", "test4", kafkaContext);
     }
   }
 
@@ -466,46 +526,81 @@ void kafkaConsumerBytesmessageToKafkaTest() {
   @TestInstance(Lifecycle.PER_CLASS)
   class SolaceConnectorSharedSubscriptionsTests {
 
-    @BeforeAll
+    @BeforeEach
     void setUp() {
       solaceProducer.resetQueue(SOL_QUEUE);
-      Properties prop = new Properties();
-      prop.setProperty("sol.message_processor_class",
+      connectorProps.setProperty("sol.message_processor_class",
           "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor");
-      prop.setProperty("sol.topics", "#share/group1/TestTopic1/SubTopic");
-      prop.setProperty("tasks.max", "5");
-      connectorDeployment.startConnector(prop);
+      connectorProps.setProperty("sol.topics", "#share/group1/TestTopic1/SubTopic");
+      connectorProps.setProperty("tasks.max", "5");
     }
 
     @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor")
-    @Test
-    void kafkaConsumerTextMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest world!");
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello TextMessageToTopicTest world!", null);
+          "Hello TextMessageToTopicTest world!", null, kafkaContext);
     }
 
    @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(
           new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello Topic world!", null);
+          "Hello Topic world!", null, kafkaContext);
     }
 
     @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor")
-    @Test
-    void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() {
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest(KafkaContext kafkaContext) {
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps);
       BytesMessage msg = solaceProducer.createBytesMessage(null);
       msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ',
           'w', 'o', 'r', 'l', 'd', '!' });
       messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
           // expected value & key:
-          "Hello attached world!", null);
+          "Hello attached world!", null, kafkaContext);
     }
   }
+
+  @DisplayName("Solace connector provisioning tests")
+  @Nested
+  @TestInstance(Lifecycle.PER_CLASS)
+  class SolaceConnectorProvisioningTests {
+    private final Gson GSON = new GsonBuilder().setPrettyPrinting().create();
+
+    @BeforeEach
+    void setUp() {
+      solaceProducer.resetQueue(SOL_QUEUE);
+    }
+
+    @ParameterizedTest
+    @KafkaArgumentSource
+    void testFailPubSubConnection(KafkaContext kafkaContext) {
+      connectorProps.setProperty("sol.message_processor_class",
+          "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor");
+      connectorProps.setProperty("sol.vpn_name", RandomStringUtils.randomAlphanumeric(10));
+      kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps, true);
+      AtomicReference<JsonObject> connectorStatus = new AtomicReference<>(new JsonObject());
+      assertTimeoutPreemptively(Duration.ofMinutes(1), () -> {
+        JsonObject taskStatus;
+        do {
+          JsonObject status = kafkaContext.getSolaceConnectorDeployment().getConnectorStatus();
+          connectorStatus.set(status);
+          taskStatus = status.getAsJsonArray("tasks").get(0).getAsJsonObject();
+        } while (!taskStatus.get("state").getAsString().equals("FAILED"));
+        assertThat(taskStatus.get("trace").getAsString(), containsString("Message VPN Not Allowed"));
+      }, () -> "Timed out waiting for connector to fail: " + GSON.toJson(connectorStatus.get()));
+    }
+  }
 }
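For reference, the provisioning test above polls SolaceConnectorDeployment.getConnectorStatus() until a task reports FAILED. Kafka Connect exposes this state through its standard REST API (GET /connectors/{name}/status). A minimal sketch of such a status probe is shown below; the class name, the connector name argument, and the use of HttpURLConnection are illustrative assumptions, not the deployment helper's actual implementation:

    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;

    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class ConnectorStatusProbe {
      /** Returns the state ("RUNNING", "FAILED", ...) of the connector's first task. */
      public static String firstTaskState(String connectUrl, String connectorName) throws Exception {
        URL url = new URL(connectUrl + "/connectors/" + connectorName + "/status");
        HttpURLConnection connection = (HttpURLConnection) url.openConnection();
        try (InputStreamReader reader =
            new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8)) {
          JsonObject status = JsonParser.parseReader(reader).getAsJsonObject();
          // "tasks" is a JSON array of objects carrying "id", "state", and (on failure) "trace"
          return status.getAsJsonArray("tasks").get(0).getAsJsonObject().get("state").getAsString();
        } finally {
          connection.disconnect();
        }
      }
    }

A test can loop on this probe (under a timeout, as testFailPubSubConnection does) instead of sleeping for a fixed interval.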
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java
deleted file mode 100644
index 06b1c9d..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Properties;
-
-public class TestConfigProperties {
-
-  static String testConfigPropertiesFile = "src/integrationTest/resources/manual-setup.properties";
-
-  // This class helps determine the docker host's IP address and avoids getting "localhost"
-  static class DockerHost {
-    static public String getIpAddress() {
-      String dockerReportedAddress = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA
-          .getServiceHost("kafka_1", 9092);
-      if (dockerReportedAddress == "localhost" || dockerReportedAddress == "127.0.0.1") {
-        return Tools.getIpAddress();
-      } else {
-        return MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA
-            .getServiceHost("kafka_1", 9092);
-      }
-    }
-  }
-
-  private Properties properties = new Properties();
-
-  TestConfigProperties() {
-    try (FileReader fileReader = new FileReader(testConfigPropertiesFile)) {
-      properties.load(fileReader);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  String getProperty(String name) {
-    String configuredProperty = properties.getProperty(name);
-    if (configuredProperty != null) {
-      return configuredProperty;
-    }
-    switch (name) {
-      case "sol.host":
-        // No port here
-        return DockerHost.getIpAddress();
-
-      case "sol.username":
-        return "default";
-
-      case "sol.password":
-        return "default";
-
-      case "sol.vpn_name":
-        return "default";
-
-      case "kafka.connect_rest_url":
-        return (DockerHost.getIpAddress() + ":28083");
-
-      case "kafka.bootstrap_servers":
-        return (DockerHost.getIpAddress() + ":39092");
-
-      default:
-        return null;
-    }
-  }
-}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java
index 22a21ec..7ff108c 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java
@@ -1,30 +1,14 @@
 package com.solace.connector.kafka.connect.source.it;
 
+import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor;
+
 public interface TestConstants {
+  String UNZIPPEDCONNECTORDESTINATION = "src/integrationTest/resources";
+  String CONNECTORJSONPROPERTIESFILE = "etc/solace_source_properties.json";
 
-  public static final String PUBSUB_TAG = "latest";
-  public static final String PUBSUB_HOSTNAME = "solbroker";
-  public static final String PUBSUB_NETWORK_NAME = "solace_msg_network";
-  public static final String FULL_DOCKER_COMPOSE_FILE_PATH = "src/integrationTest/resources/";
-  public static final String[] SERVICES = new String[]{"solbroker"};
-  public static final long MAX_STARTUP_TIMEOUT_MSEC = 120000l;
-  public static final String DIRECT_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/direct-active";
-  public static final int DIRECT_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550;
-  public static final String GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/guaranteed-active";
-  public static final int GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550;
+  String SOL_TOPICS = "pubsubplus-test-topic";
+  String SOL_QUEUE = "pubsubplus-test-queue";
+  String CONN_MSGPROC_CLASS = SolSampleSimpleMessageProcessor.class.getName();
+  String CONN_KAFKA_MSGKEY = "DESTINATION";
 
-  public static final String CONNECTORSOURCE = "build/distributions/pubsubplus-connector-kafka-source.zip";
-  public static final String UNZIPPEDCONNECTORDESTINATION = "src/integrationTest/resources";
-  public static final String CONNECTORPROPERTIESFILE = "etc/solace_source.properties";
-  public static final String CONNECTORJSONPROPERTIESFILE = "etc/solace_source_properties.json";
-
-  public static final String SOL_ADMINUSER_NAME = "default";
-  public static final String SOL_ADMINUSER_PW = "default";
-  public static final String SOL_VPN = "default";
-  public static final String KAFKA_SOURCE_TOPIC = "kafka-source-test-topic";
-  public static final String SOL_TOPICS = "pubsubplus-test-topic";
-  public static final String SOL_QUEUE = "pubsubplus-test-queue";
-  public static final String CONN_MSGPROC_CLASS = "com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor";
-  public static final String CONN_KAFKA_MSGKEY = "DESTINATION";
-
 }
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java
deleted file mode 100644
index ea7519f..0000000
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java
+++ /dev/null
@@ -1,117 +0,0 @@
-package com.solace.connector.kafka.connect.source.it;
-
-import org.apache.kafka.clients.consumer.ConsumerConfig;
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.errors.WakeupException;
-import org.apache.kafka.common.serialization.ByteBufferDeserializer;
-import org.apache.kafka.common.serialization.StringDeserializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.time.Duration;
-import java.util.Arrays;
-import java.util.Properties;
-import java.util.concurrent.ArrayBlockingQueue;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.CountDownLatch;
-
-public class TestKafkaConsumer {
-
-  // Queue to communicate received messages
-  public static BlockingQueue<ConsumerRecord<ByteBuffer, String>> kafkaReceivedMessages = new ArrayBlockingQueue<>(10);
-
-  private Runnable myConsumerRunnable;
-  private String kafkaTopic;
-  Logger logger = LoggerFactory.getLogger(TestKafkaConsumer.class.getName());
-  CountDownLatch latch = new CountDownLatch(1);
-
-  public TestKafkaConsumer(String kafkaTestTopic) {
-    kafkaTopic = kafkaTestTopic;
-  }
-
-  public void run() {
-    String bootstrapServers = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092)
-        + ":39092";
-    String groupId = "test";
-
-    // latch for dealing with multiple threads
-
-    // create the consumer runnable
-    logger.info("Creating the consumer thread");
-    myConsumerRunnable = new ConsumerRunnable(bootstrapServers, groupId, kafkaTopic, latch);
-
-    // start the thread
-    Thread myThread = new Thread(myConsumerRunnable);
-    myThread.start();
-    try {
-      latch.await();
-    } catch (InterruptedException e) {
-      e.printStackTrace();
-    }
-  }
-
-  public void stop() {
-    logger.info("Stopping consumer");
-    ((ConsumerRunnable) myConsumerRunnable).shutdown();
-    try {
-      latch.await();
-    } catch (InterruptedException e) {
-      e.printStackTrace();
-    }
-    logger.info("Consumer has been stopped");
-  }
-
-  public class ConsumerRunnable implements Runnable {
-
-    private CountDownLatch latch;
-    private KafkaConsumer<ByteBuffer, String> consumer;
-    private Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class.getName());
-
-    public ConsumerRunnable(String bootstrapServers, String groupId, String topic, CountDownLatch latch) {
-      this.latch = latch;
-
-      // create consumer configs
-      Properties properties = new Properties();
-      properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
-      properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class.getName());
-      properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
-      properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
-      properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
-
-      // create consumer
-      consumer = new KafkaConsumer<>(properties);
-      // subscribe consumer to our topic(s)
-      consumer.subscribe(Arrays.asList(topic));
-    }
-
-    @Override
-    public void run() {
-      // poll for new data
-      try {
-        while (true) {
-          ConsumerRecords<ByteBuffer, String> records = consumer.poll(Duration.ofMillis(100));
-          latch.countDown();
-          for (ConsumerRecord<ByteBuffer, String> record : records) {
-            kafkaReceivedMessages.put(record);
-            logger.info("Key: " + record.key() + ", Value: " + record.value());
-            logger.info("Partition: " + record.partition() + ", Offset:" + record.offset());
-          }
-        }
-      } catch (WakeupException e) {
-        logger.info("Received shutdown signal!");
-      } catch (InterruptedException e) {
-        e.printStackTrace();
-      } finally {
-        consumer.close();
-      }
-    }
-
-    public void shutdown() {
-      // the wakeup() method is a special method to interrupt consumer.poll()
-      // it will throw the exception WakeUpException
-      consumer.wakeup();
-    }
-  }
-}
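For orientation: the wrapper-thread consumer deleted above is superseded by a plain KafkaConsumer created per Kafka distribution in KafkaArgumentsProvider (shown further below in this patch). The essential configuration is unchanged; a condensed sketch of the equivalent direct-polling approach, with an illustrative group id:

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.ByteBufferDeserializer;
    import org.apache.kafka.common.serialization.StringDeserializer;

    import java.nio.ByteBuffer;
    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;

    class ConsumerSketch {
      static ConsumerRecords<ByteBuffer, String> pollOnce(String bootstrapServers, String topic) {
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "example-group");
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        try (KafkaConsumer<ByteBuffer, String> consumer = new KafkaConsumer<>(props)) {
          consumer.subscribe(Collections.singleton(topic));
          return consumer.poll(Duration.ofSeconds(5)); // direct polling, no wrapper thread or latch
        }
      }
    }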
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java
index f29a946..e2a3c34 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java
@@ -1,54 +1,45 @@
 package com.solace.connector.kafka.connect.source.it;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import com.solacesystems.jcsmp.BytesMessage;
 import com.solacesystems.jcsmp.DeliveryMode;
 import com.solacesystems.jcsmp.EndpointProperties;
 import com.solacesystems.jcsmp.JCSMPException;
 import com.solacesystems.jcsmp.JCSMPFactory;
-import com.solacesystems.jcsmp.JCSMPProperties;
 import com.solacesystems.jcsmp.JCSMPSession;
-import com.solacesystems.jcsmp.JCSMPStreamingPublishEventHandler;
+import com.solacesystems.jcsmp.JCSMPStreamingPublishCorrelatingEventHandler;
 import com.solacesystems.jcsmp.Message;
 import com.solacesystems.jcsmp.Queue;
 import com.solacesystems.jcsmp.TextMessage;
 import com.solacesystems.jcsmp.Topic;
 import com.solacesystems.jcsmp.XMLMessageProducer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-public class TestSolaceProducer {
-
-  static Logger logger = LoggerFactory.getLogger(SourceConnectorIT.class.getName());
-  private JCSMPSession session;
+public class TestSolaceProducer implements AutoCloseable {
+
+  private static final Logger logger = LoggerFactory.getLogger(TestSolaceProducer.class);
+  private final JCSMPSession session;
   private XMLMessageProducer producer;
 
-  public void setup() {
-    TestConfigProperties configProps = new TestConfigProperties();
-    final JCSMPProperties properties = new JCSMPProperties();
-    properties.setProperty(JCSMPProperties.HOST, "tcp://" + configProps.getProperty("sol.host") + ":55555"); // host:port
-    properties.setProperty(JCSMPProperties.USERNAME, configProps.getProperty("sol.username")); // client-username
-    properties.setProperty(JCSMPProperties.VPN_NAME, configProps.getProperty("sol.vpn_name")); // message-vpn
-    properties.setProperty(JCSMPProperties.PASSWORD, configProps.getProperty("sol.password")); // client-password
-    try {
-      session = JCSMPFactory.onlyInstance().createSession(properties);
-      session.connect();
-      producer = session.getMessageProducer(new JCSMPStreamingPublishEventHandler() {
-        @Override
-        public void responseReceived(String messageID) {
-          logger.info("Producer received response for msg: " + messageID);
-        }
-        @Override
-        public void handleError(String messageID, JCSMPException e, long timestamp) {
-          logger.info("Producer received error for msg: %s@%s - %s%n",
-              messageID, timestamp, e);
-        }
-      });
-    } catch (JCSMPException e1) {
-      e1.printStackTrace();
-    }
+  public TestSolaceProducer(JCSMPSession session) {
+    this.session = session;
   }
-
+
+  public void start() throws JCSMPException {
+    producer = session.getMessageProducer(new JCSMPStreamingPublishCorrelatingEventHandler() {
+      @Override
+      public void responseReceivedEx(Object correlationKey) {
+        logger.info("Producer received response for msg: " + correlationKey);
+      }
+
+      @Override
+      public void handleErrorEx(Object correlationKey, JCSMPException e, long timestamp) {
+        logger.error("Producer received error for msg: {} {}", correlationKey, timestamp, e);
+      }
+    });
+  }
+
   public TextMessage createTextMessage(String contents) {
     TextMessage textMessage = JCSMPFactory.onlyInstance().createMessage(TextMessage.class);
     textMessage.setText(contents);
@@ -60,20 +51,20 @@ public BytesMessage createBytesMessage(byte[] contents) {
     bytesMessage.setData(contents);
     return bytesMessage;
   }
-
+
   public Topic defineTopic(String topicName) {
     return JCSMPFactory.onlyInstance().createTopic(topicName);
   }
-
+
   public Queue defineQueue(String queueName) {
     return JCSMPFactory.onlyInstance().createQueue(queueName);
   }
-
+
   public void sendMessageToTopic(Topic topic, Message msg) throws JCSMPException {
     producer.send(msg, topic);
     logger.info("Message sent to Solace topic " + topic.toString());
   }
-
+
   public void resetQueue(String queueName) {
     try {
       final Queue queue = JCSMPFactory.onlyInstance().createQueue(queueName);
@@ -89,14 +80,15 @@ public void resetQueue(String queueName) {
       e.printStackTrace();
     }
   }
-
+
   public void sendMessageToQueue(Queue queue, Message msg) throws JCSMPException {
     msg.setDeliveryMode(DeliveryMode.PERSISTENT);
     producer.send(msg, queue);
     logger.info("Message sent to Solace queue " + queue.toString());
   }
-
+
+  @Override
   public void close() {
-    session.closeSession();
+    producer.close();
   }
 }
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java
index ed08533..9b604a6 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java
@@ -1,39 +1,21 @@
 package com.solace.connector.kafka.connect.source.it;
 
+import com.solace.connector.kafka.connect.source.VersionUtil;
+
 import java.io.IOException;
-import java.net.InterfaceAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
 
 public class Tools {
-  static public String getIpAddress() {
-    Set<String> HostAddresses = new HashSet<>();
-    try {
-      for (NetworkInterface ni : Collections.list(NetworkInterface.getNetworkInterfaces())) {
-        if (!ni.isLoopback() && ni.isUp() && ni.getHardwareAddress() != null) {
-          for (InterfaceAddress ia : ni.getInterfaceAddresses()) {
-            if (ia.getBroadcast() != null) { // If limited to IPV4
-              HostAddresses.add(ia.getAddress().getHostAddress());
-            }
-          }
-        }
-      }
-    } catch (SocketException e) { }
-    return (String) HostAddresses.toArray()[0];
-  }
 
   static public String getUnzippedConnectorDirName() {
     String connectorUnzippedPath = null;
     try {
       DirectoryStream<Path> dirs = Files.newDirectoryStream(
-          Paths.get(TestConstants.UNZIPPEDCONNECTORDESTINATION), "pubsubplus-connector-kafka-*");
+          Paths.get(TestConstants.UNZIPPEDCONNECTORDESTINATION),
+          "pubsubplus-connector-kafka-source-" + VersionUtil.getVersion());
       for (Path entry : dirs) {
         connectorUnzippedPath = entry.toString();
         break; // expecting only one
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java
new file mode 100644
index 0000000..872aa74
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/KafkaConnection.java
@@ -0,0 +1,52 @@
+package com.solace.connector.kafka.connect.source.it.util;
+
+import org.testcontainers.containers.GenericContainer;
+
+import java.util.Objects;
+
+public class KafkaConnection {
+  private final String bootstrapServers;
+  private final String connectUrl;
+  private final GenericContainer<?> kafkaContainer;
+  private final GenericContainer<?> connectContainer;
+
+  public KafkaConnection(String bootstrapServers, String connectUrl, GenericContainer<?> kafkaContainer,
+                         GenericContainer<?> connectContainer) {
+    this.bootstrapServers = bootstrapServers;
+    this.connectUrl = connectUrl;
+    this.kafkaContainer = kafkaContainer;
+    this.connectContainer = connectContainer;
+  }
+
+  public String getBootstrapServers() {
+    return bootstrapServers;
+  }
+
+  public String getConnectUrl() {
+    return connectUrl;
+  }
+
+  public GenericContainer<?> getKafkaContainer() {
+    return kafkaContainer;
+  }
+
+  public GenericContainer<?> getConnectContainer() {
+    return connectContainer;
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    KafkaConnection that = (KafkaConnection) o;
+    return Objects.equals(bootstrapServers, that.bootstrapServers) &&
+        Objects.equals(connectUrl, that.connectUrl) &&
+        Objects.equals(kafkaContainer, that.kafkaContainer) &&
+        Objects.equals(connectContainer, that.connectContainer);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(bootstrapServers, connectUrl, kafkaContainer, connectContainer);
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java
new file mode 100644
index 0000000..2b022b3
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/KafkaArgumentsProvider.java
@@ -0,0 +1,406 @@
+package com.solace.connector.kafka.connect.source.it.util.extensions;
+
+import com.solace.connector.kafka.connect.source.it.SolaceConnectorDeployment;
+import com.solace.connector.kafka.connect.source.it.util.KafkaConnection;
+import com.solace.connector.kafka.connect.source.it.util.testcontainers.BitnamiKafkaConnectContainer;
+import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaConnectContainer;
+import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaControlCenterContainer;
+import com.solace.connector.kafka.connect.source.it.util.testcontainers.ConfluentKafkaSchemaRegistryContainer;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.DeleteTopicsResult;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.KafkaFuture;
+import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
+import org.apache.kafka.common.serialization.ByteBufferDeserializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.junit.jupiter.api.extension.AfterEachCallback;
+import org.junit.jupiter.api.extension.ExtensionContext;
+import org.junit.jupiter.api.extension.ExtensionContext.Namespace;
+import org.junit.jupiter.api.extension.ExtensionContext.Store.CloseableResource;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.ArgumentsProvider;
+import org.junit.jupiter.params.provider.ArgumentsSource;
+import org.junitpioneer.jupiter.CartesianAnnotationConsumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+import java.io.IOException;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Stream;
+
+public class KafkaArgumentsProvider implements ArgumentsProvider,
+    CartesianAnnotationConsumer<KafkaArgumentsProvider.KafkaArgumentSource> {
+  private static final Logger LOG = LoggerFactory.getLogger(KafkaArgumentsProvider.class);
+
+  @Override
+  public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
+    KafkaConnection bitnamiCxn = context.getRoot()
+        .getStore(KafkaNamespace.BITNAMI.getNamespace())
+        .getOrComputeIfAbsent(BitnamiResource.class, c -> {
+          LOG.info("Creating Bitnami Kafka");
+          BitnamiKafkaConnectContainer container = new BitnamiKafkaConnectContainer()
+              .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET);
+          if (!container.isCreated()) {
+            container.start();
+          }
+          return new BitnamiResource(container);
+        }, BitnamiResource.class)
+        .getKafkaConnection();
+
+    KafkaConnection confluentCxn = context.getRoot()
+        .getStore(KafkaNamespace.CONFLUENT.getNamespace())
+        .getOrComputeIfAbsent(ConfluentResource.class, c -> {
+          LOG.info("Creating Confluent Kafka");
+          KafkaContainer kafkaContainer = new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka")
+              .withTag("6.2.1"))
+              .withNetwork(NetworkPubSubPlusExtension.DOCKER_NET)
+              .withNetworkAliases("kafka");
+          if (!kafkaContainer.isCreated()) {
+            kafkaContainer.start();
+          }
+
+          ConfluentKafkaSchemaRegistryContainer schemaRegistryContainer =
+              new ConfluentKafkaSchemaRegistryContainer(kafkaContainer)
+                  .withNetworkAliases("schema-registry");
+          if (!schemaRegistryContainer.isCreated()) {
+            schemaRegistryContainer.start();
+          }
+
+          ConfluentKafkaControlCenterContainer controlCenterContainer =
+              new ConfluentKafkaControlCenterContainer(kafkaContainer, schemaRegistryContainer);
+          if (!controlCenterContainer.isCreated()) {
+            controlCenterContainer.start();
+          }
+
+          ConfluentKafkaConnectContainer connectContainer =
+              new ConfluentKafkaConnectContainer(kafkaContainer, schemaRegistryContainer);
+          if (!connectContainer.isCreated()) {
+            connectContainer.start();
+          }
+          return new ConfluentResource(
+              new KafkaContainerResource<>(kafkaContainer),
+              new KafkaContainerResource<>(schemaRegistryContainer),
+              new KafkaContainerResource<>(controlCenterContainer),
+              new KafkaContainerResource<>(connectContainer));
+        }, ConfluentResource.class)
+        .getKafkaConnection();
+
+    return Stream.of(
+        Arguments.of(createKafkaContext(bitnamiCxn, KafkaNamespace.BITNAMI, context)),
+        Arguments.of(createKafkaContext(confluentCxn, KafkaNamespace.CONFLUENT, context))
+    );
+  }
+
+  private KafkaContext createKafkaContext(KafkaConnection connection, KafkaNamespace namespace,
+                                          ExtensionContext context) {
+    AdminClient adminClient = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(AdminClientResource.class, c -> {
+          LOG.info("Creating Kafka admin client for {}", connection.getBootstrapServers());
+          Properties properties = new Properties();
+          properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, connection.getBootstrapServers());
+          AdminClient newAdminClient = AdminClient.create(properties);
+          return new AdminClientResource(newAdminClient);
+        }, AdminClientResource.class)
+        .getAdminClient();
+
+    String kafkaTopic = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(TopicResource.class, c -> {
+          String topicName = RandomStringUtils.randomAlphanumeric(100);
+          LOG.info("Creating Kafka topic {}", topicName);
+          try {
+            adminClient.createTopics(Collections.singleton(new NewTopic(topicName, 5, (short) 1)))
+                .all().get(5, TimeUnit.SECONDS);
+          } catch (InterruptedException | ExecutionException | TimeoutException e) {
+            throw new RuntimeException(e);
+          }
+          return new TopicResource(topicName, adminClient);
+        }, TopicResource.class)
+        .getTopicName();
+
+    SolaceConnectorDeployment connectorDeployment = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(ConnectorDeploymentResource.class, c -> {
+          SolaceConnectorDeployment deployment = new SolaceConnectorDeployment(connection, kafkaTopic);
+          deployment.waitForConnectorRestIFUp();
+          return new ConnectorDeploymentResource(deployment);
+        }, ConnectorDeploymentResource.class)
+        .getDeployment();
+
+    KafkaConsumer<ByteBuffer, String> consumer = context.getRoot()
+        .getStore(namespace.getNamespace())
+        .getOrComputeIfAbsent(ConsumerResource.class, c -> {
+          LOG.info("Creating Kafka consumer for {}", connection.getBootstrapServers());
+          Properties properties = new Properties();
+          properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, connection.getBootstrapServers());
+          properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class
+              .getName());
+          properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class
+              .getName());
+          properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, RandomStringUtils.randomAlphanumeric(50));
+          properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+          // create consumer
+          KafkaConsumer<ByteBuffer, String> newConsumer = new KafkaConsumer<>(properties);
+          // subscribe consumer to our topic(s)
+          newConsumer.subscribe(Collections.singleton(kafkaTopic));
+          return new ConsumerResource(newConsumer);
+        }, ConsumerResource.class)
+        .getConsumer();
+
+    return new KafkaContext(namespace, connection, adminClient, connectorDeployment, consumer);
+  }
+
+  @Override
+  public void accept(KafkaArgumentSource kafkaArgumentSource) {
+
+  }
+
+  @Target(ElementType.METHOD)
+  @Retention(RetentionPolicy.RUNTIME)
+  @ArgumentsSource(KafkaArgumentsProvider.class)
+  public @interface KafkaArgumentSource {
+
+  }
+
+  public static class AutoDeleteSolaceConnectorDeploymentAfterEach implements AfterEachCallback {
+    @Override
+    public void afterEach(ExtensionContext context) throws Exception {
+      for (KafkaNamespace namespace : KafkaNamespace.values()) {
+        ConnectorDeploymentResource deploymentResource = context.getRoot()
+            .getStore(namespace.getNamespace())
+            .get(ConnectorDeploymentResource.class, ConnectorDeploymentResource.class);
+        if (deploymentResource != null) {
+          deploymentResource.close();
+        }
+      }
+    }
+  }
+
+  public static class KafkaContext {
+    private final KafkaNamespace namespace;
+    private final KafkaConnection connection;
+    private final AdminClient adminClient;
+    private final SolaceConnectorDeployment solaceConnectorDeployment;
+    private final KafkaConsumer<ByteBuffer, String> consumer;
+
+    private KafkaContext(KafkaNamespace namespace, KafkaConnection connection, AdminClient adminClient,
+                         SolaceConnectorDeployment solaceConnectorDeployment,
+                         KafkaConsumer<ByteBuffer, String> consumer) {
+      this.namespace = namespace;
+      this.connection = connection;
+      this.consumer = consumer;
+      this.solaceConnectorDeployment = solaceConnectorDeployment;
+      this.adminClient = adminClient;
+    }
+
+    public KafkaConnection getConnection() {
+      return connection;
+    }
+
+    public AdminClient getAdminClient() {
+      return adminClient;
+    }
+
+    public SolaceConnectorDeployment getSolaceConnectorDeployment() {
+      return solaceConnectorDeployment;
+    }
+
+    public KafkaConsumer<ByteBuffer, String> getConsumer() {
+      return consumer;
+    }
+
+    @Override
+    public String toString() {
+      return namespace.name();
+    }
+  }
+
+  private static class ConsumerResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(ConsumerResource.class);
+    private final KafkaConsumer<ByteBuffer, String> consumer;
+
+    private ConsumerResource(KafkaConsumer<ByteBuffer, String> consumer) {
+      this.consumer = consumer;
+    }
+
+    public KafkaConsumer<ByteBuffer, String> getConsumer() {
+      return consumer;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing Kafka consumer");
+      consumer.close();
+    }
+  }
+
+  private static class TopicResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(TopicResource.class);
+    private final String topicName;
+    private final AdminClient adminClient;
+
+    private TopicResource(String topicName, AdminClient adminClient) {
+      this.topicName = topicName;
+      this.adminClient = adminClient;
+    }
+
+    public String getTopicName() {
+      return topicName;
+    }
+
+    @Override
+    public void close() throws Throwable {
+      LOG.info("Deleting Kafka topic {}", topicName);
+      DeleteTopicsResult result = adminClient.deleteTopics(Collections.singleton(topicName));
+      for (Map.Entry<String, KafkaFuture<Void>> entry : result.values().entrySet()) {
+        try {
+          entry.getValue().get(1, TimeUnit.MINUTES);
+        } catch (ExecutionException e) {
+          if (!(e.getCause() instanceof UnknownTopicOrPartitionException)) {
+            throw e;
+          }
+        }
+      }
+    }
+  }
+
+  private static class AdminClientResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(AdminClientResource.class);
+    private final AdminClient adminClient;
+
+    private AdminClientResource(AdminClient adminClient) {
+      this.adminClient = adminClient;
+    }
+
+    public AdminClient getAdminClient() {
+      return adminClient;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing Kafka admin client");
+      adminClient.close();
+    }
+  }
+
+  private static class ConnectorDeploymentResource implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(ConnectorDeploymentResource.class);
+    private final SolaceConnectorDeployment deployment;
+
+    private ConnectorDeploymentResource(SolaceConnectorDeployment deployment) {
+      this.deployment = deployment;
+    }
+
+    public SolaceConnectorDeployment getDeployment() {
+      return deployment;
+    }
+
+    @Override
+    public void close() throws IOException {
+      LOG.info("Closing Kafka connector deployment");
+      deployment.deleteConnector();
+    }
+  }
+
+  private static class BitnamiResource extends KafkaContainerResource<BitnamiKafkaConnectContainer> {
+
+    private BitnamiResource(BitnamiKafkaConnectContainer container) {
+      super(container);
+    }
+
+    public KafkaConnection getKafkaConnection() {
+      return new KafkaConnection(getContainer().getBootstrapServers(), getContainer().getConnectUrl(),
+          getContainer(), getContainer());
+    }
+  }
+
+  private static class ConfluentResource implements CloseableResource {
+    private final KafkaContainerResource<KafkaContainer> kafka;
+    private final KafkaContainerResource<ConfluentKafkaSchemaRegistryContainer> schemaRegistry;
+    private final KafkaContainerResource<ConfluentKafkaControlCenterContainer> controlCenter;
+    private final KafkaContainerResource<ConfluentKafkaConnectContainer> connect;
+
+    private ConfluentResource(KafkaContainerResource<KafkaContainer> kafka,
+                              KafkaContainerResource<ConfluentKafkaSchemaRegistryContainer> schemaRegistry,
+                              KafkaContainerResource<ConfluentKafkaControlCenterContainer> controlCenter,
+                              KafkaContainerResource<ConfluentKafkaConnectContainer> connect) {
+      this.kafka = kafka;
+      this.schemaRegistry = schemaRegistry;
+      this.controlCenter = controlCenter;
+      this.connect = connect;
+    }
+
+    public KafkaConnection getKafkaConnection() {
+      return new KafkaConnection(kafka.getContainer().getBootstrapServers(),
+          connect.getContainer().getConnectUrl(), kafka.container, connect.container);
+    }
+
+    public KafkaContainerResource<KafkaContainer> getKafka() {
+      return kafka;
+    }
+
+    public KafkaContainerResource<ConfluentKafkaConnectContainer> getConnect() {
+      return connect;
+    }
+
+    @Override
+    public void close() {
+      connect.close();
+      controlCenter.close();
+      schemaRegistry.close();
+      kafka.close();
+    }
+  }
+
+  private static class KafkaContainerResource<T extends GenericContainer<?>> implements CloseableResource {
+    private static final Logger LOG = LoggerFactory.getLogger(KafkaContainerResource.class);
+    private final T container;
+
+    private KafkaContainerResource(T container) {
+      this.container = container;
+    }
+
+    public T getContainer() {
+      return container;
+    }
+
+    @Override
+    public void close() {
+      LOG.info("Closing container {}", container.getContainerName());
+      container.close();
+    }
+  }
+
+  private enum KafkaNamespace {
+    BITNAMI, CONFLUENT;
+
+    private final Namespace namespace;
+
+    KafkaNamespace() {
+      this.namespace = Namespace.create(KafkaArgumentsProvider.class, name());
+    }
+
+    public Namespace getNamespace() {
+      return namespace;
+    }
+  }
+}
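An illustrative test skeleton showing how the provider above is consumed: each method annotated with @KafkaArgumentSource becomes a @ParameterizedTest that runs once per Kafka distribution (Bitnami and Confluent), receiving the matching KafkaContext. The class and method names here are made up for the example:

    import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaArgumentSource;
    import com.solace.connector.kafka.connect.source.it.util.extensions.KafkaArgumentsProvider.KafkaContext;
    import org.junit.jupiter.params.ParameterizedTest;

    class ExampleIT {
      @ParameterizedTest
      @KafkaArgumentSource
      void runsAgainstBothKafkas(KafkaContext kafkaContext) {
        // the context bundles the connection, admin client, shared consumer, and connector deployment
        String bootstrap = kafkaContext.getConnection().getBootstrapServers();
      }
    }

Because each resource is cached in the extension-context store as a CloseableResource, the containers, consumer, and admin client are created once per JVM and torn down automatically when the root context closes.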
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java
new file mode 100644
index 0000000..9e58cee
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/extensions/NetworkPubSubPlusExtension.java
@@ -0,0 +1,20 @@
+package com.solace.connector.kafka.connect.source.it.util.extensions;
+
+import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension;
+import com.solace.test.integration.testcontainer.PubSubPlusContainer;
+import org.testcontainers.containers.Network;
+
+public class NetworkPubSubPlusExtension extends PubSubPlusExtension {
+  public static final Network DOCKER_NET = Network.newNetwork();
+  public static final String DOCKER_NET_PUBSUB_ALIAS = "solace-pubsubplus";
+
+  public NetworkPubSubPlusExtension() {
+    super(() -> new PubSubPlusContainer()
+        .withNetwork(DOCKER_NET)
+        .withNetworkAliases(DOCKER_NET_PUBSUB_ALIAS));
+  }
+
+  public Network getDockerNetwork() {
+    return DOCKER_NET;
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java
new file mode 100644
index 0000000..81a4aef
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/BitnamiKafkaConnectContainer.java
@@ -0,0 +1,145 @@
+package com.solace.connector.kafka.connect.source.it.util.testcontainers;
+
+import com.github.dockerjava.api.command.InspectContainerResponse;
+import com.solace.connector.kafka.connect.source.SolaceSourceTask;
+import com.solace.connector.kafka.connect.source.it.Tools;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.images.builder.Transferable;
+import org.testcontainers.utility.DockerImageName;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Comparator;
+
+public class BitnamiKafkaConnectContainer extends GenericContainer<BitnamiKafkaConnectContainer> {
+  private static final String BROKER_LISTENER_NAME = "PLAINTEXT";
+  private static final int BROKER_LISTENER_PORT = 9092;
+  private static final String BOOTSTRAP_LISTENER_NAME = "PLAINTEXT_HOST";
+  public static final int BOOTSTRAP_LISTENER_PORT = 29092;
+  public static final int CONNECT_PORT = 28083;
+  private static final int ZOOKEEPER_PORT = 2181;
+  private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("bitnami/kafka");
+  private static final String DEFAULT_IMAGE_TAG = "2";
+  private static final String STARTER_SCRIPT = "/testcontainers_start.sh";
+  private DockerImageName zookeeperDockerImageName = DockerImageName.parse("bitnami/zookeeper:3");
+  private GenericContainer<?> zookeeperContainer;
+
+  public BitnamiKafkaConnectContainer() {
+    this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG));
+  }
+
+  public BitnamiKafkaConnectContainer(String dockerImageName) {
+    this(DockerImageName.parse(dockerImageName));
+  }
+
+  public BitnamiKafkaConnectContainer(DockerImageName dockerImageName) {
+    super(dockerImageName);
+
+    withNetwork(Network.newNetwork());
+    withExposedPorts(CONNECT_PORT, BROKER_LISTENER_PORT, BOOTSTRAP_LISTENER_PORT);
+    withEnv("KAFKA_CFG_BROKER_ID", "1");
+    withEnv("ALLOW_PLAINTEXT_LISTENER", "yes");
+    withEnv("KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP", String.join(",",
+        BROKER_LISTENER_NAME + ":PLAINTEXT", BOOTSTRAP_LISTENER_NAME + ":PLAINTEXT"));
+    withEnv("KAFKA_CFG_LISTENERS", String.join(",",
+        BROKER_LISTENER_NAME + "://:" + BROKER_LISTENER_PORT, BOOTSTRAP_LISTENER_NAME + "://:" + BOOTSTRAP_LISTENER_PORT));
+    withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib",
+        "/opt/bitnami/kafka/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY);
+    waitingFor(Wait.forLogMessage(".*Finished starting connectors and tasks.*", 1));
+  }
+
+  @Override
+  public void start() {
+    if (zookeeperDockerImageName != null) {
+      String zookeeperNetworkAlias = "zookeeper";
+      zookeeperContainer = new GenericContainer<>(zookeeperDockerImageName)
+          .withNetwork(getNetwork())
+          .withNetworkAliases(zookeeperNetworkAlias)
+          .withEnv("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_PORT))
+          .withEnv("ZOOKEEPER_TICK_TIME", "2000")
+          .withEnv("ALLOW_ANONYMOUS_LOGIN", "yes");
+      dependsOn(zookeeperContainer);
+      withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", String.format("%s:%s", zookeeperNetworkAlias, ZOOKEEPER_PORT));
+    }
+    super.start();
+  }
+
+  @Override
+  protected void doStart() {
+    // Delay starting Kafka until after container has started
+    withCommand("sh", "-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + STARTER_SCRIPT);
+    super.doStart();
+  }
+
+  @Override
+  protected void containerIsStarting(InspectContainerResponse containerInfo) {
+    String command = "/bin/sh\n" +
+        "set -e\n" +
+        "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties\n" +
+        "echo 'rest.port=" + CONNECT_PORT + "' >> /opt/bitnami/kafka/config/connect-distributed.properties\n" +
+        "echo 'log4j.logger.org.apache.kafka.connect.runtime.WorkerSinkTask=DEBUG' >> /opt/bitnami/kafka/config/connect-log4j.properties\n" +
+        "echo 'log4j.logger." + SolaceSourceTask.class.getName() + "=TRACE' >> /opt/bitnami/kafka/config/connect-log4j.properties\n" +
+        "export KAFKA_CFG_ADVERTISED_LISTENERS=" + advertisedListeners(containerInfo) + "\n" +
+        "/opt/bitnami/scripts/kafka/setup.sh\n" +
+        "/opt/bitnami/scripts/kafka/run.sh &\n" +
+        "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties\n";
+    copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 0777), STARTER_SCRIPT);
+    super.containerIsStarting(containerInfo);
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    if (zookeeperContainer != null) {
+      zookeeperContainer.close();
+    }
+  }
+
+  public String getBootstrapServers() {
+    return String.format("%s:%s", getHost(), getMappedPort(BitnamiKafkaConnectContainer.BOOTSTRAP_LISTENER_PORT));
+  }
+
+  public String getConnectUrl() {
+    return String.format("http://%s:%s", getHost(), getMappedPort(BitnamiKafkaConnectContainer.CONNECT_PORT));
+  }
+
+  public BitnamiKafkaConnectContainer withZookeeper(DockerImageName dockerImageName) {
+    zookeeperDockerImageName = dockerImageName;
+    return this;
+  }
+
+  private String advertisedListeners(InspectContainerResponse containerInfo) {
+    return String.join(",",
+        String.format("%s://%s:%s", BROKER_LISTENER_NAME, getExternalIpAddress(containerInfo), BROKER_LISTENER_PORT),
+        String.format("%s://%s:%s", BOOTSTRAP_LISTENER_NAME, getHost(), getMappedPort(BOOTSTRAP_LISTENER_PORT)));
+  }
+
+  /**
+   * @see org.testcontainers.containers.KafkaContainer
+   */
+  private String getExternalIpAddress(InspectContainerResponse containerInfo) {
+    // Kafka supports only one INTER_BROKER listener, so we have to pick one.
+    // The current algorithm uses the following order of resolving the IP:
+    // 1. Custom network's IP set via `withNetwork`
+    // 2. Bridge network's IP
+    // 3. Best effort fallback to getNetworkSettings#ipAddress
+    return containerInfo.getNetworkSettings().getNetworks().entrySet()
+        .stream()
+        .filter(it -> it.getValue().getIpAddress() != null)
+        .max(Comparator.comparingInt(entry -> {
+          if (getNetwork().getId().equals(entry.getValue().getNetworkID())) {
+            return 2;
+          }
+
+          if ("bridge".equals(entry.getKey())) {
+            return 1;
+          }
+
+          return 0;
+        }))
+        .map(it -> it.getValue().getIpAddress())
+        .orElseGet(() -> containerInfo.getNetworkSettings().getIpAddress());
+  }
+}
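A minimal sketch of standing up this single-container Bitnami Kafka + Connect broker in a test (it assumes the connector zip has already been unpacked so the classpath resource mapping in the constructor can find its lib directory; the class and method names are illustrative):

    import com.solace.connector.kafka.connect.source.it.util.testcontainers.BitnamiKafkaConnectContainer;
    import org.testcontainers.containers.Network;

    class BitnamiUsageSketch {
      static void example() {
        try (BitnamiKafkaConnectContainer kafka = new BitnamiKafkaConnectContainer()
            .withNetwork(Network.newNetwork())) {
          kafka.start(); // also brings up the companion Zookeeper container
          String bootstrap = kafka.getBootstrapServers(); // host-reachable listener
          String connect = kafka.getConnectUrl();         // Kafka Connect REST endpoint
        }
      }
    }

The advertised-listeners export in the starter script is what makes the broker reachable both from the Docker network (by container IP) and from the host (by mapped port).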
schemaRegistry.getNetworkAliases().get(1), ConfluentKafkaSchemaRegistryContainer.REGISTRY_PORT)); + withEnv("CONNECT_BOOTSTRAP_SERVERS", String.format("%s:9092", kafka.getNetworkAliases().get(1))); + withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter"); + withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter"); + withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost"); + withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO"); + withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars"); + withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", + "/etc/kafka-connect/jars", BindMode.READ_ONLY); + waitingFor( Wait.forLogMessage(".*Kafka Connect started.*", 1) ); + } + + public String getConnectUrl() { + return String.format("http://%s:%s", getHost(), getMappedPort(ConfluentKafkaConnectContainer.CONNECT_PORT)); + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java new file mode 100644 index 0000000..f7ba130 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaControlCenterContainer.java @@ -0,0 +1,40 @@ +package com.solace.connector.kafka.connect.source.it.util.testcontainers; + +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.utility.DockerImageName; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class ConfluentKafkaControlCenterContainer extends GenericContainer { + private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-enterprise-control-center"); + private static final String DEFAULT_IMAGE_TAG = "6.2.1"; + + public ConfluentKafkaControlCenterContainer(KafkaContainer kafka, + ConfluentKafkaSchemaRegistryContainer schemaRegistry) { + this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG), kafka, schemaRegistry); + } + + public ConfluentKafkaControlCenterContainer(DockerImageName dockerImageName, + KafkaContainer kafka, + ConfluentKafkaSchemaRegistryContainer schemaRegistry) { + super(dockerImageName); + assertThat(kafka.getNetworkAliases().size(), greaterThanOrEqualTo(2)); + assertThat(schemaRegistry.getNetworkAliases().size(), greaterThanOrEqualTo(2)); + assertEquals(kafka.getNetwork(), schemaRegistry.getNetwork()); + + dependsOn(kafka, schemaRegistry); + withNetwork(kafka.getNetwork()); + withEnv("CONTROL_CENTER_REPLICATION_FACTOR", "1"); + withEnv("CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS", "1"); + withEnv("CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS", "1"); + withEnv("CONFLUENT_METRICS_TOPIC_REPLICATION", "1"); + withEnv("CONTROL_CENTER_SCHEMA_REGISTRY_URL", String.format("http://%s:%s", + schemaRegistry.getNetworkAliases().get(1), ConfluentKafkaSchemaRegistryContainer.REGISTRY_PORT)); + withEnv("CONTROL_CENTER_BOOTSTRAP_SERVERS", String.format("%s:9092", kafka.getNetworkAliases().get(1))); + withEnv("CONTROL_CENTER_ZOOKEEPER_CONNECT", String.format("%s:%s", kafka.getNetworkAliases().get(1), + KafkaContainer.ZOOKEEPER_PORT)); + } +} diff --git 
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java new file mode 100644 index 0000000..6740f0d --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/util/testcontainers/ConfluentKafkaSchemaRegistryContainer.java @@ -0,0 +1,40 @@ +package com.solace.connector.kafka.connect.source.it.util.testcontainers; + +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.utility.DockerImageName; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +public class ConfluentKafkaSchemaRegistryContainer extends GenericContainer { + public static final int REGISTRY_PORT = 8081; + private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("confluentinc/cp-schema-registry"); + private static final String DEFAULT_IMAGE_TAG = "6.2.1"; + + public ConfluentKafkaSchemaRegistryContainer(KafkaContainer kafka) { + this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_IMAGE_TAG), kafka); + } + + public ConfluentKafkaSchemaRegistryContainer(DockerImageName dockerImageName, KafkaContainer kafka) { + super(dockerImageName); + + assertNotNull(kafka.getNetwork()); + assertThat(kafka.getNetworkAliases().size(), greaterThanOrEqualTo(2)); + + dependsOn(kafka); + withNetwork(kafka.getNetwork()); + withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:" + REGISTRY_PORT); + withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", String.format("PLAINTEXT://%s:9092", + kafka.getNetworkAliases().get(1))); + waitingFor(Wait.forHttp("/subjects").forStatusCode(200)); + } + + @Override + protected void doStart() { + withEnv("SCHEMA_REGISTRY_HOST_NAME", getNetworkAliases().size() > 1 ?
getNetworkAliases().get(1) : getHost()); + super.doStart(); + } +} diff --git a/src/integrationTest/resources/docker-compose-kafka-apache.yml b/src/integrationTest/resources/docker-compose-kafka-apache.yml deleted file mode 100644 index afa48bf..0000000 --- a/src/integrationTest/resources/docker-compose-kafka-apache.yml +++ /dev/null @@ -1,29 +0,0 @@ -version: '3' - -services: - zookeeper: - image: bitnami/zookeeper:3 - ports: - - 2181:2181 - environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - ALLOW_ANONYMOUS_LOGIN: 'yes' - kafka: - image: bitnami/kafka:2 - ports: - - 9092:9092 - - 29092:29092 - - 39092:39092 - environment: - KAFKA_CFG_BROKER_ID: 1 - KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181 - ALLOW_PLAINTEXT_LISTENER: 'yes' - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT - KAFKA_CFG_LISTENERS: PLAINTEXT://:9092,PLAINTEXT_HOST://:29092,PLAINTEXT_EXTHOST://:39092 - KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092 -# KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092 -# KAFKA_CFG_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 -# KAFKA_CFG_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - depends_on: - - zookeeper diff --git a/src/integrationTest/resources/docker-compose-kafka-confluent.yml b/src/integrationTest/resources/docker-compose-kafka-confluent.yml deleted file mode 100644 index 2982b01..0000000 --- a/src/integrationTest/resources/docker-compose-kafka-confluent.yml +++ /dev/null @@ -1,71 +0,0 @@ -version: '3' - -services: - zookeeper: - image: confluentinc/cp-zookeeper:5.4.0 - ports: - - 2181:2181 - environment: - ZOOKEEPER_CLIENT_PORT: 2181 - ZOOKEEPER_TICK_TIME: 2000 - kafka: - image: confluentinc/cp-kafka:5.4.0 - ports: - - 9092:9092 - - 29092:29092 - - 39092:39092 - environment: - KAFKA_BROKER_ID: 1 - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092 - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 - KAFKA_TOPIC: $KAFKA_TOPIC - depends_on: - - zookeeper - kafka-setup: - image: confluentinc/cp-kafka:5.4.0 - hostname: kafka-setup - depends_on: - - kafka - - zookeeper - command: "bash -c 'echo Waiting for Kafka to be ready... && \ - cub kafka-ready -b kafka:9092 1 30 && \ - kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $KAFKA_TOPIC && \ - sleep 30'" - environment: - # The following settings are listed here only to satisfy the image's requirements. - # We override the image's `command` anyways, hence this container will not start a broker. 
- KAFKA_BROKER_ID: ignored - KAFKA_ZOOKEEPER_CONNECT: ignored - - schema-registry: - image: confluentinc/cp-schema-registry:5.4.0 - ports: - - 8081:8081 - environment: - SCHEMA_REGISTRY_HOST_NAME: localhost - SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 - SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka:9092 - depends_on: - - kafka - - control-center: - image: confluentinc/cp-enterprise-control-center:latest - hostname: control-center - depends_on: - - zookeeper - - kafka - - schema-registry - ports: - - "9021:9021" - environment: - CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092' - CONTROL_CENTER_ZOOKEEPER_CONNECT: 'zookeeper:2181' - CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" - CONTROL_CENTER_REPLICATION_FACTOR: 1 - CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1 - CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1 - CONFLUENT_METRICS_TOPIC_REPLICATION: 1 - PORT: 9021 \ No newline at end of file diff --git a/src/integrationTest/resources/docker-compose-solace.yml b/src/integrationTest/resources/docker-compose-solace.yml deleted file mode 100644 index fad702e..0000000 --- a/src/integrationTest/resources/docker-compose-solace.yml +++ /dev/null @@ -1,25 +0,0 @@ -version: '3' - -services: - solbroker: - image: solace/solace-pubsub-standard:$PUBSUB_TAG - hostname: $PUBSUB_HOSTNAME - env_file: - - ./solace.env - ports: - - "2222:2222" - - "8080:8080" - - "55003:55003" - - "55443:55443" - - "55445:55445" - - "55555:55555" - - "55556:55556" - - "5672:5672" - - "5550:5550" - - "8008:8008" - shm_size: 2g - ulimits: - memlock: -1 - nofile: - soft: 2448 - hard: 42192 diff --git a/src/integrationTest/resources/log4j2.xml b/src/integrationTest/resources/log4j2.xml new file mode 100644 index 0000000..e61d599 --- /dev/null +++ b/src/integrationTest/resources/log4j2.xml @@ -0,0 +1,14 @@ +<?xml version="1.0" encoding="UTF-8"?> +<Configuration status="WARN"> +  <Appenders> +    <Console name="STDOUT" target="SYSTEM_OUT"> +      <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger - %msg%n"/> +    </Console> +  </Appenders> +  <Loggers> + +    <Root level="INFO"> +      <AppenderRef ref="STDOUT"/> +    </Root> +  </Loggers> +</Configuration> \ No newline at end of file diff --git a/src/integrationTest/resources/logback-test.xml b/src/integrationTest/resources/logback-test.xml deleted file mode 100644 index 985c68e..0000000 --- a/src/integrationTest/resources/logback-test.xml +++ /dev/null @@ -1,14 +0,0 @@ -<configuration> -  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> -    <encoder> -      <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger -%msg%n%rEx{full, org}</pattern> -    </encoder> -  </appender> - - - - -  <root level="info"> -    <appender-ref ref="STDOUT"/> -  </root> -</configuration> \ No newline at end of file diff --git a/src/integrationTest/resources/solace.env b/src/integrationTest/resources/solace.env deleted file mode 100644 index 863a835..0000000 --- a/src/integrationTest/resources/solace.env +++ /dev/null @@ -1,4 +0,0 @@ -username_admin_globalaccesslevel=admin -username_admin_password=admin -system_scaling_maxconnectioncount=100 -logging_debug_output=all \ No newline at end of file diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java index 5df1099..ff0b92c 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java @@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory; public class SolFlowEventCallBackHandler implements FlowEventHandler { - final Logger log = LoggerFactory.getLogger(SolFlowEventCallBackHandler.class); + private static final Logger log = LoggerFactory.getLogger(SolFlowEventCallBackHandler.class); @Override public void handleEvent(Object obj, FlowEventArgs event) { diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java index ec0c2a4..e8f620a 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java @@ -26,7 +26,7 @@ import org.slf4j.LoggerFactory; public class SolReconnectCallbackHandler implements JCSMPReconnectEventHandler { - final Logger log = LoggerFactory.getLogger(SolReconnectCallbackHandler.class); + private static final Logger log = LoggerFactory.getLogger(SolReconnectCallbackHandler.class); @Override public void postReconnect() throws JCSMPException { diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java index 7b32572..abdbeef 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java @@ -27,7 +27,7 @@ import org.slf4j.LoggerFactory; public class SolSessionEventCallbackHandler implements SessionEventHandler { - final Logger log = LoggerFactory.getLogger(SolSessionEventCallbackHandler.class); + private static final Logger log = LoggerFactory.getLogger(SolSessionEventCallbackHandler.class); @Override public void handleEvent(SessionEventArgs event) { diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java index c16b61d..cd29211 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java @@ -19,7 +19,8 @@ package com.solace.connector.kafka.connect.source; -import com.solacesystems.jcsmp.InvalidPropertiesException; +import com.solacesystems.jcsmp.Context; +import com.solacesystems.jcsmp.ContextProperties; import com.solacesystems.jcsmp.JCSMPChannelProperties; import com.solacesystems.jcsmp.JCSMPException; import com.solacesystems.jcsmp.JCSMPFactory; @@ -27,14 +28,13 @@ import com.solacesystems.jcsmp.JCSMPSession; import com.solacesystems.jcsmp.JCSMPSessionStats; import com.solacesystems.jcsmp.statistics.StatType; -import com.solacesystems.jcsmp.Context; -import com.solacesystems.jcsmp.ContextProperties; - -import java.util.Enumeration; - +import org.apache.kafka.common.config.types.Password; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.Enumeration; +import java.util.Optional; + public class SolSessionHandler { private static final Logger log = LoggerFactory.getLogger(SolSessionHandler.class); @@ -57,11 +57,12 @@ public SolSessionHandler(SolaceSourceConnectorConfig connectorConfig) { */ public void configureSession() { // Required Properties - properties.setProperty(JCSMPProperties.USERNAME, + properties.setProperty(JCSMPProperties.USERNAME, connectorConfig.getString(SolaceSourceConstants.SOL_USERNAME)); - properties.setProperty(JCSMPProperties.PASSWORD, - connectorConfig.getString(SolaceSourceConstants.SOL_PASSWORD)); - properties.setProperty(JCSMPProperties.VPN_NAME, + properties.setProperty(JCSMPProperties.PASSWORD, + Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_PASSWORD)) + .map(Password::value).orElse(null)); + properties.setProperty(JCSMPProperties.VPN_NAME, 
connectorConfig.getString(SolaceSourceConstants.SOL_VPN_NAME)); properties.setProperty(JCSMPProperties.HOST, connectorConfig.getString(SolaceSourceConstants.SOL_HOST)); @@ -141,7 +142,8 @@ public void configureSession() { properties.setProperty(JCSMPProperties.SSL_TRUST_STORE, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE)); properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD, - connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD)); + Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD)) + .map(Password::value).orElse(null)); properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT)); properties.setProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST, @@ -149,13 +151,15 @@ public void configureSession() { properties.setProperty(JCSMPProperties .SSL_KEY_STORE, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE)); properties.setProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD, - connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD)); + Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD)) + .map(Password::value).orElse(null)); properties.setProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT)); properties.setProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT)); properties.setProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD, - connectorConfig.getString(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD)); + Optional.ofNullable(connectorConfig.getPassword(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD)) + .map(Password::value).orElse(null)); // } } @@ -163,15 +167,15 @@ public void configureSession() { /** * Connect JCSMPSession. * @return boolean result - * @throws JCSMPException + * @throws JCSMPException */ public void connectSession() throws JCSMPException { - + System.setProperty("java.security.auth.login.config", connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_LOGIN_CONFIG)); System.setProperty("java.security.krb5.conf", connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG)); - + session = JCSMPFactory.onlyInstance().createSession(properties, ctx, new SolSessionEventCallbackHandler()); session.connect(); } @@ -191,13 +195,13 @@ public void printStats() { log.info("\n"); } } - + /** * Shutdown the session. * @return return shutdown boolean result */ public boolean shutdown() { - + Context context = JCSMPFactory.onlyInstance().getDefaultContext(); if ( session != null ) { session.closeSession(); diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java index 7e66d25..d74a030 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java @@ -31,7 +31,7 @@ public class SolaceSourceConnectorConfig extends AbstractConfig { private static final Logger log = LoggerFactory.getLogger(SolaceSourceConnectorConfig.class); - + /** * Constructor to create Solace Configuration details for Source Connector. 
*/ @@ -45,17 +45,17 @@ public SolaceSourceConnectorConfig(Map properties) { * Returns a ConfigDef to be used for Source Task. */ public static ConfigDef solaceConfigDef() { - - + + // TODO: Revise defaults to JCSMP defaults - - + + return new ConfigDef() .define(SolaceSourceConstants.KAFKA_TOPIC, Type.STRING, "default", Importance.HIGH, "Kafka topic to consume from") - .define(SolaceSourceConstants.SOL_USERNAME, Type.STRING, "default", + .define(SolaceSourceConstants.SOL_USERNAME, Type.STRING, "default", Importance.HIGH, "Solace username") - .define(SolaceSourceConstants.SOL_PASSWORD, Type.STRING, "default", + .define(SolaceSourceConstants.SOL_PASSWORD, Type.PASSWORD, "default", Importance.HIGH, "Solace user password") .define(SolaceSourceConstants.SOL_HOST, Type.STRING, null, Importance.HIGH, "host to connect with, can be comma delimited for HA/DR") @@ -68,6 +68,9 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, Type.CLASS, SolMessageProcessorIF.class, Importance.HIGH, "default Solace message processor to use") + .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, Type.BOOLEAN, false, + Importance.MEDIUM, + "If enabled, messages that throw message processor errors will be discarded") .define(SolaceSourceConstants.SOL_LOCALHOST, Type.STRING, null, Importance.LOW, "The hostname or IP address of the machine on which the application is running. " + "On a multihomed machine, it is strongly recommended to provide this parameter " @@ -81,28 +84,28 @@ public static ConfigDef solaceConfigDef() { "If enabled, the API maintains a local cache of subscriptions and " + "reapplies them when the subscriber " + "connection is reestablished") - .define(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS, Type.BOOLEAN, false, Importance.LOW, "indicates whether to generate a send timestamp in outgoing messages") - .define(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS, Type.BOOLEAN, false, Importance.LOW, "Indicates whether to generate a receive timestamp on incoming messages") - .define(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS, Type.BOOLEAN, false, Importance.LOW, "Indicates whether to generate a sequence number in outgoing messages") - .define(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION, Type.BOOLEAN, false, Importance.LOW, "Indicates whether to calculate message expiration time in outgoing " + "messages and incoming messages") .define(SolaceSourceConstants.SOL_PUB_MULTI_THREAD, Type.BOOLEAN, true, Importance.LOW, "If enabled (default), the XMLMessageProducer is safe to access from multiple threads") - .define(SolaceSourceConstants.SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF, Type.BOOLEAN, true, + .define(SolaceSourceConstants.SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF, Type.BOOLEAN, true, Importance.LOW, "If enabled, during send operations, the XMLMessageProducer " + "concatenates all published data. " + "This can result in higher throughput for certain send operations. 
It can, however, " + "lead to performance degradation for some scenarios with large messages") - .define(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR, Type.BOOLEAN, false, Importance.LOW, "If enabled, messages delivered asynchronously to an XMLMessageListener " + "are delivered directly from the I/O thread " @@ -111,10 +114,10 @@ public static ConfigDef solaceConfigDef() { + "MUST return quickly " + "from the onReceive() callback, and MUST NOT call ANY session" + " methods from the I/O thread") - .define(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR, Type.BOOLEAN, false, Importance.LOW, "ignore errors caused by subscriptions being already presents") - .define(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR, Type.BOOLEAN, false, + .define(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR, Type.BOOLEAN, false, Importance.LOW, "When removing subscriptions ignore errors caused by subscriptions not being found.") .define(SolaceSourceConstants.SOL_NO_LOCAL, Type.BOOLEAN, false, Importance.LOW, @@ -126,72 +129,72 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSourceConstants.SOL_AUTHENTICATION_SCHEME, Type.STRING, "AUTHENTICATION_SCHEME_BASIC", Importance.MEDIUM, "String property specifying the authentication scheme.") - .define(SolaceSourceConstants.SOL_KRB_SERVICE_NAME, Type.STRING, "solace", + .define(SolaceSourceConstants.SOL_KRB_SERVICE_NAME, Type.STRING, "solace", Importance.MEDIUM, "This property is used to specify the ServiceName portion " + "of the Service Principal Name (SPN) " + "that has a format of ServiceName/ApplianceName@REALM.") - .define(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO, Type.STRING, "", + .define(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO, Type.STRING, "", Importance.MEDIUM, "Session property specifying a transport protocol that SSL session connection will be " + "downgraded to after client authentication. 
" + "Allowed values: TRANSPORT_PROTOCOL_PLAIN_TEXT.") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectTimeoutInMillis, Type.INT, 30000, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectTimeoutInMillis, Type.INT, 30000, Importance.MEDIUM, "Timeout value (in ms) for creating an initial connection to Solace") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis, Type.INT, 10000, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis, Type.INT, 10000, Importance.MEDIUM, "Timeout value (in ms) for reading a reply from Solace") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetries, Type.INT, 0, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetries, Type.INT, 0, Importance.MEDIUM, "The number of times to attempt and retry a connection to the host appliance " + "(or list of appliances) " + "during initial connection setup") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Type.INT, 0, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Type.INT, 0, Importance.MEDIUM, "The number of times to attempt to reconnect to the appliance (or list of appliances)" + " after an initial " + "connected session goes down") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetriesPerHost, Type.INT, 0, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_connectRetriesPerHost, Type.INT, 0, Importance.MEDIUM, "This property defines how many times to try to connect or reconnect " + "to a single host before" + " moving to the next host in the list") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis, Type.INT, 3000, Importance.MEDIUM, "How much time in (MS) to wait between each attempt to " + "connect or reconnect to a host") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis, - Type.INT, 3000, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis, + Type.INT, 3000, Importance.MEDIUM, "The amount of time (in ms) to wait between sending out keep-alive messages") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveLimit, Type.INT, 10, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveLimit, Type.INT, 10, Importance.MEDIUM, "The maximum number of consecutive keep-alive messages that can be sent without " + "receiving a response " + "before the connection is closed by the API") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_sendBuffer, Type.INT, 65536, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_sendBuffer, Type.INT, 65536, Importance.MEDIUM, "The size (in bytes) of the send socket buffer.") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_receiveBuffer, Type.INT, 65536, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_receiveBuffer, Type.INT, 65536, Importance.MEDIUM, "The size (in bytes) of the receive socket buffer.") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_tcpNoDelay, Type.BOOLEAN, true, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_tcpNoDelay, Type.BOOLEAN, true, Importance.LOW, "Whether to set the TCP_NODELAY option. 
When enabled, this option " + "disables the Nagle's algorithm.") - .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_compressionLevel, Type.INT, 0, + .define(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_compressionLevel, Type.INT, 0, Importance.MEDIUM, "A compressionLevel setting of 1-9 sets the ZLIB compression level to use; " + "a setting of 0 disables compression entirely.") .define(SolaceSourceConstants.SOL_SUBSCRIBER_LOCAL_PRIORITY, Type.INT, 1, Importance.MEDIUM, "Subscriber priority is used to choose a client to receive messages " + "sent with the DeliverToOne property set.") - .define(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY, Type.INT, 1, + .define(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY, Type.INT, 1, Importance.MEDIUM, "Subscriber priority is used to choose a client to receive messages s" + "ent with the DeliverToOne property set.") - .define(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE, Type.BOOLEAN, true, + .define(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE, Type.BOOLEAN, true, Importance.LOW, "When adding topic subscriptions override DTO processing for any " + "messages with DTO flags .") @@ -204,36 +207,36 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES, Type.STRING, "", Importance.LOW, "This property is used to specify a comma separated list of cipher suites in order of " + "preference used for SSL connections. ") - .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE, Type.BOOLEAN, true, + .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE, Type.BOOLEAN, true, Importance.LOW, "This property is used to specify whether the API should validate server certificates ") .define(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE_DATE, Type.BOOLEAN, true, Importance.LOW, "This property is used to specify whether the API should validate server " + "certificate's expiry") - .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE, Type.STRING, - "/lib/security/jssecacerts", + .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE, Type.STRING, + "/lib/security/jssecacerts", Importance.LOW, "This property is used to specify the truststore file to use in URL or path format.") - .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD, Type.STRING, "", Importance.LOW, + .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD, Type.PASSWORD, "", Importance.LOW, "This property is used to specify the password of the truststore given " + "in SSL_TRUST_STORE") - .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT, Type.STRING, "JKS", + .define(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT, Type.STRING, "JKS", Importance.LOW, "This property is used to specify the format of the truststore given in " + "SSL_TRUST_STORE.") - .define(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST, Type.STRING, "", + .define(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST, Type.STRING, "", Importance.LOW, "This property is used to specify a comma separated list of acceptable common names " + "for matching with server certificates.") .define(SolaceSourceConstants.SOL_SSL_KEY_STORE, Type.STRING, "", Importance.LOW, "This property is used to specify the keystore file to use in URL or path format.") - .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD, Type.STRING, "", Importance.LOW, + .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD, Type.PASSWORD, "", Importance.LOW, "This property is used to specify the password of the keystore specified " + "by SSL_KEY_STORE.") 
.define(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT, Type.STRING, "JKS", Importance.LOW, "This property is used to specify the format of the keystore given in SSL_KEY_STORE.") - .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT, Type.STRING, "JKS", + .define(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT, Type.STRING, "JKS", Importance.LOW, "This property is used to specify the format of an internal normalized " + "representation of the keystore " @@ -241,7 +244,7 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_ALIAS, Type.STRING, "", Importance.LOW, "This property is used to specify the alias of the private key to use " + "for client certificate authentication.") - .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD, Type.STRING, "", Importance.LOW, + .define(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD, Type.PASSWORD, "", Importance.LOW, "This property is used to specify the password that deciphers the " + "private key from the key store.") .define(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG, Type.STRING, "", Importance.LOW, @@ -250,7 +253,7 @@ "Location of the Kerberos Login Configuration File") .define(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY, Type.STRING, "NONE", Importance.MEDIUM, "This property determines if a Kafka key record is created and the key to be used"); - + }
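Taken together, the new `PASSWORD`-typed definitions and the `sol.message_processor.error.ignore` option can be exercised directly against `SolaceSourceConnectorConfig`. A minimal sketch, assuming it lives in the `com.solace.connector.kafka.connect.source` package; the connection values are placeholders:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.config.types.Password;

public class IgnoreErrorConfigSketch {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        // Hypothetical connection values, for illustration only
        props.put(SolaceSourceConstants.SOL_HOST, "tcp://localhost:55555");
        props.put(SolaceSourceConstants.SOL_USERNAME, "default");
        props.put(SolaceSourceConstants.SOL_PASSWORD, "secret");
        // New in this release: discard messages whose processor throws, instead of failing the task
        props.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR, "true");

        SolaceSourceConnectorConfig config = new SolaceSourceConnectorConfig(props);

        // Password-typed settings are now obfuscated: toString() prints "[hidden]"
        Password password = config.getPassword(SolaceSourceConstants.SOL_PASSWORD);
        System.out.println(password);          // [hidden]
        System.out.println(password.value());  // secret

        // SolaceSourceTask.poll() consults this flag before discarding a failed message
        System.out.println(config.getBoolean(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR)); // true
    }
}
```

This mirrors what the new unit tests below assert: `getPassword(...)` hides the value in logs while `value()` still yields the configured secret.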
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java index 35e95be..d22247b 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java @@ -33,14 +33,17 @@ public class SolaceSourceConstants { // High Importance Solace Message processor public static final String SOL_MESSAGE_PROCESSOR = "sol.message_processor_class"; + // Medium Importance Solace Message processor + public static final String SOL_MESSAGE_PROCESSOR_IGNORE_ERROR = "sol.message_processor.error.ignore"; + // High Importance Solace public static final String SOL_HOST = "sol.host"; public static final String SOL_USERNAME = "sol.username"; public static final String SOL_PASSWORD = "sol.password"; - + // TODO: SOL_MESSAGE_ACK_MODE is not used! public static final String SOL_MESSAGE_ACK_MODE = "sol.message_ack_mode"; - + public static final String SOL_VPN_NAME = "sol.vpn_name"; public static final String SOL_TOPICS = "sol.topics"; public static final String SOL_QUEUE = "sol.queue"; @@ -56,12 +59,12 @@ public class SolaceSourceConstants { public static final String SOL_CALCULATE_MESSAGE_EXPIRATION = "sol.calculate_message_expiration"; public static final String SOL_REAPPLY_SUBSCRIPTIONS = "sol.reapply_subscriptions"; public static final String SOL_PUB_MULTI_THREAD = "sol.pub_multi_thread"; - public static final String SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF + public static final String SOL_PUB_USE_INTERMEDIATE_DIRECT_BUF = "sol.pub_use_immediate_direct_pub"; public static final String SOL_MESSAGE_CALLBACK_ON_REACTOR = "sol.message_callback_on_reactor"; - public static final String SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR + public static final String SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR = "sol.ignore_duplicate_subscription_error"; - public static final String SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR + public static final String SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR = "sol.ignore_subscription_not_found_error"; public static final String SOL_NO_LOCAL = "sol.no_local"; public static final String SOL_ACK_EVENT_MODE = "sol.ack_event_mode"; @@ -82,34 +85,34 @@ public class SolaceSourceConstants { public static final String SOL_SSL_KEY_STORE = "sol.ssl_key_store"; public static final String SOL_SSL_KEY_STORE_PASSWORD = "sol.ssl_key_store_password"; public static final String SOL_SSL_KEY_STORE_FORMAT = "sol.ssl_key_store_format"; - public static final String SOL_SSL_KEY_STORE_NORMALIZED_FORMAT + public static final String SOL_SSL_KEY_STORE_NORMALIZED_FORMAT = "sol.ssl_key_store_normalized_format"; public static final String SOL_SSL_PRIVATE_KEY_ALIAS = "sol.ssl_private_key_alias"; public static final String SOL_SSL_PRIVATE_KEY_PASSWORD = "sol.ssl_private_key_password"; // Low Importance Solace Channel Properties - public static final String SOL_CHANNEL_PROPERTY_connectTimeoutInMillis + public static final String SOL_CHANNEL_PROPERTY_connectTimeoutInMillis = "sol.channel_properties.connect_timout_in_millis"; - public static final String SOL_CHANNEL_PROPERTY_readTimeoutInMillis + public static final String SOL_CHANNEL_PROPERTY_readTimeoutInMillis = "sol.channel_properties.read_timeout_in_millis"; - public static final String SOL_CHANNEL_PROPERTY_connectRetries + public static final String SOL_CHANNEL_PROPERTY_connectRetries = "sol.channel_properties.connect_retries"; - public static final String SOL_CHANNEL_PROPERTY_reconnectRetries + public static final String SOL_CHANNEL_PROPERTY_reconnectRetries = "sol.channel_properties.reconnect_retries"; - public static final String SOL_CHANNEL_PROPERTY_connectRetriesPerHost + public static final String SOL_CHANNEL_PROPERTY_connectRetriesPerHost = "sol.channnel_properties.connect_retries_per_host"; - public static final String SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis + public static final String SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis = "sol.channel_properties.reconnect_retry_wait_in_millis"; - public static final String SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis + public static final String SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis = "sol.channel_properties.keep_alive_interval_in_millis"; - public static final String SOL_CHANNEL_PROPERTY_keepAliveLimit + public static final String SOL_CHANNEL_PROPERTY_keepAliveLimit = "sol.channel_properties.keep_alive_limit"; public static final
String SOL_CHANNEL_PROPERTY_sendBuffer = "sol.channel_properties.send_buffer"; - public static final String SOL_CHANNEL_PROPERTY_receiveBuffer + public static final String SOL_CHANNEL_PROPERTY_receiveBuffer = "sol.channel_properties.receive_buffer"; - public static final String SOL_CHANNEL_PROPERTY_tcpNoDelay + public static final String SOL_CHANNEL_PROPERTY_tcpNoDelay = "sol.channel_properties.tcp_no_delay"; - public static final String SOL_CHANNEL_PROPERTY_compressionLevel + public static final String SOL_CHANNEL_PROPERTY_compressionLevel = "sol.channel_properties.compression_level"; // Low Importance Persistent Message Properties @@ -130,10 +133,10 @@ public class SolaceSourceConstants { // Allowable values include: NONE, DESTINATION, CORRELATION_ID, // CORRELATION_ID_AS_BYTES public static final String SOL_KAFKA_MESSAGE_KEY = "sol.kafka_message_key"; - + //Low importance Kerberos details public static final String SOL_KERBEROS_LOGIN_CONFIG = "sol.kerberos.login.conf"; - public static final String SOL_KERBEROS_KRB5_CONFIG = "sol.kerberos.krb5.conf"; - + public static final String SOL_KERBEROS_KRB5_CONFIG = "sol.kerberos.krb5.conf"; + } diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java index 643f33b..bfb493f 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java @@ -46,7 +46,7 @@ public class SolaceSourceQueueConsumer { this.solSessionHandler = solSessionHandler; } - public boolean init(BlockingQueue squeue) { + public void init(BlockingQueue squeue) throws JCSMPException { solQueue = JCSMPFactory.onlyInstance().createQueue(lconfig.getString(SolaceSourceConstants.SOL_QUEUE)); final ConsumerFlowProperties flow_prop = new ConsumerFlowProperties(); flow_prop.setEndpoint(solQueue); @@ -54,16 +54,10 @@ public boolean init(BlockingQueue squeue) { flow_prop.setStartState(true); EndpointProperties endpointProps = new EndpointProperties(); endpointProps.setAccessType(EndpointProperties.ACCESSTYPE_NONEXCLUSIVE); - try { - callbackhandler = new SolMessageQueueCallbackHandler(squeue); - recv = solSessionHandler.getSession().createFlow(callbackhandler, flow_prop, endpointProps, - new SolFlowEventCallBackHandler()); - recv.start(); - } catch (JCSMPException je) { - log.info("=========== JCSMP Exception while creating Solace Flow to Queue " + "in SolaceSourceQueueConsumer {} \n", - je.getLocalizedMessage()); - } - return true; + callbackhandler = new SolMessageQueueCallbackHandler(squeue); + recv = solSessionHandler.getSession().createFlow(callbackhandler, flow_prop, endpointProps, + new SolFlowEventCallBackHandler()); + recv.start(); } public void stop() { diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java index 3132569..f76f5d2 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java @@ -22,32 +22,30 @@ import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.DeliveryMode; import com.solacesystems.jcsmp.JCSMPException; -import com.solacesystems.jcsmp.JCSMPProperties; import com.solacesystems.jcsmp.JCSMPSession; +import org.apache.kafka.connect.errors.ConnectException; 
+import org.apache.kafka.connect.source.SourceRecord; +import org.apache.kafka.connect.source.SourceTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; -import org.apache.kafka.connect.source.SourceRecord; -import org.apache.kafka.connect.source.SourceTask; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class SolaceSourceTask extends SourceTask { // implements XMLMessageListener{ private static final Logger log = LoggerFactory.getLogger(SolaceSourceTask.class); - final JCSMPProperties properties = new JCSMPProperties(); - SolaceSourceConnectorConfig connectorConfig; private SolSessionHandler solSessionHandler = null; - BlockingQueue ingressMessages + BlockingQueue ingressMessages = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for any incoming message from PS+ topics and queue - BlockingQueue outstandingAckList + BlockingQueue outstandingAckList = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for Solace Flow messages String skafkaTopic; SolaceSourceTopicListener topicListener = null; @@ -72,10 +70,7 @@ public void start(Map props) { .getConfiguredInstance(SolaceSourceConstants .SOL_MESSAGE_PROCESSOR, SolMessageProcessorIF.class); } catch (Exception e) { - log.info( - "================ Encountered exception in creating the message processor." - + " Cause: {}, Stacktrace: {} ", - e.getCause(), e.getStackTrace()); + throw new ConnectException("Encountered exception in creating the message processor.", e); } skafkaTopic = connectorConfig.getString(SolaceSourceConstants.KAFKA_TOPIC); solSessionHandler = new SolSessionHandler(connectorConfig); @@ -83,24 +78,23 @@ public void start(Map props) { solSessionHandler.configureSession(); solSessionHandler.connectSession(); } catch (JCSMPException e) { - log.info("Received Solace exception {}, with the " - + "following: {} ", e.getCause(), e.getStackTrace()); - log.info("================ Failed to create JCSMPSession Session"); - stop(); + throw new ConnectException("Failed to create JCSMPSession", e); } log.info("================ JCSMPSession Connected"); if (connectorConfig.getString(SolaceSourceConstants.SOL_TOPICS) != null) { topicListener = new SolaceSourceTopicListener(connectorConfig, solSessionHandler); - if (!topicListener.init(ingressMessages)) { - log.info("================ Failed to start topic consumer ... shutting down"); - stop(); + try { + topicListener.init(ingressMessages); + } catch (JCSMPException e) { + throw new ConnectException("Failed to start topic consumer", e); } } if (connectorConfig.getString(SolaceSourceConstants.SOL_QUEUE) != null) { queueConsumer = new SolaceSourceQueueConsumer(connectorConfig, solSessionHandler); - if (!queueConsumer.init(ingressMessages)) { - log.info("================ Failed to start queue consumer ... 
shutting down"); - stop(); + try { + queueConsumer.init(ingressMessages); + } catch (JCSMPException e) { + throw new ConnectException("Failed to start queue consumer", e); } } } @@ -119,31 +113,37 @@ public synchronized List poll() throws InterruptedException { // There is at least one message to process spinTurns = 0; // init spinTurns again List records = new ArrayList<>(); - int processedInIhisBatch = 0; - int count = 0; + int processedInThisBatch; + int discarded = 0; int arraySize = ingressMessages.size(); - while (count < arraySize) { + for (processedInThisBatch = 0; processedInThisBatch < arraySize; processedInThisBatch++) { BytesXMLMessage msg = ingressMessages.take(); try { processor.process(connectorConfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); } catch (Exception e) { - log.info( - "================ Encountered exception in message processing....discarded." - + " Cause: {}, Stacktrace: {} ", - e.getCause(), e.getStackTrace()); + if (connectorConfig.getBoolean(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR_IGNORE_ERROR)) { + log.warn("================ Encountered exception in message processing....discarded.", e); + scheduleForAck(msg); + discarded++; + continue; + } else { + throw new ConnectException("Encountered exception in message processing", e); + } } Collections.addAll(records, processor.getRecords(skafkaTopic)); - count++; - processedInIhisBatch++; - if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT - || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { - outstandingAckList.add(msg); // enqueue messages received from guaranteed messaging endpoint for later ack - } + scheduleForAck(msg); } - log.debug("Processed {} records in this batch.", processedInIhisBatch); + log.debug("Processed {} records in this batch. Discarded {}", processedInThisBatch - discarded, discarded); return records; } + private synchronized void scheduleForAck(BytesXMLMessage msg) { + if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT + || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { + outstandingAckList.add(msg); // enqueue messages received from guaranteed messaging endpoint for later ack + } + } + /** * Kafka Connect method that write records to disk. 
*/ @@ -181,5 +181,5 @@ public synchronized void stop() { public JCSMPSession getSolSession() { return solSessionHandler.getSession(); } - + } diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java index 1a4a30e..89624ab 100644 --- a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java @@ -50,45 +50,30 @@ public SolaceSourceTopicListener(SolaceSourceConnectorConfig lconfig, SolSession this.solSessionHandler = solSessionHandler; } - public boolean init(BlockingQueue squeue) { - boolean topicListenerStarted = true; + public void init(BlockingQueue squeue) throws JCSMPException { solaceTopics = lconfig.getString(SolaceSourceConstants.SOL_TOPICS); topics = solaceTopics.split(","); - try { - callbackhandler = new SolMessageTopicCallbackHandler(lconfig, squeue); - cons = solSessionHandler.getSession().getMessageConsumer(new SolReconnectCallbackHandler(), callbackhandler); - } catch (JCSMPException je) { - log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage()); - } - try { - Topic topic; - int counter = 0; - log.info("Number of topics to add: {} ", topics.length); - while (topics.length > counter) { - log.info("Adding subscription for topic {} ", topics[counter].trim()); - TopicProperties tproperties = new TopicProperties(); - tproperties.setName(topics[counter].trim()); - // Only used for legacy PubSub+ versions - tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE)); - topic = JCSMPFactory.onlyInstance().createTopic(tproperties); - solSessionHandler.getSession().addSubscription(topic, true); - counter++; - } - } catch (JCSMPException je) { - log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage()); + callbackhandler = new SolMessageTopicCallbackHandler(lconfig, squeue); + cons = solSessionHandler.getSession().getMessageConsumer(new SolReconnectCallbackHandler(), callbackhandler); + + Topic topic; + int counter = 0; + log.info("Number of topics to add: {} ", topics.length); + while (topics.length > counter) { + log.info("Adding subscription for topic {} ", topics[counter].trim()); + TopicProperties tproperties = new TopicProperties(); + tproperties.setName(topics[counter].trim()); + // Only used for legacy PubSub+ versions + tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE)); + topic = JCSMPFactory.onlyInstance().createTopic(tproperties); + solSessionHandler.getSession().addSubscription(topic, true); + counter++; } - try { - cons.start(); - } catch (JCSMPException je) { - log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage()); - topicListenerStarted = false; - } + cons.start(); log.info("================Session is Connected"); - return topicListenerStarted; - } public void shutdown() { diff --git a/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java b/src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java similarity index 88% rename from src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java rename to src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java index 258df9b..839a546 100644 --- 
a/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java +++ b/src/template/java/com/solace/connector/kafka/connect/source/VersionUtil.java @@ -1,14 +1,12 @@ package com.solace.connector.kafka.connect.source; public class VersionUtil { - + /** * Returns the projects version number for the connector. */ public static String getVersion() { - - return "2.0.2"; - + return "${version}"; } } diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java new file mode 100644 index 0000000..643a0cc --- /dev/null +++ b/src/test/java/com/solace/connector/kafka/connect/source/SolSessionHandlerTest.java @@ -0,0 +1,43 @@ +package com.solace.connector.kafka.connect.source; + +import com.solacesystems.jcsmp.JCSMPProperties; +import org.apache.commons.lang.RandomStringUtils; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import java.util.HashMap; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class SolSessionHandlerTest { + @ParameterizedTest + @CsvSource({ + SolaceSourceConstants.SOL_PASSWORD + ',' + JCSMPProperties.PASSWORD, + SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD + ',' + JCSMPProperties.SSL_KEY_STORE_PASSWORD, + SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD + ',' + JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD, + SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD + ',' + JCSMPProperties.SSL_TRUST_STORE_PASSWORD + }) + public void testConfigurePasswords(String connectorProperty, String jcsmpProperty) { + Map properties = new HashMap<>(); + properties.put(connectorProperty, RandomStringUtils.randomAlphanumeric(30)); + SolSessionHandler sessionHandler = new SolSessionHandler(new SolaceSourceConnectorConfig(properties)); + sessionHandler.configureSession(); + assertEquals(properties.get(connectorProperty), sessionHandler.properties.getStringProperty(jcsmpProperty)); + } + + @ParameterizedTest + @CsvSource({ + SolaceSourceConstants.SOL_PASSWORD + ',' + JCSMPProperties.PASSWORD, + SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD + ',' + JCSMPProperties.SSL_KEY_STORE_PASSWORD, + SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD + ',' + JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD, + SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD + ',' + JCSMPProperties.SSL_TRUST_STORE_PASSWORD + }) + public void testConfigureNullPasswords(String connectorProperty, String jcsmpProperty) { + Map properties = new HashMap<>(); + properties.put(connectorProperty, null); + SolSessionHandler sessionHandler = new SolSessionHandler(new SolaceSourceConnectorConfig(properties)); + sessionHandler.configureSession(); + assertEquals(properties.get(connectorProperty), sessionHandler.properties.getStringProperty(jcsmpProperty)); + } +} diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java new file mode 100644 index 0000000..ddd00ec --- /dev/null +++ b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfigTest.java @@ -0,0 +1,29 @@ +package com.solace.connector.kafka.connect.source; + +import org.apache.commons.lang.RandomStringUtils; +import org.apache.kafka.common.config.types.Password; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import 
java.util.HashMap; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class SolaceSourceConnectorConfigTest { + @ParameterizedTest + @ValueSource(strings = { + SolaceSourceConstants.SOL_PASSWORD, + SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD, + SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD, + SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD + }) + public void testPasswordsObfuscation(String property) { + Map properties = new HashMap<>(); + properties.put(property, RandomStringUtils.randomAlphanumeric(30)); + SolaceSourceConnectorConfig config = new SolaceSourceConnectorConfig(properties); + Password password = config.getPassword(property); + assertEquals(Password.HIDDEN, password.toString()); + assertEquals(properties.get(property), password.value()); + } +} diff --git a/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java new file mode 100644 index 0000000..71667bf --- /dev/null +++ b/src/test/java/com/solace/connector/kafka/connect/source/SolaceSourceTaskTest.java @@ -0,0 +1,53 @@ +package com.solace.connector.kafka.connect.source; + +import com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor; +import com.solacesystems.jcsmp.JCSMPException; +import org.apache.kafka.common.KafkaException; +import org.apache.kafka.connect.errors.ConnectException; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + +public class SolaceSourceTaskTest { + private SolaceSourceTask solaceSourceTask; + + @BeforeEach + void setUp() { + solaceSourceTask = new SolaceSourceTask(); + } + + @AfterEach + void tearDown() { + solaceSourceTask.stop(); + } + + @Test + public void testNoProvidedMessageProcessor() { + Map props = Collections.emptyMap(); + ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(props)); + assertThat(thrown.getMessage(), containsString("Encountered exception in creating the message processor.")); + assertThat(thrown.getCause(), instanceOf(KafkaException.class)); + assertThat(thrown.getCause().getMessage(), containsString( + "Could not find a public no-argument constructor for " + SolMessageProcessorIF.class.getName())); + } + + @Test + public void testFailSessionConnect() { + Map props = new HashMap<>(); + props.put(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, SolSampleSimpleMessageProcessor.class.getName()); + + ConnectException thrown = Assertions.assertThrows(ConnectException.class, () -> solaceSourceTask.start(props)); + assertThat(thrown.getMessage(), containsString("Failed to create JCSMPSession")); + assertThat(thrown.getCause(), instanceOf(JCSMPException.class)); + assertThat(thrown.getCause().getMessage(), containsString("Null value was passed in for property (host)")); + } +} diff --git a/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java b/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java new file mode 100644 index 0000000..ede8998 --- /dev/null +++ b/src/test/java/com/solace/connector/kafka/connect/source/VersionUtilTest.java @@ -0,0 
+1,16 @@ +package com.solace.connector.kafka.connect.source; + +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.regex.Pattern; + +import static org.junit.jupiter.api.Assertions.assertLinesMatch; + +public class VersionUtilTest { + @Test + public void testGetVersion() { + assertLinesMatch(Collections.singletonList(Pattern.compile("^[0-9]+\\.[0-9]+\\.[0-9]+$").pattern()), + Collections.singletonList(VersionUtil.getVersion())); + } +} diff --git a/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml new file mode 100644 index 0000000..e61d599 --- /dev/null +++ b/src/test/resources/log4j2.xml @@ -0,0 +1,14 @@ +<?xml version="1.0" encoding="UTF-8"?> +<Configuration status="WARN"> +  <Appenders> +    <Console name="STDOUT" target="SYSTEM_OUT"> +      <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger - %msg%n"/> +    </Console> +  </Appenders> +  <Loggers> + +    <Root level="INFO"> +      <AppenderRef ref="STDOUT"/> +    </Root> +  </Loggers> +</Configuration> \ No newline at end of file
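To see the new option end to end, a connector instance can be configured through the Kafka Connect REST API. A hedged sketch follows: the `sol.*` keys come from `SolaceSourceConstants` above, but the connector class name, the `kafka.topic` key, the connector name, and the endpoint address are assumptions to adapt to your deployment (for example, the value returned by `getConnectUrl()` earlier).

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class DeployConnectorSketch {
    public static void main(String[] args) throws Exception {
        // Connector class FQN and "kafka.topic" key are assumed; verify against your install.
        String configJson = "{"
                + "\"connector.class\": \"com.solace.connector.kafka.connect.source.SolaceSourceConnector\","
                + "\"tasks.max\": \"1\","
                + "\"kafka.topic\": \"solace-ingress\","
                + "\"sol.host\": \"tcp://solace-broker:55555\","
                + "\"sol.username\": \"default\","
                + "\"sol.password\": \"default\","
                + "\"sol.vpn_name\": \"default\","
                + "\"sol.topics\": \"sourcetest\","
                + "\"sol.message_processor_class\": \"com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor\","
                + "\"sol.message_processor.error.ignore\": \"true\""
                + "}";

        // PUT /connectors/{name}/config creates or updates the connector; port is illustrative.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:28083/connectors/solace-source/config"))
                .header("Content-Type", "application/json")
                .PUT(HttpRequest.BodyPublishers.ofString(configJson))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```

With `sol.message_processor.error.ignore` set to `true`, messages whose processor throws are acknowledged and dropped rather than failing the task, matching the behavior introduced in `SolaceSourceTask.poll()` above.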