diff --git a/.gitignore b/.gitignore
index f3d5a45..6705f1c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,7 +25,7 @@ tmp/**/*
 *~.nib
 local.properties
 .classpath
-.settings/
+.settings
 .loadpath
 .checkstyle
@@ -34,3 +34,7 @@ local.properties
 # Locally stored "Eclipse launch configurations"
 *.launch
+/build/
+
+# Unzipped test connector
+src/integrationTest/resources/pubsubplus-connector-kafka*/
diff --git a/.settings/org.eclipse.buildship.core.prefs b/.settings/org.eclipse.buildship.core.prefs
deleted file mode 100644
index e889521..0000000
--- a/.settings/org.eclipse.buildship.core.prefs
+++ /dev/null
@@ -1,2 +0,0 @@
-connection.project.dir=
-eclipse.preferences.version=1
diff --git a/.travis.yml b/.travis.yml
index c06f6a1..bf18541 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,30 @@
 language: java
 install: true
-
+sudo: required
+services:
+  - docker
 jdk:
-- openjdk8
+  - openjdk8
 script:
-  - ./gradlew clean check jar
+  - ./gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT
+
+after_success:
+- >
+  if [ "$TRAVIS_PULL_REQUEST" = "false" ] && [ "$TRAVIS_BRANCH" = "master" ]; then
+    git config --global user.email "travis@travis-ci.org";
+    git config --global user.name "travis-ci";
+    mkdir gh-pages; # Now update gh-pages
+    git clone --quiet --branch=gh-pages https://${GH_TOKEN}@github.com/SolaceProducts/pubsubplus-connector-kafka-source gh-pages > /dev/null 2>&1;
+    rm gh-pages/downloads/pubsubplus-connector-kafka-source*
+    mv build/distributions/pubsubplus-connector-kafka-source* gh-pages/downloads
+    cd gh-pages;
+    pushd downloads
+    cp index.template index.html; FILENAME=`find . | grep *.zip | cut -d'/' -f2 | sed 's/.\{4\}$//'`; sed -i "s/CONNECTOR_NAME/$FILENAME/g" index.html;
+    popd;
+    git add -f .;
+    git commit -m "Latest connector distribution on successful travis build $TRAVIS_BUILD_NUMBER auto-pushed to gh-pages";
+    git remote add origin-pages https://${GH_TOKEN}@github.com/SolaceProducts/pubsubplus-connector-kafka-source.git > /dev/null 2>&1;
+    git push --quiet --set-upstream origin-pages gh-pages;
+    echo "Updated and pushed GH pages!";
+  fi
diff --git a/NOTES.md b/NOTES.md
new file mode 100644
index 0000000..ca22c09
--- /dev/null
+++ b/NOTES.md
@@ -0,0 +1,3 @@
+- Simple message processor: supported JMS message types
+TextMessage
+BytesMessage
diff --git a/README.md b/README.md
index 8bfcc91..75b86a5 100644
--- a/README.md
+++ b/README.md
@@ -1,339 +1,355 @@
-[![Build Status](https://travis-ci.org/SolaceLabs/pubsubplus-connector-kafka-sink.svg?branch=development)](https://travis-ci.org/SolaceLabs/pubsubplus-connector-kafka-sink)
+[![Build Status](https://travis-ci.org/SolaceProducts/pubsubplus-connector-kafka-source.svg?branch=master)](https://travis-ci.org/SolaceProducts/pubsubplus-connector-kafka-source)

# PubSub+ Connector Kafka Source

This project provides a Solace PubSub+ Event Broker to Kafka [Source Connector](//kafka.apache.org/documentation.html#connect_concepts) (adapter) that makes use of the [Kafka Connect API](//kafka.apache.org/documentation/#connect).

-# PubSub+ Connector Kafka Source v1.0

**Note**: there is also a PubSub+ Kafka Sink Connector available from the [PubSub+ Connector Kafka Sink](https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink) GitHub repository.

-## Synopsis

Contents:

-This project provides a Solace/Kafka Source Connector (adapter) that makes use of the Kafka Connect API.
-The Solace/Kafka adapter consumes Solace real-time queue or topic data events and streams the Solace events to a Kafka topic.

 * [Overview](#overview)
 * [Use Cases](#use-cases)
 * [Downloads](#downloads)
 * [Quick Start](#quick-start)
 * [Parameters](#parameters)
 * [User Guide](#user-guide)
   + [Deployment](#deployment)
   + [Troubleshooting](#troubleshooting)
   + [Message Processors](#message-processors)
   + [Performance Considerations](#performance-considerations)
   + [Security Considerations](#security-considerations)
 * [Developers Guide](#developers-guide)

## Overview

-The Solace Source Connector was created using Solace's high performance Java API to move Solace data events to the Kafka Broker.
-Unlike many other message brokers, Solace supports transparent protocol and API messaging transformations, therefore,
-any message that reaches the Solace broker, regardless of the many Solace supported message transports or language/API, can be moved to a Topic
-(Key or not Keyed) on the Kafka Broker via the single Solace Source Connector.

The PubSub+ Source Connector consumes PubSub+ event broker real-time queue or topic data events and streams them to a Kafka topic as Source Records.

-Consider the following diagram:

The connector was created using the PubSub+ high-performance Java API to move PubSub+ data events to the Kafka broker.

-![Architecture Overview](resources/KSource3.png)

## Use Cases

-It does not matter if the ingress message to the Solace broker (appliance, software or cloud) is from an iPhone, a REST POST or an AMQP, JMS or MQTT message, it can be sent automatically to the Kafka Topic via the Solace Source Connector.

#### Protocol and API Messaging Transformations

-The Solace Source Connector also ties Kafka records into the Solace Event Mesh.
-The Event Mesh is a clustered group of Solace PubSub+ Brokers that transparently, in real-time, route data events to any Service that is part of the Event Mesh.
-Solace PubSub+ Brokers (Appliances, Software and SolaceCloud) are connected to each other as a multi-connected mesh that to individual services
-(consumers or producers of data events) appears to be a single Event Broker. Events messages are seamlessly transported within the entire Solace Event
-Mesh regardless of where the event is created and where the process exists that has registered interested in consuming the event.
-Simply by having the Solace Source Connector register interest in receiving events, the entire Event Mesh becomes aware of the registration request and will know how to securely
-route the appropriate events generated by other service on the Event Mesh to the Solace Source Connector. The Solace Source Connector takes those event
- messages and sends them as Kafka Source Records to the Kafka broker for storage in a Kafka Topic.

Unlike many other message brokers, the Solace PubSub+ Event Broker supports transparent protocol and API messaging transformations.
-The Solace Source Connector eliminates the complexity and overhead of maintaining separate Source Connectors for each and every service that generates data events that Kakfa may wish to consume.
-There is the added benefit of access to services where there is no Kafka Source Connector available, thereby eliminating the need to create and maintain a new connector for
-services from which Kafka may wish to store the data.

As the following diagram shows, any message that reaches the PubSub+ broker via the many supported message transports or language/API (examples can include an iPhone via C API, a REST POST, an AMQP, JMS or MQTT message) can be moved to a Topic (Key or not Keyed) on the Kafka broker via the single PubSub+ Source Connector.

-Consider the following:

![Messaging Transformations](/doc/images/KSource.png)

-![Event Mesh](resources/EventMesh.png)

#### Tying Kafka into the PubSub+ Event Mesh

-A single Solace Source Connector will be able to move service events from any upstream service to Kafka via a single connector.

The [PubSub+ Event Mesh](//docs.solace.com/Solace-PubSub-Platform.htm#PubSub-mesh) is a clustered group of PubSub+ Event Brokers, which appears to individual services (consumers or producers of data events) to be a single transparent event broker. The Event Mesh routes data events in real-time to any of its client services. The Solace PubSub+ brokers can be any of the three categories: dedicated extreme performance hardware appliances, high performance software brokers that are deployed as software images (deployable under most Hypervisors, Cloud IaaS and PaaS layers and in Docker) or provided as a fully-managed Cloud MaaS (Messaging as a Service).

-The Solace Source Connector also ties into Solace's location transparency for the Event Mesh PubSub+ brokers.
-Solace supports a wide range of brokers for deployment.
-There are three major categories of Solace PubSub+ brokers: dedicated extreme performance hardware appliances,
-high performance software brokers that are deployed as software images (deployable under most Hypervisors, Cloud IaaS and PaaS layers and in Docker)
-and provided as a fully managed Cloud MaaS (Messaging as a Service).

Simply by having the PubSub+ Source Connector register interest in receiving events, the entire Event Mesh becomes aware of the registration request and will know how to securely route the appropriate events generated by other services on the Event Mesh to the PubSub+ Source Connector. The PubSub+ Source Connector takes those event messages and sends them as Kafka Source Records to the Kafka broker for storage in a Kafka Topic, regardless of where in the Event Mesh the service that generated the event is located.

-It does not matter what Solace Broker is used or where it is deployed, it can become part of the Solace Event Mesh.
-Therefore, there are no restrictions on where the Solace Source Connector is deployed or what PubSub+ broker is used to connect Kafka to the Solace Event Bus.
-The Solace Event Mesh infrastructure will allow, via the Solace Source Connector, any events in the Event Mesh to be transported and stored in Kafka.

![Messaging Transformations](/doc/images/EventMesh.png)

-Consider the following:

#### Eliminating the Need for Separate Source Connectors

-![Location Independence](resources/SolaceCloud1.png)

The PubSub+ Source Connector eliminates the complexity and overhead of maintaining separate Source Connectors for each and every upstream service that generates data events that Kafka may wish to consume.
There is the added benefit of access to services where there is no Kafka Source Connector available, thereby eliminating the need to create and maintain a new connector for services from which Kafka may wish to store the data.

![Messaging Transformations](/doc/images/SingleConnector.png)

-It does not matter where in the Service Mesh the Service is located that generated the event.
-It does not matter if Solace Source Connector was connected to a Solace PubSub+ broker that was an appliance,
-on premise or Cloud software, or the the Cloud managed MaaS, it will immediately, in real time, be available to pass that data for Storage in Kafka.

## Downloads

-It is important to mention that there is also a Solace Sink Connector for Kafka available. The Solace Sink Connector allows consumption of new Kafka Records added to the Kafka brokers to
- any service that has registered
- interest in consuming any new Kafka Record events from the Service Mesh. Please refer to the Solace Sink Connector GitHub repository for more details.

The PubSub+ Kafka Source Connector is available as a ZIP or TAR package from the [downloads](//solaceproducts.github.io/pubsubplus-connector-kafka-source/downloads/) page.

-## Usage

The package includes jar libraries, documentation with license information and sample property files. Download and extract it into a directory that is on the `plugin.path` of your `connect-standalone` or `connect-distributed` properties file.

-This is a Gradle project that references all the required dependencies. To check the code style and find bugs you can use:

## Quick Start

-```ini
-./gradlew clean check
-```

This example demonstrates an end-to-end scenario similar to the [Protocol and API messaging transformations](#protocol-and-api-messaging-transformations) use case, using the WebSocket API to publish a message to the PubSub+ event broker.

-To actually create the Connector Jar file use:

It builds on the open source [Apache Kafka Quickstart tutorial](https://kafka.apache.org/quickstart) and walks through getting started in a standalone environment for development purposes. For setting up a distributed environment for production purposes, refer to the User Guide section.

-```ini
-./gradlew clean jar
-```

**Note**: The steps are similar if using [Confluent Kafka](//www.confluent.io/download/); there may be differences in the root directory where the Kafka binaries (`bin`) and properties (`etc/kafka`) are located.

-## Deployment

**Steps**

-The Solace Source Connector has been tested in three environments: Apache Kafka, Confluent Kafka and the AWS Confluent Platform.
-For testing, it is recommended to use the single node deployment of Apache or Confluent Kafka software.

1. Install Kafka. Follow the [Apache tutorial](//kafka.apache.org/quickstart#quickstart_download) to download the Kafka release code, start the Zookeeper and Kafka servers in separate command line sessions, then create a topic named `test` and verify it exists.

-To deploy the Connector, as described in the Kafka documentation, it is necessary to move the Connector jar file and the required third party jar files to a directory that is part of the Worker-defined classpath. Details for installing the Solace Source Connector are described in the next two sub sections.

2. Install PubSub+ Source Connector. Designate and create a directory for the PubSub+ Source Connector - assuming it is named `connectors`.
   Edit `config/connect-standalone.properties` and ensure the `plugin.path` parameter value includes the absolute path of the `connectors` directory (see the example below).
   [Download]( https://solaceproducts.github.io/pubsubplus-connector-kafka-source/downloads ) and extract the PubSub+ Source Connector into the `connectors` directory.
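   For example, if the `connectors` directory was created under a hypothetical `/opt/kafka` installation, the entry might look like:
   ```ini
   plugin.path=/opt/kafka/connectors
   ```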
-For Apache Kafka, the software is typically found, for example for the 2.11 version, under the root directory: "/opt/kafka-apache/"kafka_2.11-1.1.0". Typically the Solace Source Connector would be placed under the "libs" directory under the root directory. All required Solace JCSMP JAR files should be placed under the same "libs" directory. The properties file for the connector would typically be placed under the "config" directory below the root directory.

3. Acquire access to a PubSub+ message broker. If you don't already have one available, the easiest option is to get a free-tier service in a few minutes in [PubSub+ Cloud](//solace.com/try-it-now/), following the instructions in [Creating Your First Messaging Service](https://docs.solace.com/Solace-Cloud/ggs_signup.htm).

-To start the connector in stand-alone mode while in the "bin" directory the command would be similar to:

4. Configure the PubSub+ Source Connector:

-```ini
-./connect-standalone.sh ../config/connect-standalone.properties ../config/solaceSource.properties
-```

   a) Locate the following connection information of your messaging service for the "Solace Java API" (this is what the connector is using inside):
   * Username
   * Password
   * Message VPN
   * one of the Host URIs

   b) Edit the PubSub+ Source Connector properties file located at `connectors/pubsubplus-connector-kafka-source-/etc/solace_source.properties`, updating the following parameters so the connector can access the PubSub+ event broker:
   * `sol.username`
   * `sol.password`
   * `sol.vpn_name`
   * `sol.host`

   **Note**: In the configured source and destination information, the `sol.topics` parameter specifies the ingress topic on PubSub+ (`sourcetest`) and `kafka.topic` is the Kafka destination topic (`test`), created in Step 1.

5. Start the connector in standalone mode. In a command line session run:
   ```sh
   bin/connect-standalone.sh \
   config/connect-standalone.properties \
   connectors/pubsubplus-connector-kafka-source-/etc/solace_source.properties
   ```
   After startup, the logs will eventually contain the following line:
   ```
   ================Session is Connected
   ```

6. Start watching messages arriving in Kafka. See the instructions in the Kafka [tutorial](//kafka.apache.org/quickstart#quickstart_consume) to start a consumer on the `test` topic.

7. Demo time! To generate an event into PubSub+, we use the "Try Me!" test service of the browser-based administration console to publish test messages to the `sourcetest` topic. Behind the scenes, "Try Me!" uses the JavaScript WebSocket API.

   * If you are using PubSub+ Cloud for your messaging service, follow the instructions in [Trying Out Your Messaging Service](//docs.solace.com/Solace-Cloud/ggs_tryme.htm).

   * If you are using an existing event broker, log in to its [PubSub+ Manager admin console](//docs.solace.com/Solace-PubSub-Manager/PubSub-Manager-Overview.htm#mc-main-content) and follow the instructions in [How to Send and Receive Test Messages](//docs.solace.com/Solace-PubSub-Manager/PubSub-Manager-Overview.htm#Test-Messages).

   In both cases, ensure that the topic is set to `sourcetest`, which the connector is listening to.
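   As a reference, the consumer from Step 6 looks similar to the following sketch (the script path may vary by Kafka distribution):
   ```sh
   bin/kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic test --from-beginning
   ```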
   The Kafka consumer from Step 6 should now display the new message arriving in Kafka through the PubSub+ Kafka Source Connector:
   ```
   Hello world!
   ```

## Parameters

The Connector parameters consist of [Kafka-defined parameters](https://kafka.apache.org/documentation/#connect_configuring) and PubSub+ connector-specific parameters.

Refer to the in-line documentation of the [sample PubSub+ Kafka Source Connector properties file](/etc/solace_source.properties) and additional information in the [Configuration](#Configuration) section.

## User Guide

### Deployment

-In this case "solaceSource.properties" is the configuration file that you created to define the connectors behavior.
-Please refer to the sample included in this project.

The PubSub+ Source Connector deployment has been tested on Apache Kafka 2.4 and Confluent Kafka 5.4 platforms. The Kafka software is typically placed under the root directory: `/opt//`.

-When the connector starts in stand-alone mode, all output goes to the console. If there are errors they should be visible on the console.
-If you do not want the output to console, simply add the "-daemon" option and all output will be directed to the logs directory.

Kafka distributions may be available as install bundles, Docker images, Kubernetes deployments, etc. They all support Kafka Connect, which includes the scripts, tools and sample properties for Kafka connectors.

-#### Confluent Kafka

Kafka provides two options for connector deployment: [standalone mode and distributed mode](//kafka.apache.org/documentation/#connect_running).

-The Confluent Kakfa software is typically placed under the root directory: "/opt/confluent/confluent-4.1.1". In this case it is for the 4.1.1 version of Confluent.
-By default, the Confluent software is started in distributed mode with the REST Gateway started.

* In standalone mode, recommended for development or testing only, configuration is provided together in the Kafka `connect-standalone.properties` and in the PubSub+ Source Connector `solace_source.properties` files and passed to the `connect-standalone` Kafka shell script running on a single worker node (machine), as seen in the [Quick Start](#quick-start).

-The Solace Source Connector would typically be placed in the "/opt/confluent/confluent-4.1.1/share/java/kafka-connect-solace" directory.
-You will need to create the "kafka-connect-solace" directory.
-You must place all the required Solace JCSMP JAR files under this same directory.
-If you plan to run the Source Connector in stand-alone mode, it is suggested to place the properties file under the same directory.

* In distributed mode, Kafka configuration is provided in `connect-distributed.properties` and passed to the `connect-distributed` Kafka shell script, which is started on each worker node. The `group.id` parameter identifies worker nodes belonging to the same group. The script starts a REST server on each worker node, and PubSub+ Source Connector configuration is passed to any one of the worker nodes in the group through REST requests in JSON format. An example worker start command follows.

-After the Solace files are installed and if you are familiar with Kakfa administration, it is recommended to restart the Confluent Connect software if Confluent is running in Distributed mode. Alternatively, it is simpler to just start and restart the Confluent software with the "confluent" command.
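For example, assuming the standard Apache Kafka directory layout, a distributed-mode worker would be started on each node with:
```sh
bin/connect-distributed.sh config/connect-distributed.properties
```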
To deploy the Connector, for each target machine, [download]( https://solaceproducts.github.io/pubsubplus-connector-kafka-source/downloads ) and extract the PubSub+ Source Connector into a directory and ensure the `plugin.path` parameter value in the `connect-*.properties` includes the absolute path to that directory. Note that Kafka Connect, i.e., the `connect-standalone` or `connect-distributed` Kafka shell scripts, must be restarted (or equivalent action from a Kafka console is required) if the PubSub+ Source Connector deployment is updated.

Some PubSub+ Source Connector configurations may require the deployment of additional specific files like keystores, truststores, Kerberos config files, etc. It does not matter where these additional files are located, but they must be available on all Kafka Connect Cluster nodes and placed in the same location on all the nodes because they are referenced by absolute location and configured only once through one REST request for all.

#### REST JSON Configuration

First test to confirm the PubSub+ Source Connector is available for use in distributed mode with the command:
```sh
curl http://18.218.82.209:8083/connector-plugins | jq
```

-In this case the IP address is one of the nodes running the Distributed mode Worker process. If the Connector is loaded correctly, you should see something similar to:

In this case the IP address is one of the nodes running the distributed mode worker process; the port defaults to 8083 or is as specified in the `rest.port` property in `connect-distributed.properties`. If the connector is loaded correctly, you should see a response similar to:

-![Connector List](resources/RESTConnectorListSmall.png)

```
  {
    "class": "com.solace.connector.kafka.connect.source.SolaceSourceConnector",
    "type": "source",
    "version": "2.0.0"
  },
```

At this point, it is now possible to start the connector in distributed mode with a command similar to:

```sh
curl -X POST -H "Content-Type: application/json" \
  -d @solace_source_properties.json \
  http://18.218.82.209:8083/connectors
```

-Again, the IP address is one of the nodes running the Distributed mode Worker process. The connector's JSON configuration file, in this case,
-is called "solace_source_properties.json".
-
-You can determine if the Source Connector is running with the following command:

The connector's JSON configuration file, in this case, is called `solace_source_properties.json`. A sample is available [here](/etc/solace_source_properties.json), which can be extended with the same properties as described in the [Parameters section](#parameters).

Determine if the Source Connector is running with the following command:
```sh
curl 18.218.82.209:8083/connectors/solaceSourceConnector/status | jq
```
If there was an error in starting, the details are returned with this command.

-If there was an error in starting, the details will be returned with this command.

### Troubleshooting

-## Configuration

-The Solace Source Connector configuration is managed by the configuration file.
-For stand-alone Kafka deployments a properties file is used. A sample is enclosed with the project.

In standalone mode, the connect logs are written to the console. If you do not want to send the output to the console, simply add the "-daemon" option to have all output directed to the logs directory.

-For distributed Kafka deployments the connector can be deployed via REST as a JSON configuration file. A sample is enclosed with the project.

In distributed mode, the logs location is determined by the `connect-log4j.properties` located in the `config` directory of the Apache Kafka distribution or under `etc/kafka/` in the Confluent distribution.

-The Solace configuration of the connector's Solace Session, Transport and Security properties are all available and defined in the **SolaceSourceConstants.java** file. These are the
-equivalent to the details for the Solace **JCSMPSessionProperties** class. Details and documentation for this JCSMPProperies class can be found here:

-[Solace Java API](https://docs.solace.com/API-Developer-Online-Ref-Documentation/java/index.html)

If logs are redirected to the standard output, here is a sample log4j.properties snippet to direct them to a file:
```
log4j.rootLogger=INFO, file
log4j.appender.file=org.apache.log4j.RollingFileAppender
log4j.appender.file.File=/var/log/kafka/connect.log
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.appender.file.MaxFileSize=10MB
log4j.appender.file.MaxBackupIndex=5
log4j.appender.file.append=true
```

-For tuning, performance and scaling (multiple tasks is supported with this connector) of the Solace Source Connector, please refer to the Solace PubSub+ documentation that can be found here:

-[Solace PubSub+ Documentation](https://docs.solace.com/)

To troubleshoot PubSub+ connection issues, increase the logging level to DEBUG by adding the following line:
```
log4j.logger.com.solacesystems.jcsmp=DEBUG
```
Ensure that you set it back to INFO or WARN for production.

-There is a bare minimum requirement to configure access to the Solace PubSub+ broker.
- A username, their password and VPN (Solace Virtual Private Network - a "virtual broker" used in Solace multi-tenancy configurations) and host
- reference are mandatory configuration details. An example of the required configuration file entries is as follows:

-```ini
-sol.username=user1
-sol.password=password1
-sol.vpn_name=kafkavpn
-sol.host=160.101.136.33
-```

### Event Processing

#### Message Processors

-#### Solace Message Processor

-The processing of the Solace message to create a Kafka Source Record is handled by an interface definition defined in `SolaceMessageProcessor.java`. This is a simple interface that is used to create the Kafka Source Records from the Solace messages. There are two examples included of classes that implement this interface:

-* **SolSimpleMessageProcessor.java** - which takes the Solace message as a binary payload and creates a Kafka Source record with a Binary Schema for the value (Solace message payload) and a Binary Schema for the record key.
-* **SolaceSampleKeyedMessageProcessor** - A more complex sample that allows the flexibility of changing the Source Record Key Schema and which value from the Solace message to use as a key. The option of no key in the record is also possible.

There are many ways to map PubSub+ messages to Kafka topics, partitions, keys, and values, depending on the application behind the events.

The PubSub+ Source Connector comes with two sample message processors that can be used as-is, or as a starting point to develop a customized message processor.

* **SimpleMessageProcessor**: Takes the PubSub+ message as a binary payload and creates a Kafka Source record with a Binary Schema for the value (from the PubSub+ message payload).
* **SampleKeyedMessageProcessor**: A more complex sample that allows the flexibility of changing the source record Key Schema and which value from the PubSub+ message to use as a key. The option of no key in the record is also possible.

-The desired message processor is loaded at runtime based on the configuration of the JSON or properties configuration file, for example:

-`sol.message_processor_class=com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor`

The desired message processor is loaded at runtime based on the configuration of the JSON or properties configuration file, for example:
```
sol.message_processor_class=com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor
```

-If you have just installed a Solace PubSub+ broker and you are not that familiar with Solace administration,
-you can test your Source Connector by using "default" as value for the username, password and VPN name.
-The host should match the IP address of the broker.
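For example, to use the keyed sample processor instead and key records by the PubSub+ destination, the sample properties file suggests entries like:
```
sol.message_processor_class=com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor
sol.kafka_message_key=DESTINATION
```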
-It is possible to create more custom Message Processors based on you Kafka record requirements for keying and/or value serialization and the desired format of the Solace event message. Simply add the new message processor classes to the project. The desired message processor is installed at run time based on the configuration file.

It is possible to create additional custom message processors based on your Kafka record requirements for keying and/or value serialization and the desired format of the PubSub+ event message. Simply add the new message processor classes to the project. The desired message processor is installed at run time based on the configuration file.

-More information on Kakfa Connect can be found here:

-[Apache Kafka Connect](https://kafka.apache.org/documentation/)

-[Confluent Kafka Connect](https://docs.confluent.io/current/connect/index.html)

Refer to the [Developers Guide](#developers-guide) for more information about building the Source Connector and extending message processors.

-For connectivity to Kafka, the Source Connector has four basic configuration requirements: name for the Connector Plugin, the name of the Java Class
-for the connector, the number of Tasks the connector should deploy and the name of the Kakfa Topic. The following is an example for the Solace Source Connector:

-```ini
-name=solaceConnector
-connector.class=com.solace.source.connector.SolaceSourceConnector
-tasks.max=2
-kafka.topic=solacetest
-```

-A more details example is included with this project. This project also includes a JSON configuration file.

#### Message Replay

By default, the Source Connector will process live events from the PubSub+ event broker. If replay of past recorded events is required for later consumption, it is also possible to use the PubSub+ [Message Replay](//docs.solace.com/Configuring-and-Managing/Message-Replay.htm) feature, initiated through [PubSub+ management](//docs.solace.com/Solace-PubSub-Manager/PubSub-Manager-Overview.htm).

-### Security Considerations

### Performance and Reliability Considerations

-The Source Connector supports both PKI and Kerberos for more secure authentication beyond the simple user name/password. The PKI/TLS support is well document in
-the Solace literature, and will not be repeated here. All the PKI required configuration parameters are part of the configuration variable for the Solace session and transport as referenced above in the Configuration Section. Sample parameters are found in the included properties and JSON configuration files.

#### Ingesting from PubSub+ Topics

-Kerberos authentication support is also available. It requires a bit more configuration than PKI since it is not defined as part of the Solace session or transport. Typical Kerberos client applications require details about the Kerberos configuration and details for the authentication. Since the Source Connector is a server application (i.e. no direct user interaction) a Kerberos keytab file is required as part of the authentication.

The event broker uses a "best effort" approach to deliver the events from PubSub+ Topics to the Source Connector. If the connector is down, or messages are constantly generated at a rate faster than can be written to Kafka, there is a potential for data loss. When Kafka is configured for its highest throughput, it is susceptible to loss and, obviously, the connector cannot add records if the Kafka broker is unavailable.

-The enclosed configuration files are samples that will allow automatic Kerberos authentication for the Source Connector when it is deployed to the Connect Cluster. The sample files included are
-the "krb5.conf" and "login.conf". It does not matter where the files are located, but they must be available on all Kafka Connect Cluster nodes and placed in the same location on all the nodes.
-The files are then referenced in the connector configuration files, for example:

-```ini
-sol.kerberos.login.conf=/opt/kerberos/login.conf
-sol.kerberos.krb5.conf=/opt/kerberos/krb5.conf
-```

When a Kafka Topic is configured for high throughput, the use of topics to receive data event messages is acceptable and recommended.

-There is also one other important configuration file entry that is required to tell the Solace connector to use Kerberos Authentication, which is also part of the Solace parameters mentioned in the Configuration Section of this document. The properties is:

The connector can ingest using a list of topic subscriptions, where each can be a [wild-card](//docs.solace.com/PubSub-Basics/Wildcard-Charaters-Topic-Subs.htm) subscription.

-```ini
-sol.authentication_scheme=AUTHENTICATION_SCHEME_GSS_KRB
-```

#### Ingesting from PubSub+ Queues

-Sample configuration files that include the rquires Kerberos parameters are also included with this project:

It is also possible to have the PubSub+ Source Connector attract data events from a PubSub+ Queue. A queue guarantees order of delivery, provides High Availability and Disaster Recovery (depending on the setup of the PubSub+ brokers) and provides an acknowledgment to the message producer (in this case the PubSub+ event producer application) when the event is stored in all HA and DR members and flushed to disk. This is a higher guarantee than is provided by Kafka even for Kafka idempotent delivery.

-```ini
-solace_source_krb5Pporoperties.json
-solace_kerberos.properties
-```

When a Kafka Topic is configured with its highest quality-of-service with respect to record loss or duplication, it results in a large reduction in record processing throughput. However, some applications require this QoS. In this case, the PubSub+ Source Connector should use Queues for the consumption of events from the Event Mesh.

-Kerberos has some very specific requirements to operate correctly. If these are also not configured, the Kerberos Authentication will not operate correctly:
-* DNS must be operating correctly both in the Kafka brokers and on the Solace PS+ broker.
-* Time services are recommended for use with the Kafka Cluster nodes and the Solace PS+ broker. If there is too much drift in the time between the nodes Kerberos will fail.
-* You must use the DNS name in the Solace PS+ host URI in the Connector configuration file and not the IP address
-* You must use the full Kerberos user name (including the Realm) in the configuration property, obviously no password is required.

Note that one connector can ingest from only one queue.

##### Recovery from Kafka Connect API or Kafka Broker Failure

-The secuirty setup and operation between he PS+ broker and the Source Connector and Kafka and the Source Connector operate completely independently.
-The security setup between the Source Connector and the Kafka Brokers is controlled by the Kafka Connect Libraries. These are exposed in the configuration file as parameters based on the Kafka-documented parameters and configuration. Please refer to the Kafka documentation for details on securing the Connector to the Kafka brokers for both PKI/TLS and Kerberos.

When the connector is consuming from a PubSub+ queue, a timed Kafka Connect process commits the source records and offsets to disk on the Kafka broker and calls the connector to acknowledge the messages that were processed so far, which removes these event messages from the event broker queue.
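A minimal sketch of the settings involved, combining the connector-side queue name (from the sample properties file) with the worker-side commit interval (a standard Kafka Connect parameter, discussed below):
```ini
# Connector properties: consume from a PubSub+ queue (the queue must exist on the event broker)
sol.queue=testQ

# Worker properties (connect-distributed.properties):
# commit offsets and acknowledge consumed PubSub+ messages every 30 seconds
offset.flush.interval.ms=30000
```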
-#### Solace Message Processor +If the Kafka Connect API or the Kafka broker goes down, unacknowledged messages are not lost; they will be retransmitted as soon as the Connect API or Kafka broker is restarted. It is important to note that while the Connect API or the Kafka Broker are off-line, the PubSub+ queue continues to add event messages, so there is no loss of new data from the PubSub+ Event Mesh. -The processing of the Solace message to create a Kafka Source Record is handled by an interface definition defined in `SolaceMessageProcessor.java`. This is a simple interface that is used to create the Kafka Source Records from the Solace messages. There are two examples included of classes that implement this interface: +The commit time interval is configurable via the `offset.flush.interval.ms` parameter (default 60,000 ms) in the worker's `connect-distributed.properties` configuration file. If high message rate is expected the parameter shall be tuned, taking into consideration that each task (in case of [Multiple Workers](#multiple-workers)) shall not allow excessively large (for example, 10,000 or more) amount of unacknowledged messages. -* **SolSimpleMessageProcessor.java** - which takes the Solace message as a binary payload and creates a Kafka Source record with a Binary Schema for the value (Solace message payload) and a Binary Schema for the record key. -* **SolaceSampleKeyedMessageProcessor** - A more complex sample that allows the flexibility of changing the Source Record Key Schema and which value from the Solace message to use as a key. The option of no key in the record is also possible. +##### Queue Handling of Data Bursts -The desired message processor is loaded at runtime based on the configuration of the JSON or properties configuration file, for example: +If the throughput through the Connect API is not high enough, and messages are starting to accumulate in the PubSub+ Queue, scaling of the Connector is recommended as discussed [below](#multiple-workers). -`sol.message_processor_class=com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor` +If the Source Connector has not been scaled to a sufficient level to deal with bursts, the Queue can act as a "shock absorber" to deal with micro-bursts or sustained periods of heavy event generation in the Event Mesh so that data events are no longer lost due to an unforeseen event. -It is possible to create more custom Message Processors based on you Kafka record requirements for keying and/or value serialization and the desired format of the Solace event message. Simply add the new message processor classes to the project. The desired message processor is installed at run time based on the configuration file. +#### Ingesting From a Queue Configured with Topic-To-Queue Mapping -More information on Kakfa Connect can be found here: +The Topic-to-Queue Mapping is the simple process of configuring a PubSub+ Queue to attract PubSub+ Topic data event. These data events are immediately available via the queue with a protection against record loss and with the "shock absorber" advantage that use of a Queue provides. -[Apache Kafka Connect](https://kafka.apache.org/documentation/) +Topic-to-Queue Mapping, just like any PubSub+ topic subscriptions, allows wild-card subscriptions to multiple topics. -[Confluent Kafka Connect](https://docs.confluent.io/current/connect/index.html) +#### Multiple Workers +The PubSub+ broker supports far greater throughput than can be afforded through a single instance of the Connect API. 
-Multiple Connector tasks are automatically deployed and spread across all available Connect Workers simply by indicating the number of desired
-tasks in the connector configuration file.

PubSub+ queue or topic subscriptions must be configured properly to support distributed consumption, so events are automatically load balanced between the multiple workers:

-When the Source Connector is consuming from Kafka and the event records are expected to be placed in to a Solace Queue,
-there are no special requirements for the Queue definition. As more instance of the connector are defined in the configuration,
-the Solace broker will automatically load balance the consumption of messages from the Solace Queue to the multiple Source Connectors.

* If the ingestion source is a Queue, it must be configured as [non-exclusive](//docs.solace.com/PubSub-Basics/Endpoints.htm#Queue_Access_Types), which permits multiple consumers to receive messages in a round-robin fashion.

-If the Source Connector is interested in consuming messages there are two options for scaling the connector with respect to the consumption of events
-form Solace:
-Topic-to-Queue Bridging and Solace Deliver-to-One processing.

* By the nature of Topics, if there are multiple subscribers to a topic, all subscribers receive all of the same topic data event messages. Load balancing can be achieved by applying [Shared Subscriptions](//docs.solace.com/PubSub-Basics/Direct-Messages.htm#Shared), which ensures that messages are delivered to only one active subscriber at a time (see the example at the end of this section).

-The Topic-to-Queue bridging is the simple process of configuring a Solace Queue to attract Solace Topic data event.
-These data events will immediately be available via the Solace Queue and the setup for scaling is as described above for queues.

Note that Shared Subscriptions may need to be [administratively enabled](//docs.solace.com/Configuring-and-Managing/Configuring-Client-Profiles.htm#Allowing-Shared-Subs) in the event broker Client Profile. Also note that Shared Subscriptions are not available on older versions of the event broker. The deprecated [DTO (Deliver-To-One) feature](//docs.solace.com/Configuring-and-Managing/DTO.htm) can be used instead.

-Solace also supports Deliver-to-One processing for Solace Topic messages. By the nature of Topics, if there are multiple consumers on a Topic,
-all consumers will receive all of the same topic data event messages. Solace provides scaling of Topic data using Deliver-to-One (DTO).
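For example, following the shared-subscription format shown in the sample properties file, a subscription to `sourcetest` shared by a hypothetical consumer group `group1` would be:
```ini
sol.topics=#share/group1/sourcetest
```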
### Security Considerations

-Kafka provides scaling similar to DTO for consuming Kafka Topic data using
-Consumer Groups.

The security setups between the Source Connector and the PubSub+ event broker, and between the Source Connector and the Kafka brokers, operate completely independently.

-For the Source Connector to receive DTO messages, DTO override must be disabled for the connector. This is accomplished by adding the following line to the properties or JSON configuration file:

The Source Connector supports both PKI and Kerberos for more secure authentication beyond the simple user name/password, when connecting to the PubSub+ event broker.
The security setup between the Source Connector and the Kafka brokers is controlled by the Kafka Connect libraries. These are exposed in the configuration file as parameters based on the Kafka-documented parameters and configuration. Please refer to the [Kafka documentation](//docs.confluent.io/current/connect/security.html) for details on securing the Source Connector to the Kafka brokers for both PKI/TLS and Kerberos.

-```ini
-sol.subscriber_dto_override=false
-```

#### PKI/TLS

-This means that the Source Connector will provide load balancing of Solace Topic messages.
-There is no practical scaling limitation with DTO; you can create as many DTO consumers as you like. More instances of the Source Connector
-will automatically be load balanced for consumption from the Solace Topic(s).

The PKI/TLS support is well documented in the [Solace Documentation](//docs.solace.com/Configuring-and-Managing/TLS-SSL-Service-Connections.htm), and will not be repeated here. All the PKI required configuration parameters are part of the configuration variable for the Solace session and transport as referenced above in the [Parameters section](#parameters). Sample parameters are found in the included [properties file](/etc/solace_source.properties).

#### Kerberos Authentication

-For DTO processing, it is important to note that the message producer must also send messages that are tagged for DTO processing.
-If the Source Connector is consuming on a topic that the producer did not send DTO-tagged messages, the connector will simple act as if
-DTO override is enabled and write duplicate messages into the Kafka Topic.

Kerberos authentication support requires a bit more configuration than PKI since it is not defined as part of the Solace session or transport.

-If the Solace producers are sending on some Solace Topics with DTO tagged messages and some other Solace topics without DTO,
-then it will require two deployments of the connector if scaling is required.
-One connector will include the topics where DTO is used by the message producers.
-A second connector will use a queue and the non-DTO producer topics will be bridged to the referenced queue.

Typical Kerberos client applications require details about the Kerberos configuration and details for the authentication. Since the Source Connector is a server application (i.e. no direct user interaction) a Kerberos _keytab_ file is required as part of the authentication, on each Kafka Connect Cluster worker node where the connector is deployed.

-For more detail on DTO and Topic-to-Queue bridging is available in the Solace documentation:

-[Solace DTO](https://docs.solace.com/Features/Direct-Messages.htm?Highlight=deliver%20to%20one#Load-Balance-DTO)

The enclosed [krb5.conf](/etc/krb5.conf) and [login.conf](/etc/login.conf) configuration files are samples that allow automatic Kerberos authentication for the Source Connector when it is deployed to the Connect Cluster. Together with the _keytab_ file, they must also be available on all Kafka Connect cluster nodes and placed in the same (any) location on all the nodes. The files are then referenced in the Source Connector properties, for example:
```ini
sol.kerberos.login.conf=/opt/kerberos/login.conf
sol.kerberos.krb5.conf=/opt/kerberos/krb5.conf
```
The following property entry is also required to specify Kerberos Authentication:
```ini
sol.authentication_scheme=AUTHENTICATION_SCHEME_GSS_KRB
```

-It is also important to note that the Solace Sink Connector can be configured to send DTO flagged messages. Therefore, when new record are placed into
-Kakfa, the Solace Sink Connector can be used to generate DTO tagged messages.

Kerberos has some very specific requirements to operate correctly. Some additional tips are as follows:
* DNS must be operating correctly both in the Kafka brokers and on the Solace PS+ broker.
* Time services are recommended for use with the Kafka Cluster nodes and the Solace PS+ broker. If there is too much drift in the time between the nodes, Kerberos will fail.
* You must use the DNS name and not the IP address in the Solace PS+ host URI in the Connector configuration file.
* You must use the full Kerberos user name (including the Realm) in the configuration property; obviously, no password is required.

-#### Sending Solace Events to Kafka

## Developers Guide

-The Solace Source Connector consumes registered Solace Data Even Messages and writes them to Kafka as Source Records. The connector can register
-interest in receiving Solace Queue or Topic message events.

### Build and Test the Project

-When using Topics, Solace uses "best effort" to deliver the events to the Sink Connector. If the Connector is down, or messages are constantly generated
- at a rate fast than can be written to Kafka, there will be potential for data loss. If Kafka is configured for it's highest throughput, it also
- is susceptible for loss and obviously, cannot add records if the Kafka broker is unavailable. Therefore, when the Kafka Topic is configured for high throuput
- the use of Solace Topics to receive the data event messages is acceptable and recommended.

JDK 8 or higher is required for this project.

-As mentioned above, the Connect API may become the limiting factor for processing of the events.
-To prevent potential data loss thorough the Connector when using topics, it may be necessary to scale the number of connector task instances.

First, clone this GitHub repo:
```
git clone https://github.com/SolaceProducts/pubsubplus-connector-kafka-source.git
cd pubsubplus-connector-kafka-source
```

-It is also possible to have the Solace Source Connector register interest in the Service Mesh to attract data events from Solace Queues.
-A Solace Queue guarantees order of deliver, provides High Availability and Disaster Recovery (depending on the setup of the PubSub+ brokers)
- and provides an acknowledgment to the message producer (in this case the Solace event producer application) when the event is stored in all
- HA and DR members and flushed to disk. This is a higher guarantee than is provided by Kakfa even for Kafka idempotent delivery.

Then run the build script:
```
gradlew clean build
```

This script creates artifacts in the `build` directory, including the deployable packaged PubSub+ Source Connector archives under `build/distributions`.
-When A Kafka Topic is configured with it's highest quality-of-service, with respect to record loss or duplication, it results in a large reduction in record processing throughput.
-However, in some application requirements this QoS is required. In this case, the Solace Source Connector should use Solace Queues for the consumption
-of events from the Event Mesh.

An integration test suite is also included, which spins up a Docker-based deployment environment that includes a PubSub+ event broker, Zookeeper, a Kafka broker, and Kafka Connect. It deploys the connector to Kafka Connect and runs end-to-end tests.
```
gradlew clean integrationTest --tests com.solace.connector.kafka.connect.source.it.SourceConnectorIT
```

-The Solace Source Connector consumes messages from the queue and streams the records to the Kafka Topic. A timed process (which is configurable in the
- in the Worker's configuration file), flushes the records and offset to to disk. The Solace Connector will consume/process the messages from the queue
- and when 500 messages are processed or the current connector "poll()" method completes, it will force the Kafka to flush records and the offset. The poll() method will also acknowledge all the Solace Queue data event messages that were committed to the Kafka Topic. Acknowledging the Solace Queue messages that were processed
- removes these event messages from the Solace Queue.

### Build a New Message Processor

-If the Connector or Kafka fail before the timed or forced commit, the Solace messages are
-not lost, they will be retransmitted as soon as the connector or Kafka are restarted. It is important to note that while connector or the
-Kafka Broker are offline, the Solace Queue will continue to add event messages, so there will be no loss of new data from the Solace Event Mesh.

The processing of the Solace message to create a Kafka source record is handled by an interface defined in [`SolMessageProcessorIF.java`](/src/main/java/com/solace/connector/kafka/connect/source/SolMessageProcessorIF.java). This is a simple interface that creates the Kafka source records from the PubSub+ messages. This project includes two examples of classes that implement this interface:

-When the Kafka consumers require the highest level of QoS, it is recommended to use the Solace Source Connector against Solace Queues. If the throughput
-through the Connect is not high enough, and messages are starting to accumulate in the Solace Queue, scaling of the Connector is recommended
-as discussed above. If the Source Connector has not been scaled to a required level to deal with bursts the Solace Queue can act as a "shock absorber"
-to deal with micro-bursts or sustained periods of heavy event generation in the Event Mesh. Data events will no be lost of the connector is under-scaled due to
-an unforeseen burst event.

* [SolSampleSimpleMessageProcessor](/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolSampleSimpleMessageProcessor.java)
* [SolaceSampleKeyedMessageProcessor](/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolaceSampleKeyedMessageProcessor.java)

You can use these examples as starting points for implementing your own custom message processors; see the sketch below.
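As an illustration, a minimal custom processor following the pattern of the bundled samples might look like the following sketch. The method names and signatures here are assumptions inferred from the samples; take the exact contract from `SolMessageProcessorIF.java`:

```java
import com.solacesystems.jcsmp.BytesXMLMessage;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;

// Hypothetical sketch: maps each PubSub+ message to one unkeyed Kafka source record
public class MySimpleMessageProcessor implements SolMessageProcessorIF {
  private BytesXMLMessage message;

  @Override
  public SolMessageProcessorIF process(String skey, BytesXMLMessage message) {
    this.message = message; // capture the PubSub+ message for conversion below
    return this;
  }

  @Override
  public SourceRecord[] getRecords(String kafkaTopic) {
    // Assumes the payload is carried in the message content part, as in the simple sample
    byte[] payload = message.getBytes();
    return new SourceRecord[] {
      new SourceRecord(null, null, kafkaTopic, null, Schema.BYTES_SCHEMA, payload)
    };
  }
}
```

The class would then be activated by setting `sol.message_processor_class` to its fully qualified name in the connector configuration.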
More information on Kafka source connector development can be found here:
- [Apache Kafka Connect](https://kafka.apache.org/documentation/)
- [Confluent Kafka Connect](https://docs.confluent.io/current/connect/index.html)

## Additional Information

For additional information, use cases and explanatory videos, please visit the [PubSub+/Kafka Integration Guide](https://docs.solace.com/Developer-Tools/Integration-Guides/Kafka-Connect.htm).

## Contributing

@@ -345,11 +361,11 @@ See the list of [contributors](../../graphs/contributors) who participated in th

## License

-This project is licensed under the Apache License, Version 2.0. - See the [LICENSE](LICENSE) file for details.
+This project is licensed under the Apache License, Version 2.0. See the [LICENSE](LICENSE) file for details.

## Resources

-For more information about Solace technology in general please visit these resources:
+For more information about Solace technology in general, please visit these resources:

- The [Solace Developers website](https://www.solace.dev/)
- Understanding [Solace technology]( https://solace.com/products/tech/)

diff --git a/build.gradle b/build.gradle
index 4b4250c..77aaf0e 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,110 +1,87 @@
-/*
- * This build file was generated by the Gradle 'init' task.
- *
- * This generated file contains a sample Java Library project to get you started.
- * For more details take a look at the Java Libraries chapter in the Gradle
- * user guide available at https://docs.gradle.org/3.5/userguide/java_library_plugin.html
- */
-
-// Apply the java-library plugin to add support for Java Library
-apply plugin: 'java-library'
 apply plugin: 'java'
-apply plugin: 'checkstyle'
-apply plugin: 'findbugs'
-apply plugin: 'pmd'
-apply plugin: 'jacoco'
-//apply plugin: 'net.researchgate.release' version '2.4.0'
-//apply plugin: "com.jfrog.bintray" version '1.7'
-apply plugin: 'maven'
-apply plugin: 'maven-publish'
+apply plugin: 'distribution'
+apply plugin: 'org.unbroken-dome.test-sets'

 ext {
-    //kafkaVersion = '0.10.0.0'
-    //kafkaVersion = '0.11.0.0'
-    //kafkaVersion = '1.1.0'
-    kafkaVersion = '2.0.0'
+    kafkaVersion = '2.4.1'
+    solaceJavaAPIVersion = '10.6.0'
 }

-// In this section you declare where to find the dependencies of your project
 repositories {
-    // Use jcenter for resolving your dependencies.
-    // You can declare any Maven/Ivy/file repository here.
-    jcenter()
+    mavenLocal()
+    mavenCentral()
 }

-dependencies {
-    // This dependency is exported to consumers, that is to say found on their compile classpath.
-    api 'org.apache.commons:commons-math3:3.6.1'
-
-    // This dependency is used internally, and not exposed to consumers on their own compile classpath.
-    implementation 'com.google.guava:guava:21.0'
-
-    // Use JUnit test framework
-    testImplementation 'junit:junit:4.12'
+buildscript {
+    repositories {
+        maven {
+            url "https://plugins.gradle.org/m2/"
+        }
+    }
+    dependencies {
+        classpath "com.github.spotbugs:spotbugs-gradle-plugin:3.0.0"
+        classpath "org.unbroken-dome.test-sets:org.unbroken-dome.test-sets.gradle.plugin:2.2.1"
+    }
 }

-// In this section you declare where to find the dependencies of your project
-repositories {
-    // Use jcenter for resolving your dependencies.
-    // You can declare any Maven/Ivy/file repository here.
-    jcenter()
-    mavenCentral()
-
-    maven { url "https://mvnrepository.com/artifact/com.solacesystems/sol-jcsmp" }
-    // https://mvnrepository.com/artifact/com.solacesystems/sol-jcsmp
-
-}
+testSets {
+    integrationTest
+}

 dependencies {
-    // This dependency is exported to consumers, that is to say found on their compile classpath.
-    //api 'org.apache.commons:commons-math3:3.6.1'
-
-    // This dependency is used internally, and not exposed to consumers on their own compile classpath.
-    implementation 'com.google.guava:guava:21.0'
-
-    // Use JUnit test framework
-    testImplementation 'junit:junit:4.12'
-
-    testCompile group: 'junit', name: 'junit', version: '4.12'
+    integrationTestImplementation 'junit:junit:4.12'
+    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-api:5.5.2'
+    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-engine:5.5.2'
+    integrationTestImplementation 'org.junit.jupiter:junit-jupiter-params:5.5.2'
+    integrationTestImplementation 'org.junit.platform:junit-platform-engine:1.5.2'
+    integrationTestImplementation 'org.mockito:mockito-core:3.2.4'
+    integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.2.4'
+    integrationTestImplementation 'org.testcontainers:testcontainers:1.12.4'
+    integrationTestImplementation 'org.testcontainers:junit-jupiter:1.12.4'
+    integrationTestImplementation 'org.slf4j:slf4j-api:1.7.28'
+    integrationTestImplementation 'org.slf4j:slf4j-simple:1.7.28'
+    integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6'
+    integrationTestImplementation 'commons-beanutils:commons-beanutils:1.9.4'
+    integrationTestImplementation 'com.google.code.gson:gson:2.3.1'
+    integrationTestImplementation 'commons-io:commons-io:2.4'
+    integrationTestImplementation 'com.squareup.okhttp3:okhttp:4.4.0'
+    // Double quotes are required here so Groovy interpolates $kafkaVersion
+    integrationTestImplementation "org.apache.kafka:kafka-clients:$kafkaVersion"

     compile "org.apache.kafka:connect-api:$kafkaVersion"
-    compile 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.0.2'
-    compile 'org.bouncycastle:bcprov-jdk15on:1.54'
-    compile 'org.bouncycastle:bcpkix-jdk15on:1.54'
-    compile 'org.bouncycastle:bcpg-jdk15on:1.54'
-    compile 'commons-io:commons-io:2.4'
-    compile 'org.slf4j:slf4j-api:1.7.14'
-    testCompile 'org.slf4j:slf4j-simple:1.7.14'
-    compile group: 'com.solacesystems', name: 'sol-jcsmp', version: '10.4.0'
-    //compile 'com.puppycrawl.tools:checkstyle:8.12'
+    compile "com.solacesystems:sol-jcsmp:$solaceJavaAPIVersion"
 }

-tasks.withType(FindBugs) {
-    reports {
-        xml.enabled = true
-        html.enabled = false
+task('prepDistForIntegrationTesting') {
+    dependsOn assembleDist
+    doLast {
+        copy {
+            from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0])
+            into (file('src/integrationTest/resources'))
+        }
+        copy {
+            from zipTree(file('build/distributions').listFiles().findAll {it.name.endsWith('.zip')}[0])
+            into (file('build/resources/integrationTest'))
+        }
     }
 }

-task copyRuntimeLibs(type: Copy) {
-    into "$buildDir/output/lib"
-    from configurations.runtime
+project.integrationTest {
+    useJUnitPlatform()
+    outputs.upToDateWhen { false }
+    dependsOn prepDistForIntegrationTesting
 }

-checkstyle {
-    repositories {
-        mavenCentral()
-    }
-    configurations {
-        checkstyle
+distributions {
+    main {
+        contents {
+            from('etc/solace_source.properties') { into 'etc' }
+            from('etc/solace_source_properties.json') { into 'etc' }
+            from('doc/distribution-readme.md') { into 'doc' }
+            from('LICENSE') { into 'doc' }
+            into('lib') {
+                from jar
+                from(project.configurations.runtime)
+            }
+        }
+    }
 }
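// Note: with the 'distribution' plugin configured above, './gradlew assembleDist'
// (also triggered through prepDistForIntegrationTesting's dependsOn) assembles the
// deployable ZIP/TAR archives under build/distributions.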
-    dependencies {
-        //checkstyle 'com.puppycrawl.tools:checkstyle:6.12.1'
-        checkstyle 'com.puppycrawl.tools:checkstyle:8.12'
-
-    }
-}
-
-processResources {
-    expand project.properties
 }
diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml
deleted file mode 100644
index c4bb069..0000000
--- a/config/checkstyle/checkstyle.xml
+++ /dev/null
@@ -1,255 +0,0 @@
[255 deleted lines of Checkstyle XML rule configuration omitted]
\ No newline at end of file
diff --git a/doc/distribution-readme.md b/doc/distribution-readme.md
new file mode 100644
index 0000000..f61270f
--- /dev/null
+++ b/doc/distribution-readme.md
@@ -0,0 +1,11 @@
+# Solace PubSub+ Connector Kafka Source
+
+This package provides a Solace PubSub+ Event Broker to Kafka Source Connector.
+
+For a detailed description, refer to the project GitHub page at [https://github.com/SolaceProducts/pubsubplus-connector-kafka-source](https://github.com/SolaceProducts/pubsubplus-connector-kafka-source)
+
+Package directory contents:
+
+- doc: this readme and license information
+- lib: Source Connector jar file and dependencies
+- etc: sample configuration properties and JSON file
diff --git a/doc/images/EventMesh.png b/doc/images/EventMesh.png
new file mode 100644
index 0000000..eaea891
Binary files /dev/null and b/doc/images/EventMesh.png differ
diff --git a/resources/KSource3.png b/doc/images/KSource.png
similarity index 100%
rename from resources/KSource3.png
rename to doc/images/KSource.png
diff --git a/doc/images/SingleConnector.png b/doc/images/SingleConnector.png
new file mode 100644
index 0000000..5c0db07
Binary files /dev/null and b/doc/images/SingleConnector.png differ
diff --git a/krb5.conf b/etc/krb5.conf
similarity index 100%
rename from krb5.conf
rename to etc/krb5.conf
diff --git a/login.conf b/etc/login.conf
similarity index 100%
rename from login.conf
rename to etc/login.conf
diff --git a/etc/solace_source.properties b/etc/solace_source.properties
new file mode 100644
index 0000000..73b7d48
--- /dev/null
+++ b/etc/solace_source.properties
@@ -0,0 +1,118 @@
+# PubSub+ Kafka Source Connector parameters
+# GitHub project https://github.com/SolaceProducts/pubsubplus-connector-kafka-source
+#######################################################################################
+
+# Kafka Connect params
+# Refer to https://kafka.apache.org/documentation/#connect_configuring
+name=solaceSourceConnector
+connector.class=com.solace.connector.kafka.connect.source.SolaceSourceConnector
+tasks.max=1
+value.converter=org.apache.kafka.connect.converters.ByteArrayConverter
+key.converter=org.apache.kafka.connect.storage.StringConverter
+
+# Destination Kafka topic the connector will write to
+kafka.topic=test
+
+# PubSub+ connection information
+sol.host=tcp://192.168.99.113:55555
+sol.username=default
+sol.password=default
+sol.vpn_name=default
+
+# Comma-separated list of PubSub+ topics to subscribe to.
+# If tasks.max>1, use shared subscriptions; otherwise each task's subscription will receive the same message.
+# Refer to https://docs.solace.com/PubSub-Basics/Direct-Messages.htm#Shared
+# Example shared subscription to "topic": "#share/group1/topic"
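+# For example (hypothetical illustration, not part of the shipped sample): with
+# tasks.max=2, the following delivers each matching message to only one of the
+# two tasks, so it is written to Kafka once instead of twice:
+# sol.topics=#share/group1/sourcetest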
+sol.topics=sourcetest + +# PubSub+ queue name to consume from, must exist on event broker +#sol.queue=testQ + +# PubSub+ Kafka Source connector message processor +# Refer to https://github.com/SolaceProducts/pubsubplus-connector-kafka-source +sol.message_processor_class=com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor + +# When using SolaceSampleKeyedMessageProcessor, defines which part of a +# PubSub+ message shall be converted to a Kafka record key +# Allowable values include: NONE, DESTINATION, CORRELATION_ID, CORRELATION_ID_AS_BYTES +#sol.kafka_message_key=NONE + +# Connector TLS session to PubSub+ message broker properties +# Specify if required when using TLS / Client certificate authentication +# May require setup of keystore and truststore on each host where the connector is deployed +# Refer to https://docs.solace.com/Overviews/TLS-SSL-Message-Encryption-Overview.htm +# and https://docs.solace.com/Overviews/Client-Authentication-Overview.htm#Client-Certificate +#sol.authentication_scheme= +#sol.ssl_connection_downgrade_to= +#sol.ssl_excluded_protocols= +#sol.ssl_cipher_suites= +#sol.ssl_validate_certificate= +#sol.ssl_validate_certicate_date= +#sol.ssl_trust_store= +#sol.ssl_trust_store_password= +#sol.ssl_trust_store_format= +#sol.ssl_trusted_common_name_list= +#sol.ssl_key_store= +#sol.ssl_key_store_password= +#sol.ssl_key_store_format= +#sol.ssl_key_store_normalized_format= +#sol.ssl_private_key_alias= +#sol.ssl_private_key_password= + +# Connector Kerberos authentication of PubSub+ message broker properties +# Specify if required when using Kerberos authentication +# Refer to https://docs.solace.com/Overviews/Client-Authentication-Overview.htm#Kerberos +# Example: +#sol.authentication_scheme=AUTHENTICATION_SCHEME_GSS_KRB +#sol.kerberos.login.conf=/opt/kerberos/login.conf +#sol.kerberos.krb5.conf=/opt/kerberos/krb5.conf +#sol.krb_service_name= + +# Solace Java properties to tune for creating a channel connection +# Leave at default unless required +# Look up meaning at https://docs.solace.com/API-Developer-Online-Ref-Documentation/java/com/solacesystems/jcsmp/JCSMPChannelProperties.html +#sol.channel_properties.connect_timout_in_millis= +#sol.channel_properties.read_timeout_in_millis= +#sol.channel_properties.connect_retries= +#sol.channel_properties.reconnect_retries= +#sol.channnel_properties.connect_retries_per_host= +#sol.channel_properties.reconnect_retry_wait_in_millis= +#sol.channel_properties.keep_alive_interval_in_millis= +#sol.channel_properties.keep_alive_limit= +#sol.channel_properties.send_buffer= +#sol.channel_properties.receive_buffer= +#sol.channel_properties.tcp_no_delay= +#sol.channel_properties.compression_level= + +# Solace Java tuning properties +# Leave at default unless required +# Look up meaning at https://docs.solace.com/API-Developer-Online-Ref-Documentation/java/com/solacesystems/jcsmp/JCSMPProperties.html +#sol.message_ack_mode= +#sol.session_name= +#sol.localhost= +#sol.client_name= +#sol.generate_sender_id= +#sol.generate_rcv_timestamps= +#sol.generate_send_timestamps= +#sol.generate_sequence_numbers= +#sol.calculate_message_expiration= +#sol.reapply_subscriptions= +#sol.pub_multi_thread= +#sol.pub_use_immediate_direct_pub= +#sol.message_callback_on_reactor= +#sol.ignore_duplicate_subscription_error= +#sol.ignore_subscription_not_found_error= +#sol.no_local= +#sol.ack_event_mode= +#sol.sub_ack_window_size= +#sol.pub_ack_window_size= +#sol.sub_ack_time= +#sol.pub_ack_time= +#sol.sub_ack_window_threshold= 
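An aside on the `sol.message_processor_class` setting above, before the remaining tuning keys: a message processor is the connector's extension point for mapping one received PubSub+ message to Kafka `SourceRecord`s. Below is a minimal, hypothetical sketch. It assumes the `SolMessageProcessorIF` interface as implemented by the bundled `SolSampleSimpleMessageProcessor` (a chainable `process(String, BytesXMLMessage)` plus `getRecords(String)`); treat the exact signatures as an assumption and check the project sources:

```java
package com.solace.connector.kafka.connect.source.msgprocessors;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;

import com.solace.connector.kafka.connect.source.SolMessageProcessorIF;
import com.solacesystems.jcsmp.BytesXMLMessage;
import com.solacesystems.jcsmp.TextMessage;

/**
 * Hypothetical example processor: upper-cases text payloads and passes
 * binary attachments through unchanged. Not part of the distribution.
 */
public class UppercaseMessageProcessor implements SolMessageProcessorIF {

  private byte[] payload = new byte[0];

  @Override
  public SolMessageProcessorIF process(String skey, BytesXMLMessage msg) {
    if (msg instanceof TextMessage) {
      payload = ((TextMessage) msg).getText().toUpperCase().getBytes(StandardCharsets.UTF_8);
    } else {
      ByteBuffer attachment = msg.getAttachmentByteBuffer(); // null if no attachment
      if (attachment != null) {
        payload = new byte[attachment.remaining()];
        attachment.get(payload);
      }
    }
    return this;
  }

  @Override
  public SourceRecord[] getRecords(String kafkaTopic) {
    // One PubSub+ message becomes one keyless Kafka record with a byte[] value,
    // matching the ByteArrayConverter configured in this sample file
    return new SourceRecord[] {
        new SourceRecord(null, null, kafkaTopic, Schema.BYTES_SCHEMA, payload)
    };
  }
}
```

If such a class were built and placed on the Connect plugin path next to the connector jar, pointing `sol.message_processor_class` at its fully qualified name would activate it.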
+#sol.max_resends= +#sol.gd_reconnect_fail_action= +#sol.susbcriber_local_priority= +#sol.susbcriber_network_priority= +#sol.subscriber_dto_override= + + diff --git a/etc/solace_source_properties.json b/etc/solace_source_properties.json new file mode 100644 index 0000000..3bc7df1 --- /dev/null +++ b/etc/solace_source_properties.json @@ -0,0 +1,16 @@ +{ + "name": "solaceSourceConnector", + "config": { + "name": "solaceSourceConnector", + "connector.class": "com.solace.connector.kafka.connect.source.SolaceSourceConnector", + "tasks.max": "1", + "kafka.topic": "test", + "sol.host": "tcp://192.168.99.113:55555", + "sol.username": "default", + "sol.password": "default", + "sol.vpn_name": "default", + "sol.topics": "sourcetest", + "sol.message_processor_class": "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor", + "value.converter": "org.apache.kafka.connect.converters.ByteArrayConverter", + "key.converter": "org.apache.kafka.connect.storage.StringConverter" } +} \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 0ca44e8..e997a9a 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1 @@ -version=1.0.2 \ No newline at end of file +version=2.0.0 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 4ab32b0..cc4fdc2 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index eb913a0..1b16c34 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Fri Aug 03 20:38:33 EDT 2018 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-3.5-bin.zip diff --git a/gradlew b/gradlew index 4453cce..2fe81a7 100755 --- a/gradlew +++ b/gradlew @@ -1,5 +1,21 @@ #!/usr/bin/env sh +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + ############################################################################## ## ## Gradle start up script for UN*X @@ -28,16 +44,16 @@ APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. 
MAX_FD="maximum" -warn ( ) { +warn () { echo "$*" } -die ( ) { +die () { echo echo "$*" echo @@ -109,8 +125,8 @@ if $darwin; then GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" fi -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` JAVACMD=`cygpath --unix "$JAVACMD"` @@ -138,35 +154,30 @@ if $cygwin ; then else eval `echo args$i`="\"$arg\"" fi - i=$((i+1)) + i=`expr $i + 1` done case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; esac fi # Escape application args -save ( ) { +save () { for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done echo " " } -APP_ARGS=$(save "$@") +APP_ARGS=`save "$@"` # Collect all arguments for the java command, following the shell quoting and substitution rules eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat index e95643d..24467a1 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -1,3 +1,19 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + @if "%DEBUG%" == "" @echo off @rem ########################################################################## @rem @@ -14,7 +30,7 @@ set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome diff --git a/resources/EventMesh.png b/resources/EventMesh.png deleted file mode 100644 index 438aad4..0000000 Binary files a/resources/EventMesh.png and /dev/null differ diff --git a/resources/RESTConnectorList.png b/resources/RESTConnectorList.png deleted file mode 100644 index 0ed9e0b..0000000 Binary files a/resources/RESTConnectorList.png and /dev/null differ diff --git a/resources/RESTConnectorListSmall.png b/resources/RESTConnectorListSmall.png deleted file mode 100644 index 5ca5543..0000000 Binary files a/resources/RESTConnectorListSmall.png and /dev/null differ diff --git a/resources/RESTStatus.png b/resources/RESTStatus.png deleted file mode 100644 index 77b5781..0000000 Binary files a/resources/RESTStatus.png and /dev/null differ diff --git a/resources/RESTStatusSmall.png b/resources/RESTStatusSmall.png deleted file mode 100644 index b60176b..0000000 Binary files a/resources/RESTStatusSmall.png and /dev/null differ diff --git a/resources/SolInterop.png b/resources/SolInterop.png deleted file mode 100644 index df05739..0000000 Binary files a/resources/SolInterop.png and /dev/null differ diff --git a/resources/SolaceAPI.png b/resources/SolaceAPI.png deleted file mode 100644 index 1dd36a1..0000000 Binary files a/resources/SolaceAPI.png and /dev/null differ diff --git a/resources/SolaceCloud.png b/resources/SolaceCloud.png deleted file mode 100644 index ea80779..0000000 Binary files a/resources/SolaceCloud.png and /dev/null differ diff --git a/resources/SolaceCloud1.png b/resources/SolaceCloud1.png deleted file mode 100644 index 87502bc..0000000 Binary files a/resources/SolaceCloud1.png and /dev/null differ diff --git a/settings.gradle b/settings.gradle index f8d80cb..171ef0c 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,18 +1 @@ -/* - * This settings file was generated by the Gradle 'init' task. - * - * The settings file is used to specify which projects to include in your build. - * In a single project build this file can be empty or even removed. 
- * - * Detailed information about configuring a multi-project build in Gradle can be found - * in the user guide at https://docs.gradle.org/3.5/userguide/multi_project_builds.html - */ - -/* -// To declare projects as part of a multi-project build use the 'include' method -include 'shared' -include 'api' -include 'services:webservice' -*/ - -rootProject.name = 'SolaceSourceConnector' +rootProject.name = 'pubsubplus-connector-kafka-source' diff --git a/solace.properties b/solace.properties deleted file mode 100644 index f22046c..0000000 --- a/solace.properties +++ /dev/null @@ -1,46 +0,0 @@ -name=solaceConnector -connector.class=com.solace.source.connector.SolaceSourceConnector -tasks.max=2 -kafka.topic=solacetest -sol.host=160.101.136.60 -#sol.host=tcps://160.101.136.33:55443 -sol.username=heinz1 -sol.password=heinz1 -sol.vpn_name=heinzvpn -#sol.topics=soltest, soltest1,solacetest2 -sol.topics=soltest -sol.queue=testQ -sol.message_callback_on_reactor=false -sol.message_processor_class=com.solace.source.connector.msgprocessors.SolaceSampleKeyedMessageProcessor -#sol.message_processor_class=com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor -sol.generate_send_timestamps=false -sol.generate_rcv_timestamps=false -sol.sub_ack_window_size=255 -sol.generate_sequence_numbers=true -sol.calculate_message_expiration=true -sol.subscriber_dto_override=false -sol.channel_properties.connect_retries=-1 -sol.channel_properties.reconnect_retries=-1 -sol.kafka_message_key=DESTINATION -#sol.ssl_validate_certificate=false -#sol.ssl_validate_certicate_date=false -#sol.ssl_connection_downgrade_to=PLAIN_TEXT -sol.ssl_trust_store=/opt/PKI/skeltonCA/heinz1.ts -sol.ssl_trust_store_pasword=sasquatch -sol.ssl_trust_store_format=JKS -#sol.ssl_trusted_command_name_list -sol.ssl_key_store=/opt/PKI/skeltonCA/heinz1.ks -sol.ssl_key_store_password=sasquatch -sol.ssl_key_store_format=JKS -sol.ssl_key_store_normalized_format=JKS -sol.ssl_private_key_alias=heinz1 -sol.ssl_private_key_password=sasquatch -#sol.authentication_scheme=AUTHENTICATION_SCHEME_CLIENT_CERTIFICATE -key.converter.schemas.enable=true -value.converter.schemas.enable=true -#key.converter=org.apache.kafka.connect.converters.ByteArrayConverter -value.converter=org.apache.kafka.connect.converters.ByteArrayConverter -#key.converter=org.apache.kafka.connect.json.JsonConverter -key.converter=org.apache.kafka.connect.storage.StringConverter -#value.converter=org.apache.kafka.connect.json.JsonConverter -#value.converter=org.apache.kafka.connect.storage.StringConverter \ No newline at end of file diff --git a/solace_kerberos.properties b/solace_kerberos.properties deleted file mode 100644 index e7d67cd..0000000 --- a/solace_kerberos.properties +++ /dev/null @@ -1,29 +0,0 @@ -name=solaceConnector -connector.class=com.solace.source.connector.SolaceSourceConnector -tasks.max=1 -kafka.topic=solacetest2 -sol.host=vmr90.heinz.org -sol.username=testKerb@HEINZ.ORG -sol.vpn_name=heinzKerberos -sol.topics=test -sol.message_processor_class=com.solace.source.connector.msgprocessors.SolSampleSimpleMessageProcessor -sol.generate_send_timestamps=true -sol.sol_generate_rcv_timestamps=true -sol.sub_ack_window_size=255 -sol.generate_sequence_numbers=true -sol.calculate_message_expiration=true -sol.subscriber_dto_override=true -sol.channel_properties.connect_retries=-1 -sol.channel_properties.reconnect_retries=-1 -sol.kafka_message_key=DESTINATION -sol.authentication_scheme=AUTHENTICATION_SCHEME_GSS_KRB -sol.kerberos.login.conf=/opt/kerberos/login.conf 
-sol.kerberos.krb5.conf=/opt/kerberos/krb5.conf -key.converter.schemas.enable=true -value.converter.schemas.enable=true -#key.converter=org.apache.kafka.connect.converters.ByteArrayConverter -value.converter=org.apache.kafka.connect.converters.ByteArrayConverter -#key.converter=org.apache.kafka.connect.json.JsonConverter -key.converter=org.apache.kafka.connect.storage.StringConverter -#value.converter=org.apache.kafka.connect.json.JsonConverter -#value.converter=org.apache.kafka.connect.storage.StringConverter \ No newline at end of file diff --git a/solace_source_kerb5_properties.json b/solace_source_kerb5_properties.json deleted file mode 100644 index 743b592..0000000 --- a/solace_source_kerb5_properties.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "name": "solaceSourceConnector", - "config": { - "connector.class": "com.solace.source.connector.SolaceSourceConnector", - "tasks.max": "1", - "kafka.topic": "solacetest2", - "sol.host": "tcp://vmr90.heinz.org", - "sol.username": "testKerb@HEINZ.ORG", - "sol.password": "heinz2", - "sol.vpn_name": "heinzKerberos", - "sol.topics": "test", - "sol.message_processor_class": "com.solace.source.connector.msgprocessors.SolaceSampleKeyedMessageProcessor", - "sol.generate_send_timestamps": "true", - "sol.sol_generate_rcv_timestamps": "true", - "sol.sub_ack_window_size": "255", - "sol.generate_sequence_numbers": "true", - "sol.calculate_message_expiration": "true", - "sol.subscriber_dto_override": "true", - "sol.channel_properties.connect_retries": "-1", - "sol.channel_properties.reconnect_retries": "-1", - "sol.kafka_message_key": "DESTINATION", - "sol.ssl_private_key_alias": "heinz1", - "sol.kerberos.login.conf": "/opt/kerberos/login.conf", - "sol.kerberos.krb5.conf": "/opt/kerberos/krb5.conf", - "sol.authentication_scheme": "AUTHENTICATION_SCHEME_GSS_KRB" - } - -} \ No newline at end of file diff --git a/solace_source_properties.json b/solace_source_properties.json deleted file mode 100644 index 1daf670..0000000 --- a/solace_source_properties.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "name": "solaceSourceConnector", - "config": { - "connector.class": "com.solace.source.connector.SolaceSourceConnector", - "tasks.max": "1", - "kafka.topic": "solacetest", - "sol.host": "160.101.136.33", - "sol.username": "heinz1", - "sol.password": "heinz1", - "sol.vpn_name": "heinzvpn", - "sol.topics": "soltest, soltest1,solacetest2", - "sol.queue": "testQ", - "sol.message_processor_class": "com.solace.source.connector.msgProcessors.SolaceSampleKeyedMessageProcessor", - "sol.generate_send_timestamps": "true", - "sol.generate_rcv_timestamps": "true", - "sol.sub_ack_window_size": "255", - "sol.generate_sequence_numbers": "true", - "sol.calculate_message_expiration": "true", - "sol.subscriber_dto_override": "true", - "sol.channel_properties.connect_retries": "-1", - "sol.channel_properties.reconnect_retries": "-1", - "sol.kafka_message_key": "DESTINATION", - "sol.ssl_trust_store": "/opt/PKI/skeltonCA/heinz1.ts", - "sol.ssl_trust_store_pasword": "sasquatch", - "sol.ssl_trust_store_format": "JKS", - "sol.ssl_key_store": "/opt/PKI/skeltonCA/heinz1.ks", - "sol.ssl_key_store_password": "sasquatch", - "sol.ssl_key_store_format": "JKS", - "sol.ssl_key_store_normalized_format": "JKS", - "sol.ssl_private_key_alias": "heinz1", - "sol.ssl_private_key_password": "sasquatch" - } - -} \ No newline at end of file diff --git a/src/.DS_Store b/src/.DS_Store deleted file mode 100644 index 25d3c91..0000000 Binary files a/src/.DS_Store and /dev/null differ diff --git 
a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java new file mode 100644 index 0000000..e2a1294 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupApache.java @@ -0,0 +1,63 @@ +package com.solace.connector.kafka.connect.source.it; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.BindMode; +import org.testcontainers.containers.FixedHostPortGenericContainer; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Container; + +public class DockerizedPlatformSetupApache implements MessagingServiceFullLocalSetupApache { + + @Container + public final static GenericContainer KAFKA_CONNECT_REST = new FixedHostPortGenericContainer<>("bitnami/kafka:2") + .withEnv("KAFKA_CFG_ZOOKEEPER_CONNECT", dockerIpAddress + ":2181") + .withEnv("ALLOW_PLAINTEXT_LISTENER", "yes") + .withCommand("/bin/sh", "-c", //"sleep 10000") + "sed -i 's/bootstrap.servers=.*/bootstrap.servers=" + dockerIpAddress + + ":39092/g' /opt/bitnami/kafka/config/connect-distributed.properties; " + + "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties; " + + "echo 'rest.port=28083' >> /opt/bitnami/kafka/config/connect-distributed.properties; " + + "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties") + .withFixedExposedPort(28083,28083) + .withExposedPorts(28083) +//// +// // Enable remote debug session at default port 5005 +// .withEnv("KAFKA_DEBUG", "y") +// .withEnv("DEBUG_SUSPEND_FLAG", "y") +//// + .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", + "/opt/bitnami/kafka/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY) +// .withStartupTimeout(Duration.ofSeconds(120)) + .waitingFor( Wait.forLogMessage(".*Finished starting connectors and tasks.*", 1) ) + ; + + @BeforeAll + static void setUp() { + assert(KAFKA_CONNECT_REST != null); // Required to instantiate + } + + @DisplayName("Local MessagingService connection tests") + @Nested + class MessagingServiceConnectionTests { + @DisplayName("Setup the dockerized platform") + @Test + @Disabled + void setupDockerizedPlatformTest() { + String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); + assertNotNull(host); + try { + Thread.sleep(36000000l); + } catch (InterruptedException e) { + e.printStackTrace(); + } + + } + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java new file mode 100644 index 0000000..10a2703 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/DockerizedPlatformSetupConfluent.java @@ -0,0 +1,75 @@ +package com.solace.connector.kafka.connect.source.it; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import 
org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.testcontainers.containers.BindMode; +import org.testcontainers.containers.FixedHostPortGenericContainer; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Container; + +public class DockerizedPlatformSetupConfluent implements MessagingServiceFullLocalSetupConfluent { + + @Container + public final static GenericContainer connector = new FixedHostPortGenericContainer<>( + "confluentinc/cp-kafka-connect-base:5.4.0") + .withEnv("CONNECT_BOOTSTRAP_SERVERS", COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092) + ":39092") + .withFixedExposedPort(28083, 28083).withFixedExposedPort(5005, 5005).withExposedPorts(28083, 5005) + .withEnv("CONNECT_REST_PORT", "28083") +// +// // Enable remote debug session at default port 5005 +// .withEnv("KAFKA_DEBUG", "y") +// .withEnv("DEBUG_SUSPEND_FLAG", "y") +// + .withEnv("CONNECT_GROUP_ID", "testconnect-avro") + .withEnv("CONNECT_CONFIG_STORAGE_TOPIC", "testconnect-avro-config") + .withEnv("CONNECT_OFFSET_STORAGE_TOPIC", "testconnect-avro-offsets") + .withEnv("CONNECT_STATUS_STORAGE_TOPIC", "testconnect-avro-status") + .withEnv("CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR", "1") + .withEnv("CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR", "1") + .withEnv("CONNECT_STATUS_STORAGE_REPLICATION_FACTOR", "1") +// .withEnv("CONNECT_OFFSET_FLUSH_INTERVAL_MS", "100") + .withEnv("CONNECT_KEY_CONVERTER", "io.confluent.connect.avro.AvroConverter") + .withEnv("CONNECT_VALUE_CONVERTER", "io.confluent.connect.avro.AvroConverter") + .withEnv("CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL", + "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081") + .withEnv("CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL", + "http://" + COMPOSE_CONTAINER_KAFKA.getServiceHost("schema-registry_1", 8081) + ":8081") + .withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter") + .withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter") +// + .withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost").withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO") + .withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars") + .withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", + "/etc/kafka-connect/jars/pubsubplus-connector-kafka", BindMode.READ_ONLY) +// .waitingFor( Wait.forHealthcheck() ); + .waitingFor(Wait.forLogMessage(".*Kafka Connect started.*", 1)); + + @BeforeAll + static void setUp() { + assert(connector != null); + } + + @DisplayName("Local MessagingService connection tests") + @Nested + class MessagingServiceConnectionTests { + @DisplayName("Setup the dockerized platform") + @Test +// @Disabled + void setupDockerizedPlatformTest() { + String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080); + assertNotNull(host); + try { + Thread.sleep(36000000l); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java new file mode 100644 index 0000000..2ff0b8c --- /dev/null +++ 
b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupApache.java
@@ -0,0 +1,48 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+import java.io.File;
+import org.junit.jupiter.api.BeforeAll;
+import org.testcontainers.containers.DockerComposeContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.containers.wait.strategy.Wait;
+
+@Testcontainers
+public interface MessagingServiceFullLocalSetupApache extends TestConstants {
+
+  @Container
+  public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS =
+      new DockerComposeContainer(
+          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml"))
+              .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME)
+              .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME)
+              .withEnv("PUBSUB_TAG", PUBSUB_TAG)
+              .withServices(SERVICES)
+              .withLocalCompose(true)
+              .withPull(false)
+              .waitingFor("solbroker_1",
+                  Wait.forLogMessage(".*System startup complete.*", 1) );
+
+  public static final String dockerReportedAddress = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
+  public static final String dockerIpAddress = ("localhost".equals(dockerReportedAddress) // equals(), not ==, for String comparison
+      || "127.0.0.1".equals(dockerReportedAddress) ? Tools.getIpAddress() : dockerReportedAddress);
+
+  @Container
+  public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA =
+      new DockerComposeContainer(
+          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-apache.yml"))
+              .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC)
+              .withEnv("KAFKA_HOST", dockerIpAddress)
+              .withLocalCompose(true)
+              .waitingFor("schema-registry_1",
+                  Wait.forHttp("/subjects").forStatusCode(200));
+
+  @BeforeAll
+  static void checkContainer() {
+    String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
+    assertNotNull(host);
+  }
+}
+
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java
new file mode 100644
index 0000000..727deb4
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/MessagingServiceFullLocalSetupConfluent.java
@@ -0,0 +1,44 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+import java.io.File;
+import org.junit.jupiter.api.BeforeAll;
+import org.testcontainers.containers.DockerComposeContainer;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.junit.jupiter.Testcontainers;
+import org.testcontainers.containers.wait.strategy.Wait;
+
+@Testcontainers
+public interface MessagingServiceFullLocalSetupConfluent extends TestConstants {
+
+  @Container
+  public static final DockerComposeContainer COMPOSE_CONTAINER_PUBSUBPLUS =
+      new DockerComposeContainer(
+          new File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-solace.yml"))
+              .withEnv("PUBSUB_NETWORK_NAME", PUBSUB_NETWORK_NAME)
+              .withEnv("PUBSUB_HOSTNAME", PUBSUB_HOSTNAME)
+              .withEnv("PUBSUB_TAG", PUBSUB_TAG)
+              .withServices(SERVICES)
+              .withLocalCompose(true)
+              .withPull(false)
+              .waitingFor("solbroker_1",
+                  Wait.forLogMessage(".*System startup complete.*", 1) );
+
+  @Container
+  public static final DockerComposeContainer COMPOSE_CONTAINER_KAFKA =
+      new DockerComposeContainer(
+          new
File(FULL_DOCKER_COMPOSE_FILE_PATH + "docker-compose-kafka-confluent.yml"))
+              .withEnv("KAFKA_TOPIC", KAFKA_SOURCE_TOPIC)
+              .withEnv("KAFKA_HOST", COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080))
+              .withLocalCompose(true)
+              .waitingFor("schema-registry_1",
+                  Wait.forHttp("/subjects").forStatusCode(200));
+
+  @BeforeAll
+  static void checkContainer() {
+    String host = COMPOSE_CONTAINER_PUBSUBPLUS.getServiceHost("solbroker_1", 8080);
+    assertNotNull(host);
+  }
+}
+
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java
new file mode 100644
index 0000000..a0c64a9
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/ParameterTesting.java
@@ -0,0 +1,74 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+import com.solace.connector.kafka.connect.source.SolaceSourceTask;
+import com.solacesystems.jcsmp.JCSMPChannelProperties;
+import com.solacesystems.jcsmp.JCSMPProperties;
+import com.solacesystems.jcsmp.JCSMPSession;
+
+public class ParameterTesting implements TestConstants {
+
+  @DisplayName("Default Parameter test")
+  @Test
+  void checkDefaultParams() {
+    SolaceSourceTask testSourceTask = new SolaceSourceTask();
+
+    Map<String, String> props = new HashMap<>();
+    props.put("sol.host", "tcp://" + MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_PUBSUBPLUS
+        .getServiceHost("solbroker_1", 55555) + ":55555");
+    props.put("sol.username", SOL_ADMINUSER_NAME);
+    props.put("sol.password", SOL_ADMINUSER_PW);
+    props.put("sol.vpn_name", SOL_VPN);
+
+    testSourceTask.start(props);
+    JCSMPSession solSession = testSourceTask.getSolSession();
+    assert(!solSession.isClosed());
+    // Read back a sampling of effective session properties to confirm the defaults resolve
+    JCSMPChannelProperties chanProperties =
+        (JCSMPChannelProperties) solSession.getProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES);
+    solSession.getProperty(JCSMPProperties.GENERATE_SEND_TIMESTAMPS);
+    solSession.getProperty(JCSMPProperties.GENERATE_RCV_TIMESTAMPS);
+    solSession.getProperty(JCSMPProperties.GENERATE_SEQUENCE_NUMBERS);
+    solSession.getProperty(JCSMPProperties.CALCULATE_MESSAGE_EXPIRATION);
+    solSession.getProperty(JCSMPProperties.PUB_MULTI_THREAD);
+    solSession.getProperty(JCSMPProperties.MESSAGE_CALLBACK_ON_REACTOR);
+    solSession.getProperty(JCSMPProperties.IGNORE_DUPLICATE_SUBSCRIPTION_ERROR);
+    solSession.getProperty(JCSMPProperties.IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR);
+    solSession.getProperty(JCSMPProperties.NO_LOCAL);
+    solSession.getProperty(JCSMPProperties.SUB_ACK_WINDOW_SIZE);
+    solSession.getProperty(JCSMPProperties.SUBSCRIBER_LOCAL_PRIORITY);
+    solSession.getProperty(JCSMPProperties.SUBSCRIBER_NETWORK_PRIORITY);
+    solSession.getProperty(JCSMPProperties.REAPPLY_SUBSCRIPTIONS);
+    solSession.getProperty(JCSMPProperties.AUTHENTICATION_SCHEME);
+    solSession.getProperty(JCSMPProperties.KRB_SERVICE_NAME);
+    solSession.getProperty(JCSMPProperties.SSL_CONNECTION_DOWNGRADE_TO);
+
solSession.getProperty(JCSMPProperties.SSL_CIPHER_SUITES); + solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE); + solSession.getProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE_DATE); + solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE); + solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD); + solSession.getProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT); + solSession.getProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST); + solSession.getProperty(JCSMPProperties.SSL_KEY_STORE); + solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD); + solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT); + solSession.getProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT); + solSession.getProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD); + + + + + + testSourceTask.stop(); + } + +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java new file mode 100644 index 0000000..1e936cc --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SolaceConnectorDeployment.java @@ -0,0 +1,148 @@ +package com.solace.connector.kafka.connect.source.it; + +import java.io.File; +import java.io.IOException; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; + +import org.apache.commons.io.FileUtils; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + +import okhttp3.MediaType; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; + +public class SolaceConnectorDeployment implements TestConstants { + + static Logger logger = LoggerFactory.getLogger(SolaceConnectorDeployment.class.getName()); + + static String kafkaTestTopic = KAFKA_SOURCE_TOPIC + "-" + Instant.now().getEpochSecond(); + OkHttpClient client = new OkHttpClient(); + String connectorAddress = new TestConfigProperties().getProperty("kafka.connect_rest_url"); + + public void waitForConnectorRestIFUp() { + Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build(); + Response response = null; + do { + try { + Thread.sleep(1000l); + response = client.newCall(request).execute(); + } catch (IOException | InterruptedException e) { + // Continue looping + } + } while (response == null || !response.isSuccessful()); + } + + public void provisionKafkaTestTopic() { + // Create a new kafka test topic to use + String bootstrapServers = new TestConfigProperties().getProperty("kafka.bootstrap_servers"); + Properties properties = new Properties(); + properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + AdminClient adminClient = AdminClient.create(properties); + NewTopic newTopic = new NewTopic(kafkaTestTopic, 5, (short) 1); // new NewTopic(topicName, numPartitions, + // replicationFactor) + List newTopics = new ArrayList(); + newTopics.add(newTopic); + adminClient.createTopics(newTopics); + adminClient.close(); + } + + void startConnector() { + startConnector(null); // Defaults only, no override + } + + void startConnector(Properties 
props) { + String configJson = null; + // Prep config files + try { + // Configure .json connector params + File jsonFile = new File( + UNZIPPEDCONNECTORDESTINATION + "/" + Tools.getUnzippedConnectorDirName() + "/" + CONNECTORJSONPROPERTIESFILE); + String jsonString = FileUtils.readFileToString(jsonFile); + JsonElement jtree = new JsonParser().parse(jsonString); + JsonElement jconfig = jtree.getAsJsonObject().get("config"); + JsonObject jobject = jconfig.getAsJsonObject(); + // Set properties defaults + jobject.addProperty("sol.host", "tcp://" + new TestConfigProperties().getProperty("sol.host") + ":55555"); + jobject.addProperty("sol.username", SOL_ADMINUSER_NAME); + jobject.addProperty("sol.password", SOL_ADMINUSER_PW); + jobject.addProperty("sol.vpn_name", SOL_VPN); + jobject.addProperty("kafka.topic", kafkaTestTopic); + jobject.addProperty("sol.topics", SOL_TOPICS); + jobject.addProperty("sol.queue", SOL_QUEUE); + jobject.addProperty("sol.message_processor_class", CONN_MSGPROC_CLASS); + jobject.addProperty("sol.kafka_message_key", CONN_KAFKA_MSGKEY); + jobject.addProperty("value.converter", "org.apache.kafka.connect.converters.ByteArrayConverter"); + jobject.addProperty("key.converter", "org.apache.kafka.connect.storage.StringConverter"); + jobject.addProperty("tasks.max", "1"); + // Override properties if provided + if (props != null) { + props.forEach((key, value) -> { + jobject.addProperty((String) key, (String) value); + }); + } + Gson gson = new Gson(); + configJson = gson.toJson(jtree); + } catch (IOException e) { + e.printStackTrace(); + } + + // Configure and start the solace connector + try { + // check presence of Solace plugin: curl + // http://18.218.82.209:8083/connector-plugins | jq + Request request = new Request.Builder().url("http://" + connectorAddress + "/connector-plugins").build(); + Response response = client.newCall(request).execute(); + assert (response.isSuccessful()); + String results = response.body().string(); + logger.info("Available connector plugins: " + results); + assert (results.contains("solace")); + + // Delete a running connector, if any + Request deleterequest = new Request.Builder().url("http://" + connectorAddress + "/connectors/solaceSourceConnector") + .delete().build(); + Response deleteresponse = client.newCall(deleterequest).execute(); + logger.info("Delete response: " + deleteresponse); + + // configure plugin: curl -X POST -H "Content-Type: application/json" -d + // @solace_source_properties.json http://18.218.82.209:8083/connectors + Request configrequest = new Request.Builder().url("http://" + connectorAddress + "/connectors") + .post(RequestBody.create(configJson, MediaType.parse("application/json"))).build(); + Response configresponse = client.newCall(configrequest).execute(); + // if (!configresponse.isSuccessful()) throw new IOException("Unexpected code " + // + configresponse); + String configresults = configresponse.body().string(); + logger.info("Connector config results: " + configresults); + + // check success + Request statusrequest = new Request.Builder() + .url("http://" + connectorAddress + "/connectors/solaceSourceConnector/status").build(); + Response statusresponse; + long starttime = System.currentTimeMillis(); + do { + statusresponse = client.newCall(statusrequest).execute(); + assert (System.currentTimeMillis() - starttime < 10000l); // don't wait forever + } while (!statusresponse.body().string().contains("state\":\"RUNNING")); + Thread.sleep(10000); // Give some extra time to start + logger.info("Connector is now 
RUNNING"); + } catch (IOException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java new file mode 100644 index 0000000..515da08 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/SourceConnectorIT.java @@ -0,0 +1,511 @@ +package com.solace.connector.kafka.connect.source.it; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import com.solacesystems.jcsmp.BytesMessage; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.Message; +import com.solacesystems.jcsmp.Queue; +import com.solacesystems.jcsmp.TextMessage; +import com.solacesystems.jcsmp.Topic; +import com.solacesystems.jcsmp.impl.AbstractDestination; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class SourceConnectorIT extends DockerizedPlatformSetupApache implements TestConstants { + + static Logger logger = LoggerFactory.getLogger(SourceConnectorIT.class.getName()); + // Connectordeployment creates a Kafka topic "kafkaTestTopic", which is used + // next + static SolaceConnectorDeployment connectorDeployment = new SolaceConnectorDeployment(); + static TestKafkaConsumer kafkaConsumer = new TestKafkaConsumer(SolaceConnectorDeployment.kafkaTestTopic); + static TestSolaceProducer solaceProducer = new TestSolaceProducer(); + + //////////////////////////////////////////////////// + // Main setup/teardown + + @BeforeAll + static void setUp() { + connectorDeployment.waitForConnectorRestIFUp(); + connectorDeployment.provisionKafkaTestTopic(); + solaceProducer.setup(); + kafkaConsumer.run(); + try { + Thread.sleep(1000l); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + @AfterAll + static void cleanUp() { + kafkaConsumer.stop(); + solaceProducer.close(); + } + + //////////////////////////////////////////////////// + // Test types + + void messageToKafkaTest(Message msg, AbstractDestination destination, String expectedValue, Object expectedKey) { + try { + // Clean catch queue first + // TODO: fix possible concurrency issue with cleaning/wring the queue later + TestKafkaConsumer.kafkaReceivedMessages.clear(); + // Send Solace message + if (destination instanceof Topic) { + solaceProducer.sendMessageToTopic((Topic) destination, msg); + } else { + solaceProducer.sendMessageToQueue((Queue) destination, msg); + } + // Wait for Kafka to report message + ConsumerRecord record = TestKafkaConsumer.kafkaReceivedMessages.poll(5, TimeUnit.SECONDS); + // Evaluate message + assert (record != null); + logger.info("Kafka message received - Key=" + record.key() + ", Value=" + record.value()); + assert record.value().equals(expectedValue); + // Check key + if (expectedKey == null) { + assert (record.key() == null); + } else { + assert (record.key() instanceof ByteBuffer); + ByteBuffer bb = (ByteBuffer) record.key(); + byte[] b = new 
byte[bb.remaining()]; + bb.get(b); + if (expectedKey instanceof String) { + assert (Arrays.equals(b, ((String) expectedKey).getBytes())); + } else { + assert (Arrays.equals(b, (byte[]) expectedKey)); + } + } + } catch (JCSMPException e1) { + e1.printStackTrace(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + //////////////////////////////////////////////////// + // Scenarios + + @DisplayName("Solace connector SimpleMessageProcessor tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorSimpleMessageProcessorTests { + + //////////////////////////////////////////////////// + // Scenarios + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); + prop.setProperty("sol.topics", "TestTopic1/SubTopic"); + prop.setProperty("sol.username", "test"); + prop.setProperty("sol.password", "test"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("1-Hello TextMessageToTopicTest world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "1-Hello TextMessageToTopicTest world!", null); + } + + @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '2', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'T', + 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "2-Hello Topic world!", null); + } + + @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage(null); + msg.writeAttachment(new byte[] { '3', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', + ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "3-Hello attached world!", null); + } + + @DisplayName("TextMessage-Queue-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerTextmessageToKafkaTest() { + TextMessage msg = solaceProducer.createTextMessage("4-Hello TextmessageToKafkaTest world!"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "4-Hello TextmessageToKafkaTest world!", null); + } + + @DisplayName("BytesMessage-Queue-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerBytesmessageToKafkaTest() { + BytesMessage msg = solaceProducer.createBytesMessage(new byte[] { '5', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'Q', + 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "5-Hello Queue world!", null); + } + + @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { + BytesMessage msg = solaceProducer.createBytesMessage(null); + msg.writeAttachment(new byte[] { '6', '-', 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', + ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "6-Hello attached world!", null); + } + + } + + ////////////////////////////////////////////////////////////////////////////////////////// + + @DisplayName("Solace connector SolSampleKeyedMessageProcessor-NONE tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorNoneKeyedMessageProcessorTests { + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); + prop.setProperty("sol.kafka_message_key", "NONE"); + prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest1 world!", null); + } + + @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello Topic world!", null); + } + + @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage(null); + msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', + 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello attached world!", null); + } + + @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextmessageToKafkaTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello TextmessageToKafkaTest world!", null); + } + + @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerBytesmessageToKafkaTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello Queue world!", null); + } + + @DisplayName("ByteMessage-AttachmentPayload-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageWithAttachmentPayloadToQueueTest() { + BytesMessage msg = solaceProducer.createBytesMessage(null); + msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', + 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello attached world!", null); + } + + } + + ////////////////////////////////////////////////////////////////////////////////////////// + + @DisplayName("Solace connector SolSampleKeyedMessageProcessor-DESTINATION tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorDestinationKeyedMessageProcessorTests { + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); + prop.setProperty("sol.kafka_message_key", "DESTINATION"); + prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest1 world!", "TestTopic1/SubTopic"); + } + + @DisplayName("TextMessage-Topic-wildcard-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest2() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest2 world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic2/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest2 world!", "TestTopic2/SubTopic"); + } + + @DisplayName("TextMessage-Topic-multi-level-wildcard-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest3() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest3 world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic3/SubTopic/SubSubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest3 world!", "TestTopic3/SubTopic/SubSubTopic"); + } + + @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello Topic world!", "TestTopic1/SubTopic"); + } + + @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextmessageToKafkaTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello TextmessageToKafkaTest world!", SOL_QUEUE); + } + + @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerBytesmessageToKafkaTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello Queue world!", SOL_QUEUE); + } + } + + ////////////////////////////////////////////////////////////////////////////////////////// + + @DisplayName("Solace connector SolSampleKeyedMessageProcessor-CORRELATION_ID tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorCorrelationIdKeyedMessageProcessorTests { + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); + prop.setProperty("sol.kafka_message_key", "CORRELATION_ID"); + prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); + msg.setCorrelationId("test"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest1 world!", "test"); + } + + @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + msg.setCorrelationId("test2"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello Topic world!", "test2"); + } + + @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextmessageToKafkaTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); + msg.setCorrelationId("test3"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello TextmessageToKafkaTest world!", "test3"); + } + + @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerBytesmessageToKafkaTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); + msg.setCorrelationId("test4"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello Queue world!", "test4"); + } + } + + ////////////////////////////////////////////////////////////////////////////////////////// + + @DisplayName("Solace connector SolSampleKeyedMessageProcessor-CORRELATION_ID_AS_BYTES tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorCorrelationIdAsBytesKeyedMessageProcessorTests { + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolaceSampleKeyedMessageProcessor"); + prop.setProperty("sol.kafka_message_key", "CORRELATION_ID_AS_BYTES"); + prop.setProperty("sol.topics", "TestTopic1/SubTopic,TestTopic2/*,TestTopic3/>"); + prop.setProperty("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest1 world!"); + msg.setCorrelationId(new String(new byte[] { 1, 2, 3, 4 })); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest1 world!", new String(new byte[] { 1, 2, 3, 4 })); + } + + @DisplayName("ByteMessage-Topic-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + msg.setCorrelationId("test2"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello Topic world!", "test2"); + } + + @DisplayName("TextMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerTextmessageToKafkaTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextmessageToKafkaTest world!"); + msg.setCorrelationId("test3"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello TextmessageToKafkaTest world!", "test3"); + } + + @DisplayName("BytesMessage-Queue-SolSampleKeyedMessageProcessor") + @Test + void kafkaConsumerBytesmessageToKafkaTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'Q', 'u', 'e', 'u', 'e', ' ', 'w', 'o', 'r', 'l', 'd', '!' 
}); + msg.setCorrelationId("test4"); + messageToKafkaTest(msg, solaceProducer.defineQueue(SOL_QUEUE), + // expected value & key: + "Hello Queue world!", "test4"); + } + } + + //////////////////////////////////////////////////// + // Scenarios + + @DisplayName("Solace connector SharedSubscriptions tests") + @Nested + @TestInstance(Lifecycle.PER_CLASS) + class SolaceConnectorSharedSubscriptionsTests { + + @BeforeAll + void setUp() { + solaceProducer.resetQueue(SOL_QUEUE); + Properties prop = new Properties(); + prop.setProperty("sol.message_processor_class", + "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor"); + prop.setProperty("sol.topics", "#share/group1/TestTopic1/SubTopic"); + prop.setProperty("tasks.max", "5"); + connectorDeployment.startConnector(prop); + } + + @DisplayName("TextMessage-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerTextMessageToTopicTest() { + TextMessage msg = solaceProducer.createTextMessage("Hello TextMessageToTopicTest world!"); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello TextMessageToTopicTest world!", null); + } + + @DisplayName("ByteMessage-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerByteMessageToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage( + new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'T', 'o', 'p', 'i', 'c', ' ', 'w', 'o', 'r', 'l', 'd', '!' }); + messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"), + // expected value & key: + "Hello Topic world!", null); + } + + @DisplayName("ByteMessage-AttachmentPayload-Topic-SolSampleSimpleMessageProcessor") + @Test + void kafkaConsumerByteMessageWithAttachmentPayloadToTopicTest() { + BytesMessage msg = solaceProducer.createBytesMessage(null); + msg.writeAttachment(new byte[] { 'H', 'e', 'l', 'l', 'o', ' ', 'a', 't', 't', 'a', 'c', 'h', 'e', 'd', ' ', 'w', + 'o', 'r', 'l', 'd', '!' 
});
+    messageToKafkaTest(msg, solaceProducer.defineTopic("TestTopic1/SubTopic"),
+        // expected value & key:
+        "Hello attached world!", null);
+  }
+
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java
new file mode 100644
index 0000000..06b1c9d
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConfigProperties.java
@@ -0,0 +1,64 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.Properties;
+
+public class TestConfigProperties {
+
+  static String testConfigPropertiesFile = "src/integrationTest/resources/manual-setup.properties";
+  // This class helps determine the docker host's IP address and avoids getting "localhost"
+  static class DockerHost {
+    static public String getIpAddress() {
+      String dockerReportedAddress = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA
+          .getServiceHost("kafka_1", 9092);
+      if ("localhost".equals(dockerReportedAddress) || "127.0.0.1".equals(dockerReportedAddress)) {
+        return Tools.getIpAddress();
+      } else {
+        return dockerReportedAddress;
+      }
+    }
+  }
+
+  private Properties properties = new Properties();
+
+  TestConfigProperties() {
+    try (FileReader fileReader = new FileReader(testConfigPropertiesFile)) {
+      properties.load(fileReader);
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+  }
+
+  String getProperty(String name) {
+    String configuredProperty = properties.getProperty(name);
+    if (configuredProperty != null) {
+      return configuredProperty;
+    }
+    switch (name) {
+      case "sol.host":
+        // No port here
+        return DockerHost.getIpAddress();
+
+      case "sol.username":
+        return "default";
+
+      case "sol.password":
+        return "default";
+
+      case "sol.vpn_name":
+        return "default";
+
+      case "kafka.connect_rest_url":
+        return (DockerHost.getIpAddress() + ":28083");
+
+      case "kafka.bootstrap_servers":
+        return (DockerHost.getIpAddress() + ":39092");
+
+      default:
+        return null;
+    }
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java
new file mode 100644
index 0000000..22a21ec
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestConstants.java
@@ -0,0 +1,30 @@
+package com.solace.connector.kafka.connect.source.it;
+
+public interface TestConstants {
+
+  public static final String PUBSUB_TAG = "latest";
+  public static final String PUBSUB_HOSTNAME = "solbroker";
+  public static final String PUBSUB_NETWORK_NAME = "solace_msg_network";
+  public static final String FULL_DOCKER_COMPOSE_FILE_PATH = "src/integrationTest/resources/";
+  public static final String[] SERVICES = new String[]{"solbroker"};
+  public static final long MAX_STARTUP_TIMEOUT_MSEC = 120000L;
+  public static final String DIRECT_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/direct-active";
+  public static final int DIRECT_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550;
+  public static final String GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_URI = "/health-check/guaranteed-active";
+  public static final int GUARANTEED_MESSAGING_HTTP_HEALTH_CHECK_PORT = 5550;
+
+  public static final String CONNECTORSOURCE = "build/distributions/pubsubplus-connector-kafka-source.zip";
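+  // The test deployment is expected to unzip the distribution archive above into
+  // the resources folder below; Tools.getUnzippedConnectorDirName() then locates
+  // the resulting directory by the "pubsubplus-connector-kafka-*" name pattern.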
+  public static final String UNZIPPEDCONNECTORDESTINATION = "src/integrationTest/resources";
+  public static final String CONNECTORPROPERTIESFILE = "etc/solace_source.properties";
+  public static final String CONNECTORJSONPROPERTIESFILE = "etc/solace_source_properties.json";
+
+  public static final String SOL_ADMINUSER_NAME = "default";
+  public static final String SOL_ADMINUSER_PW = "default";
+  public static final String SOL_VPN = "default";
+  public static final String KAFKA_SOURCE_TOPIC = "kafka-source-test-topic";
+  public static final String SOL_TOPICS = "pubsubplus-test-topic";
+  public static final String SOL_QUEUE = "pubsubplus-test-queue";
+  public static final String CONN_MSGPROC_CLASS = "com.solace.connector.kafka.connect.source.msgprocessors.SolSampleSimpleMessageProcessor";
+  public static final String CONN_KAFKA_MSGKEY = "DESTINATION";
+
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java
new file mode 100644
index 0000000..ea7519f
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestKafkaConsumer.java
@@ -0,0 +1,117 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.errors.WakeupException;
+import org.apache.kafka.common.serialization.ByteBufferDeserializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.nio.ByteBuffer;
+import java.time.Duration;
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.CountDownLatch;
+
+public class TestKafkaConsumer {
+
+  // Queue to communicate received messages
+  public static BlockingQueue<ConsumerRecord<ByteBuffer, String>> kafkaReceivedMessages = new ArrayBlockingQueue<>(10);
+
+  private Runnable myConsumerRunnable;
+  private String kafkaTopic;
+  Logger logger = LoggerFactory.getLogger(TestKafkaConsumer.class.getName());
+  CountDownLatch latch = new CountDownLatch(1);
+
+  public TestKafkaConsumer(String kafkaTestTopic) {
+    kafkaTopic = kafkaTestTopic;
+  }
+
+  public void run() {
+    String bootstrapServers = MessagingServiceFullLocalSetupConfluent.COMPOSE_CONTAINER_KAFKA.getServiceHost("kafka_1", 39092)
+        + ":39092";
+    String groupId = "test";
+
+    // latch for dealing with multiple threads
+
+    // create the consumer runnable
+    logger.info("Creating the consumer thread");
+    myConsumerRunnable = new ConsumerRunnable(bootstrapServers, groupId, kafkaTopic, latch);
+
+    // start the thread
+    Thread myThread = new Thread(myConsumerRunnable);
+    myThread.start();
+    try {
+      latch.await();
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+  }
+
+  public void stop() {
+    logger.info("Stopping consumer");
+    ((ConsumerRunnable) myConsumerRunnable).shutdown();
+    try {
+      latch.await();
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+    logger.info("Consumer has been stopped");
+  }
+
+  public class ConsumerRunnable implements Runnable {
+
+    private CountDownLatch latch;
+    private KafkaConsumer<ByteBuffer, String> consumer;
+    private Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class.getName());
+
+    public ConsumerRunnable(String
bootstrapServers, String groupId, String topic, CountDownLatch latch) {
+      this.latch = latch;
+
+      // create consumer configs
+      Properties properties = new Properties();
+      properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
+      properties.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteBufferDeserializer.class.getName());
+      properties.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+      properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupId);
+      properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+
+      // create consumer
+      consumer = new KafkaConsumer<>(properties);
+      // subscribe consumer to our topic(s)
+      consumer.subscribe(Arrays.asList(topic));
+    }
+
+    @Override
+    public void run() {
+      // poll for new data
+      try {
+        while (true) {
+          ConsumerRecords<ByteBuffer, String> records = consumer.poll(Duration.ofMillis(100));
+          latch.countDown();
+          for (ConsumerRecord<ByteBuffer, String> record : records) {
+            kafkaReceivedMessages.put(record);
+            logger.info("Key: " + record.key() + ", Value: " + record.value());
+            logger.info("Partition: " + record.partition() + ", Offset: " + record.offset());
+          }
+        }
+      } catch (WakeupException e) {
+        logger.info("Received shutdown signal!");
+      } catch (InterruptedException e) {
+        e.printStackTrace();
+      } finally {
+        consumer.close();
+      }
+    }
+
+    public void shutdown() {
+      // the wakeup() method is a special method to interrupt consumer.poll();
+      // it will throw a WakeupException
+      consumer.wakeup();
+    }
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java
new file mode 100644
index 0000000..f29a946
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/TestSolaceProducer.java
@@ -0,0 +1,102 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.solacesystems.jcsmp.BytesMessage;
+import com.solacesystems.jcsmp.DeliveryMode;
+import com.solacesystems.jcsmp.EndpointProperties;
+import com.solacesystems.jcsmp.JCSMPException;
+import com.solacesystems.jcsmp.JCSMPFactory;
+import com.solacesystems.jcsmp.JCSMPProperties;
+import com.solacesystems.jcsmp.JCSMPSession;
+import com.solacesystems.jcsmp.JCSMPStreamingPublishEventHandler;
+import com.solacesystems.jcsmp.Message;
+import com.solacesystems.jcsmp.Queue;
+import com.solacesystems.jcsmp.TextMessage;
+import com.solacesystems.jcsmp.Topic;
+import com.solacesystems.jcsmp.XMLMessageProducer;
+
+public class TestSolaceProducer {
+
+  static Logger logger = LoggerFactory.getLogger(TestSolaceProducer.class.getName());
+  private JCSMPSession session;
+  private XMLMessageProducer producer;
+
+  public void setup() {
+    TestConfigProperties configProps = new TestConfigProperties();
+    final JCSMPProperties properties = new JCSMPProperties();
+    properties.setProperty(JCSMPProperties.HOST, "tcp://" + configProps.getProperty("sol.host") + ":55555"); // host:port
+    properties.setProperty(JCSMPProperties.USERNAME, configProps.getProperty("sol.username")); // client-username
+    properties.setProperty(JCSMPProperties.VPN_NAME, configProps.getProperty("sol.vpn_name")); // message-vpn
+    properties.setProperty(JCSMPProperties.PASSWORD, configProps.getProperty("sol.password")); // client-password
+    try {
+      session = JCSMPFactory.onlyInstance().createSession(properties);
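+      // Note: createSession() only constructs the session object from the
+      // properties above; the connection to the broker is established by connect().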
session.connect();
+      producer = session.getMessageProducer(new JCSMPStreamingPublishEventHandler() {
+        @Override
+        public void responseReceived(String messageID) {
+          logger.info("Producer received response for msg: " + messageID);
+        }
+        @Override
+        public void handleError(String messageID, JCSMPException e, long timestamp) {
+          logger.error("Producer received error for msg: {}@{} - {}",
+              messageID, timestamp, e);
+        }
+      });
+    } catch (JCSMPException e1) {
+      e1.printStackTrace();
+    }
+  }
+
+  public TextMessage createTextMessage(String contents) {
+    TextMessage textMessage = JCSMPFactory.onlyInstance().createMessage(TextMessage.class);
+    textMessage.setText(contents);
+    return textMessage;
+  }
+
+  public BytesMessage createBytesMessage(byte[] contents) {
+    BytesMessage bytesMessage = JCSMPFactory.onlyInstance().createMessage(BytesMessage.class);
+    bytesMessage.setData(contents);
+    return bytesMessage;
+  }
+
+  public Topic defineTopic(String topicName) {
+    return JCSMPFactory.onlyInstance().createTopic(topicName);
+  }
+
+  public Queue defineQueue(String queueName) {
+    return JCSMPFactory.onlyInstance().createQueue(queueName);
+  }
+
+  public void sendMessageToTopic(Topic topic, Message msg) throws JCSMPException {
+    producer.send(msg, topic);
+    logger.info("Message sent to Solace topic " + topic.toString());
+  }
+
+  public void resetQueue(String queueName) {
+    try {
+      final Queue queue = JCSMPFactory.onlyInstance().createQueue(queueName);
+      // First remove the existing queue, which may contain remnants left over from other tests
+      session.deprovision(queue, JCSMPSession.FLAG_IGNORE_DOES_NOT_EXIST);
+      // Provision new queue
+      final EndpointProperties endpointProps = new EndpointProperties();
+      endpointProps.setPermission(EndpointProperties.PERMISSION_CONSUME);
+      endpointProps.setAccessType(EndpointProperties.ACCESSTYPE_NONEXCLUSIVE);
+      session.provision(queue, endpointProps, JCSMPSession.FLAG_IGNORE_ALREADY_EXISTS);
+      logger.info("Reset Solace queue " + queueName);
+    } catch (JCSMPException e) {
+      e.printStackTrace();
+    }
+  }
+
+  public void sendMessageToQueue(Queue queue, Message msg) throws JCSMPException {
+    msg.setDeliveryMode(DeliveryMode.PERSISTENT);
+    producer.send(msg, queue);
+    logger.info("Message sent to Solace queue " + queue.toString());
+  }
+
+  public void close() {
+    session.closeSession();
+  }
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java
new file mode 100644
index 0000000..ed08533
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/source/it/Tools.java
@@ -0,0 +1,49 @@
+package com.solace.connector.kafka.connect.source.it;
+
+import java.io.IOException;
+import java.net.InterfaceAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+public class Tools {
+  static public String getIpAddress() {
+    Set<String> HostAddresses = new HashSet<>();
+    try {
+      for (NetworkInterface ni : Collections.list(NetworkInterface.getNetworkInterfaces())) {
+        if (!ni.isLoopback() && ni.isUp() && ni.getHardwareAddress() != null) {
+          for (InterfaceAddress ia : ni.getInterfaceAddresses()) {
+            if (ia.getBroadcast() != null) { // If limited to IPV4
+              HostAddresses.add(ia.getAddress().getHostAddress());
+            }
+          }
+        }
+      }
+    } catch
(SocketException e) { }
+    return HostAddresses.iterator().next();
+  }
+
+  static public String getUnzippedConnectorDirName() {
+    String connectorUnzippedPath = null;
+    try {
+      DirectoryStream<Path> dirs = Files.newDirectoryStream(
+          Paths.get(TestConstants.UNZIPPEDCONNECTORDESTINATION), "pubsubplus-connector-kafka-*");
+      for (Path entry : dirs) {
+        connectorUnzippedPath = entry.toString();
+        break; // expecting only one
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    if (connectorUnzippedPath == null) {
+      return null; // no unzipped connector directory was found
+    }
+    if (connectorUnzippedPath.contains("\\")) {
+      return connectorUnzippedPath.substring(connectorUnzippedPath.lastIndexOf("\\") + 1);
+    }
+    return connectorUnzippedPath.substring(connectorUnzippedPath.lastIndexOf("/") + 1);
+  }
+}
diff --git a/src/integrationTest/resources/docker-compose-kafka-apache.yml b/src/integrationTest/resources/docker-compose-kafka-apache.yml
new file mode 100644
index 0000000..18c2a2e
--- /dev/null
+++ b/src/integrationTest/resources/docker-compose-kafka-apache.yml
@@ -0,0 +1,29 @@
+version: '3.7'
+
+services:
+  zookeeper:
+    image: bitnami/zookeeper:3
+    ports:
+      - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+      ALLOW_ANONYMOUS_LOGIN: 'yes'
+  kafka:
+    image: bitnami/kafka:2
+    ports:
+      - 9092:9092
+      - 29092:29092
+      - 39092:39092
+    environment:
+      KAFKA_CFG_BROKER_ID: 1
+      KAFKA_CFG_ZOOKEEPER_CONNECT: zookeeper:2181
+      ALLOW_PLAINTEXT_LISTENER: 'yes'
+      KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT
+      KAFKA_CFG_LISTENERS: PLAINTEXT://:9092,PLAINTEXT_HOST://:29092,PLAINTEXT_EXTHOST://:39092
+      KAFKA_CFG_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092
+#      KAFKA_CFG_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+#      KAFKA_CFG_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+    depends_on:
+      - zookeeper
diff --git a/src/integrationTest/resources/docker-compose-kafka-confluent.yml b/src/integrationTest/resources/docker-compose-kafka-confluent.yml
new file mode 100644
index 0000000..f474f44
--- /dev/null
+++ b/src/integrationTest/resources/docker-compose-kafka-confluent.yml
@@ -0,0 +1,71 @@
+version: '3.7'
+
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:5.4.0
+    ports:
+      - 2181:2181
+    environment:
+      ZOOKEEPER_CLIENT_PORT: 2181
+      ZOOKEEPER_TICK_TIME: 2000
+  kafka:
+    image: confluentinc/cp-kafka:5.4.0
+    ports:
+      - 9092:9092
+      - 29092:29092
+      - 39092:39092
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,PLAINTEXT_EXTHOST:PLAINTEXT
+      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092,PLAINTEXT_EXTHOST://$KAFKA_HOST:39092
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+      KAFKA_TOPIC: $KAFKA_TOPIC
+    depends_on:
+      - zookeeper
+  kafka-setup:
+    image: confluentinc/cp-kafka:5.4.0
+    hostname: kafka-setup
+    depends_on:
+      - kafka
+      - zookeeper
+    command: "bash -c 'echo Waiting for Kafka to be ready... && \
+                       cub kafka-ready -b kafka:9092 1 30 && \
+                       kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic $KAFKA_TOPIC && \
+                       sleep 30'"
+    environment:
+      # The following settings are listed here only to satisfy the image's requirements.
+      # We override the image's `command` anyways, hence this container will not start a broker.
+      KAFKA_BROKER_ID: ignored
+      KAFKA_ZOOKEEPER_CONNECT: ignored
+
+  schema-registry:
+    image: confluentinc/cp-schema-registry:5.4.0
+    ports:
+      - 8081:8081
+    environment:
+      SCHEMA_REGISTRY_HOST_NAME: localhost
+      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
+      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://kafka:9092
+    depends_on:
+      - kafka
+
+  control-center:
+    image: confluentinc/cp-enterprise-control-center:latest
+    hostname: control-center
+    depends_on:
+      - zookeeper
+      - kafka
+      - schema-registry
+    ports:
+      - "9021:9021"
+    environment:
+      CONTROL_CENTER_BOOTSTRAP_SERVERS: 'kafka:9092'
+      CONTROL_CENTER_ZOOKEEPER_CONNECT: 'zookeeper:2181'
+      CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
+      CONTROL_CENTER_REPLICATION_FACTOR: 1
+      CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
+      CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
+      CONFLUENT_METRICS_TOPIC_REPLICATION: 1
+      PORT: 9021
\ No newline at end of file
diff --git a/src/integrationTest/resources/docker-compose-solace.yml b/src/integrationTest/resources/docker-compose-solace.yml
new file mode 100644
index 0000000..67b4105
--- /dev/null
+++ b/src/integrationTest/resources/docker-compose-solace.yml
@@ -0,0 +1,25 @@
+version: '3.5'
+
+services:
+  solbroker:
+    image: solace/solace-pubsub-standard:$PUBSUB_TAG
+    hostname: $PUBSUB_HOSTNAME
+    env_file:
+      - ./solace.env
+    ports:
+      - "2222:2222"
+      - "8080:8080"
+      - "55003:55003"
+      - "55443:55443"
+      - "55445:55445"
+      - "55555:55555"
+      - "55556:55556"
+      - "5672:5672"
+      - "5550:5550"
+      - "8008:8008"
+    shm_size: 2g
+    ulimits:
+      memlock: -1
+      nofile:
+        soft: 2448
+        hard: 42192
diff --git a/src/integrationTest/resources/logback-test.xml b/src/integrationTest/resources/logback-test.xml
new file mode 100644
index 0000000..985c68e
--- /dev/null
+++ b/src/integrationTest/resources/logback-test.xml
@@ -0,0 +1,14 @@
+<configuration>
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder>
+            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger -%msg%n%rEx{full, org}</pattern>
+        </encoder>
+    </appender>
+
+    <root level="info">
+        <appender-ref ref="STDOUT" />
+    </root>
+</configuration>
\ No newline at end of file
diff --git a/src/integrationTest/resources/manual-setup.properties b/src/integrationTest/resources/manual-setup.properties
new file mode 100644
index 0000000..94b0e6c
--- /dev/null
+++ b/src/integrationTest/resources/manual-setup.properties
@@ -0,0 +1,6 @@
+#sol.host=mr1u6o37qn3lar.-cloud-clmessaging.solace.cloud
+sol.username=test
+sol.password=test
+#sol.vpn_name=b-1
+#kafka.connect_rest_url=A:28083
+#kafka.bootstrap_servers=B:39092
\ No newline at end of file
diff --git a/src/integrationTest/resources/solace.env b/src/integrationTest/resources/solace.env
new file mode 100644
index 0000000..863a835
--- /dev/null
+++ b/src/integrationTest/resources/solace.env
@@ -0,0 +1,4 @@
+username_admin_globalaccesslevel=admin
+username_admin_password=admin
+system_scaling_maxconnectioncount=100
+logging_debug_output=all
\ No newline at end of file
diff --git a/src/main/java/com/solace/source/connector/SolFlowEventCallBackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
similarity index 96%
rename from src/main/java/com/solace/source/connector/SolFlowEventCallBackHandler.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
index c4ef8de..5df1099 100644
--- a/src/main/java/com/solace/source/connector/SolFlowEventCallBackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolFlowEventCallBackHandler.java
@@ -17,7 +17,7 @@
 * under the
License. */ -package com.solace.source.connector; +package com.solace.connector.kafka.connect.source; import com.solacesystems.jcsmp.FlowEventArgs; import com.solacesystems.jcsmp.FlowEventHandler; diff --git a/src/main/java/com/solace/source/connector/SolMessageProcessor.java b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageProcessorIF.java similarity index 85% rename from src/main/java/com/solace/source/connector/SolMessageProcessor.java rename to src/main/java/com/solace/connector/kafka/connect/source/SolMessageProcessorIF.java index 8985772..894ca6f 100644 --- a/src/main/java/com/solace/source/connector/SolMessageProcessor.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageProcessorIF.java @@ -17,14 +17,14 @@ * under the License. */ -package com.solace.source.connector; +package com.solace.connector.kafka.connect.source; import com.solacesystems.jcsmp.BytesXMLMessage; import org.apache.kafka.connect.source.SourceRecord; -public interface SolMessageProcessor { - SolMessageProcessor process(String skey, BytesXMLMessage message); +public interface SolMessageProcessorIF { + SolMessageProcessorIF process(String skey, BytesXMLMessage message); SourceRecord[] getRecords(String kafkaTopic); } diff --git a/src/main/java/com/solace/source/connector/SolMessageQueueCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageQueueCallbackHandler.java similarity index 91% rename from src/main/java/com/solace/source/connector/SolMessageQueueCallbackHandler.java rename to src/main/java/com/solace/connector/kafka/connect/source/SolMessageQueueCallbackHandler.java index 1c1f977..7ee3a9c 100644 --- a/src/main/java/com/solace/source/connector/SolMessageQueueCallbackHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageQueueCallbackHandler.java @@ -17,7 +17,7 @@ * under the License. */ -package com.solace.source.connector; +package com.solace.connector.kafka.connect.source; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.JCSMPException; @@ -54,10 +54,13 @@ public void onException(JCSMPException je) { } @Override - public void onReceive(BytesXMLMessage msg) { + synchronized public void onReceive(BytesXMLMessage msg) { log.debug("=================Received Queue Message"); squeue.add(msg); + } + synchronized public void shutdown() { + squeue = null; } } diff --git a/src/main/java/com/solace/source/connector/SolMessageTopicCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageTopicCallbackHandler.java similarity index 85% rename from src/main/java/com/solace/source/connector/SolMessageTopicCallbackHandler.java rename to src/main/java/com/solace/connector/kafka/connect/source/SolMessageTopicCallbackHandler.java index f6bb1d2..55be636 100644 --- a/src/main/java/com/solace/source/connector/SolMessageTopicCallbackHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolMessageTopicCallbackHandler.java @@ -17,7 +17,7 @@ * under the License. 
*/
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import com.solacesystems.jcsmp.BytesXMLMessage;
 import com.solacesystems.jcsmp.JCSMPException;
@@ -39,7 +39,7 @@ public class SolMessageTopicCallbackHandler implements XMLMessageListener {
   * @param lconfig Connector Configuration
   * @param squeue Blocking Queue
   */
-  public SolMessageTopicCallbackHandler(SolaceSourceConfig lconfig,
+  public SolMessageTopicCallbackHandler(SolaceSourceConnectorConfig lconfig,
      BlockingQueue<BytesXMLMessage> squeue) {
    this.squeue = squeue;
    log.debug("===Constructor for SolMessageTopicProcessor");
@@ -54,12 +54,15 @@ public void onException(JCSMPException je) {
  }
 
  @Override
-  public void onReceive(BytesXMLMessage msg) {
+  synchronized public void onReceive(BytesXMLMessage msg) {
    log.debug("=================Received Message");
-
-
-    squeue.add(msg);
-
+    if (squeue != null) {
+      squeue.add(msg);
+    }
+  }
+
+  synchronized public void shutdown() {
+    squeue = null;
  }
 }
diff --git a/src/main/java/com/solace/source/connector/SolReconnectCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
similarity index 96%
rename from src/main/java/com/solace/source/connector/SolReconnectCallbackHandler.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
index 926172f..ec0c2a4 100644
--- a/src/main/java/com/solace/source/connector/SolReconnectCallbackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolReconnectCallbackHandler.java
@@ -17,7 +17,7 @@
 * under the License.
 */
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import com.solacesystems.jcsmp.JCSMPException;
 import com.solacesystems.jcsmp.JCSMPReconnectEventHandler;
diff --git a/src/main/java/com/solace/source/connector/SolSessionEventCallbackHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
similarity index 96%
rename from src/main/java/com/solace/source/connector/SolSessionEventCallbackHandler.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
index 415e52c..7b32572 100644
--- a/src/main/java/com/solace/source/connector/SolSessionEventCallbackHandler.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionEventCallbackHandler.java
@@ -17,7 +17,7 @@
 * under the License.
 */
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import com.solacesystems.jcsmp.SessionEvent;
 import com.solacesystems.jcsmp.SessionEventArgs;
diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java
new file mode 100644
index 0000000..c16b61d
--- /dev/null
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolSessionHandler.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.solace.connector.kafka.connect.source; + +import com.solacesystems.jcsmp.InvalidPropertiesException; +import com.solacesystems.jcsmp.JCSMPChannelProperties; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPFactory; +import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; +import com.solacesystems.jcsmp.JCSMPSessionStats; +import com.solacesystems.jcsmp.statistics.StatType; +import com.solacesystems.jcsmp.Context; +import com.solacesystems.jcsmp.ContextProperties; + +import java.util.Enumeration; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SolSessionHandler { + private static final Logger log = LoggerFactory.getLogger(SolSessionHandler.class); + + private SolaceSourceConnectorConfig connectorConfig; + + final JCSMPProperties properties = new JCSMPProperties(); + final JCSMPChannelProperties chanProperties = new JCSMPChannelProperties(); + private JCSMPSession session; + private Context ctx; + + public SolSessionHandler(SolaceSourceConnectorConfig connectorConfig) { + this.connectorConfig = connectorConfig; + ContextProperties ctx_prop = new ContextProperties(); + ctx_prop.setName(Thread.currentThread().getName()); // unique name + ctx = JCSMPFactory.onlyInstance().createContext(ctx_prop); + } + + /** + * Create JCSMPProperties to configure Solace JCSMPSession. 
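+   * Maps the connector's sol.* settings onto JCSMP session, channel and
+   * security (TLS/Kerberos) properties; intended to be called before connectSession().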
+   */
+  public void configureSession() {
+    // Required Properties
+    properties.setProperty(JCSMPProperties.USERNAME,
+        connectorConfig.getString(SolaceSourceConstants.SOL_USERNAME));
+    properties.setProperty(JCSMPProperties.PASSWORD,
+        connectorConfig.getString(SolaceSourceConstants.SOL_PASSWORD));
+    properties.setProperty(JCSMPProperties.VPN_NAME,
+        connectorConfig.getString(SolaceSourceConstants.SOL_VPN_NAME));
+    properties.setProperty(JCSMPProperties.HOST, connectorConfig.getString(SolaceSourceConstants.SOL_HOST));
+
+    // Channel Properties
+    chanProperties.setConnectTimeoutInMillis(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_connectTimeoutInMillis));
+    chanProperties.setReadTimeoutInMillis(connectorConfig
+        .getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis));
+    chanProperties.setConnectRetries(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_connectRetries));
+    chanProperties.setReconnectRetries(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_reconnectRetries));
+    chanProperties.setConnectRetriesPerHost(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_connectRetriesPerHost));
+    chanProperties.setReconnectRetryWaitInMillis(
+        connectorConfig.getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis));
+    chanProperties.setKeepAliveIntervalInMillis(
+        connectorConfig.getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis));
+    chanProperties.setKeepAliveLimit(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_keepAliveLimit));
+    chanProperties.setSendBuffer(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_sendBuffer));
+    chanProperties.setReceiveBuffer(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_receiveBuffer));
+    chanProperties.setTcpNoDelay(connectorConfig.getBoolean(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_tcpNoDelay));
+    chanProperties.setCompressionLevel(connectorConfig.getInt(SolaceSourceConstants
+        .SOL_CHANNEL_PROPERTY_compressionLevel));
+    // Add channel properties to Session Properties
+    properties.setProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES, chanProperties);
+
+    properties.setProperty(JCSMPProperties.REAPPLY_SUBSCRIPTIONS,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_REAPPLY_SUBSCRIPTIONS));
+    properties.setBooleanProperty(JCSMPProperties.GENERATE_SEND_TIMESTAMPS,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS));
+    properties.setBooleanProperty(JCSMPProperties.GENERATE_RCV_TIMESTAMPS,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS));
+    properties.setIntegerProperty(JCSMPProperties.SUB_ACK_WINDOW_SIZE,
+        connectorConfig.getInt(SolaceSourceConstants.SOL_SUB_ACK_WINDOW_SIZE));
+    properties.setBooleanProperty(JCSMPProperties.GENERATE_SEQUENCE_NUMBERS,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS));
+    properties.setBooleanProperty(JCSMPProperties.CALCULATE_MESSAGE_EXPIRATION,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION));
+    properties.setBooleanProperty(JCSMPProperties.PUB_MULTI_THREAD,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_PUB_MULTI_THREAD));
+    properties.setBooleanProperty(JCSMPProperties.MESSAGE_CALLBACK_ON_REACTOR,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR));
+    properties.setBooleanProperty(JCSMPProperties.IGNORE_DUPLICATE_SUBSCRIPTION_ERROR,
connectorConfig.getBoolean(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR));
+    properties.setBooleanProperty(JCSMPProperties.IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR));
+    properties.setBooleanProperty(JCSMPProperties
+        .NO_LOCAL, connectorConfig.getBoolean(SolaceSourceConstants.SOL_NO_LOCAL));
+    properties.setProperty(JCSMPProperties.AUTHENTICATION_SCHEME,
+        connectorConfig.getString(SolaceSourceConstants.SOL_AUTHENTICATION_SCHEME));
+    properties.setProperty(JCSMPProperties.KRB_SERVICE_NAME,
+        connectorConfig.getString(SolaceSourceConstants.SOL_KRB_SERVICE_NAME));
+    properties.setProperty(JCSMPProperties.SSL_CONNECTION_DOWNGRADE_TO,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO));
+    properties.setIntegerProperty(JCSMPProperties.SUBSCRIBER_LOCAL_PRIORITY,
+        connectorConfig.getInt(SolaceSourceConstants.SOL_SUBSCRIBER_LOCAL_PRIORITY));
+    properties.setIntegerProperty(JCSMPProperties.SUBSCRIBER_NETWORK_PRIORITY,
+        connectorConfig.getInt(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY));
+
+    // SSL-related properties; these only take effect when the Solace PubSub+
+    // broker connection URL uses the SSL scheme and port
+    log.info("=============Applying SSL configuration properties for the PubSub+ connection");
+    if (!(connectorConfig.getString(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES).equals(""))) {
+      properties.setProperty(JCSMPProperties.SSL_CIPHER_SUITES,
+          connectorConfig.getString(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES));
+    }
+    properties.setProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE));
+    properties.setProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE_DATE,
+        connectorConfig.getBoolean(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE_DATE));
+    properties.setProperty(JCSMPProperties.SSL_TRUST_STORE,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE));
+    properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD));
+    properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT));
+    properties.setProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST));
+    properties.setProperty(JCSMPProperties
+        .SSL_KEY_STORE, connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE));
+    properties.setProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD));
+    properties.setProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT));
+    properties.setProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT));
+    properties.setProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD,
+        connectorConfig.getString(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD));
+  }
+
+  /**
+   * Connect JCSMPSession.
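+   * Sets the Kerberos-related JVM system properties, then creates the JCSMP
+   * session on this handler's context and connects it to the event broker.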
+   * @throws JCSMPException if the session cannot be created or connected
+   */
+  public void connectSession() throws JCSMPException {
+
+    System.setProperty("java.security.auth.login.config",
+        connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_LOGIN_CONFIG));
+    System.setProperty("java.security.krb5.conf",
+        connectorConfig.getString(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG));
+
+    session = JCSMPFactory.onlyInstance().createSession(properties, ctx, new SolSessionEventCallbackHandler());
+    session.connect();
+  }
+
+  public JCSMPSession getSession() {
+    return session;
+  }
+
+  public void printStats() {
+    if (session != null) {
+      JCSMPSessionStats lastStats = session.getSessionStats();
+      Enumeration<StatType> estats = StatType.elements();
+      while (estats.hasMoreElements()) {
+        StatType statName = estats.nextElement();
+        log.info("\t" + statName.getLabel() + ": " + lastStats.getStat(statName));
+      }
+      log.info("\n");
+    }
+  }
+
+  /**
+   * Shut down the session and destroy its context.
+   * @return shutdown result
+   */
+  public boolean shutdown() {
+    if (session != null) {
+      session.closeSession();
+    }
+    if (ctx != null) {
+      ctx.destroy();
+    }
+    session = null;
+    return true;
+  }
+}
diff --git a/src/main/java/com/solace/source/connector/SolaceSourceConnector.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnector.java
similarity index 92%
rename from src/main/java/com/solace/source/connector/SolaceSourceConnector.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnector.java
index 201ce9d..1df0030 100644
--- a/src/main/java/com/solace/source/connector/SolaceSourceConnector.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnector.java
@@ -17,7 +17,7 @@
 * under the License.
 */
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -34,7 +34,7 @@
 public class SolaceSourceConnector extends SourceConnector {
   private static final Logger log = LoggerFactory.getLogger(SolaceSourceConnector.class);
 
-  SolaceSourceConfig mconfig;
+  SolaceSourceConnectorConfig mconfig;
   private Map<String, String> mconfigProperties;
 
   @Override
@@ -44,11 +44,11 @@ public String version() {
 
   @Override
   public void start(Map<String, String> props) {
+    log.info("==================== Start a SolaceSourceConnector");
     mconfigProperties = props;
-    mconfig = new SolaceSourceConfig(props);
+    mconfig = new SolaceSourceConnectorConfig(props);
-
   }
 
   @Override
@@ -77,7 +77,7 @@ public void stop() {
   @Override
   public ConfigDef config() {
     log.info("==================== Requesting Config for SolaceSourceConnector");
-    return SolaceSourceConfig.config;
+    return SolaceSourceConnectorConfig.config;
   }
 }
diff --git a/src/main/java/com/solace/source/connector/SolaceSourceConfig.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
similarity index 96%
rename from src/main/java/com/solace/source/connector/SolaceSourceConfig.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
index b701cc9..7e66d25 100644
--- a/src/main/java/com/solace/source/connector/SolaceSourceConfig.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConnectorConfig.java
@@ -17,7 +17,7 @@
 * under the License.
*/
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import java.util.Map;
 
@@ -28,23 +28,28 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-public class SolaceSourceConfig extends AbstractConfig {
+public class SolaceSourceConnectorConfig extends AbstractConfig {
 
-  private static final Logger log = LoggerFactory.getLogger(SolaceSourceConfig.class);
+  private static final Logger log = LoggerFactory.getLogger(SolaceSourceConnectorConfig.class);
 
   /**
    * Constructor to create Solace Configuration details for Source Connector.
    */
-  public SolaceSourceConfig(Map<String, String> properties) {
+  public SolaceSourceConnectorConfig(Map<String, String> properties) {
     super(config, properties);
-    log.info("==================Initialize Connnector properties");
+    log.info("==================Initialize Connector properties");
   }
 
   /**
    * Returns a ConfigDef to be used for Source Task.
    */
   public static ConfigDef solaceConfigDef() {
+
+    // TODO: Revise defaults to JCSMP defaults
+
     return new ConfigDef()
         .define(SolaceSourceConstants.KAFKA_TOPIC, Type.STRING, "default", Importance.HIGH,
             "Kafka topic to consume from")
@@ -58,9 +63,9 @@ public static ConfigDef solaceConfigDef() {
             "Solace VPN to connect with ")
         .define(SolaceSourceConstants.SOL_TOPICS, Type.STRING, null, Importance.MEDIUM,
            "Solace topic or list of topics to subscribe from")
-        .define(SolaceSourceConstants.SOl_QUEUE, Type.STRING, null,
+        .define(SolaceSourceConstants.SOL_QUEUE, Type.STRING, null,
            Importance.MEDIUM, "Solace queue to consume from")
-        .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, Type.CLASS, SolMessageProcessor.class,
+        .define(SolaceSourceConstants.SOL_MESSAGE_PROCESSOR, Type.CLASS, SolMessageProcessorIF.class,
            Importance.HIGH, "default Solace message processor to use")
        .define(SolaceSourceConstants.SOL_LOCALHOST, Type.STRING, null, Importance.LOW,
@@ -118,7 +123,7 @@ public static ConfigDef solaceConfigDef() {
            + "if the client has a subscription that matches the published topic.")
        .define(SolaceSourceConstants.SOL_SUB_ACK_WINDOW_SIZE, Type.INT, 255, Importance.LOW,
            "The size of the sliding subscriber ACK window. The valid range is 1-255")
-        .define(SolaceSourceConstants.SOl_AUTHENTICATION_SCHEME, Type.STRING,
+        .define(SolaceSourceConstants.SOL_AUTHENTICATION_SCHEME, Type.STRING,
            "AUTHENTICATION_SCHEME_BASIC", Importance.MEDIUM,
            "String property specifying the authentication scheme.")
        .define(SolaceSourceConstants.SOL_KRB_SERVICE_NAME, Type.STRING, "solace",
diff --git a/src/main/java/com/solace/source/connector/SolaceSourceConstants.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
similarity index 94%
rename from src/main/java/com/solace/source/connector/SolaceSourceConstants.java
rename to src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
index aa0b823..35e95be 100644
--- a/src/main/java/com/solace/source/connector/SolaceSourceConstants.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceConstants.java
@@ -17,7 +17,7 @@
 * under the License.
 */
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 /**
  * SolaceSourceConstants is responsible for correct configuration management.
@@ -37,13 +37,16 @@ public class SolaceSourceConstants {
   public static final String SOL_HOST = "sol.host";
   public static final String SOL_USERNAME = "sol.username";
   public static final String SOL_PASSWORD = "sol.password";
+
+  // TODO: SOL_MESSAGE_ACK_MODE is not used!
public static final String SOL_MESSAGE_ACK_MODE = "sol.message_ack_mode"; + public static final String SOL_VPN_NAME = "sol.vpn_name"; public static final String SOL_TOPICS = "sol.topics"; - public static final String SOl_QUEUE = "sol.queue"; + public static final String SOL_QUEUE = "sol.queue"; // Low Importance General Properties - public static final String SOl_SESSION_NAME = "sol.session_name"; + public static final String SOL_SESSION_NAME = "sol.session_name"; public static final String SOL_LOCALHOST = "sol.localhost"; public static final String SOL_CLIENT_NAME = "sol.client_name"; public static final String SOL_GENERATE_SENDER_ID = "sol.generate_sender_id"; @@ -62,7 +65,7 @@ public class SolaceSourceConstants { = "sol.ignore_subscription_not_found_error"; public static final String SOL_NO_LOCAL = "sol.no_local"; public static final String SOL_ACK_EVENT_MODE = "sol.ack_event_mode"; - public static final String SOl_AUTHENTICATION_SCHEME = "sol.authentication_scheme"; + public static final String SOL_AUTHENTICATION_SCHEME = "sol.authentication_scheme"; public static final String SOL_KRB_SERVICE_NAME = "sol.krb_service_name"; public static final String SOL_SSL_CONNECTION_DOWNGRADE_TO = "sol.ssl_connection_downgrade_to"; @@ -73,7 +76,7 @@ public class SolaceSourceConstants { public static final String SOL_SSL_VALIDATE_CERTIFICATE = "sol.ssl_validate_certificate"; public static final String SOL_SSL_VALIDATE_CERTIFICATE_DATE = "sol.ssl_validate_certicate_date"; public static final String SOL_SSL_TRUST_STORE = "sol.ssl_trust_store"; - public static final String SOL_SSL_TRUST_STORE_PASSWORD = "sol.ssl_trust_store_pasword"; + public static final String SOL_SSL_TRUST_STORE_PASSWORD = "sol.ssl_trust_store_password"; public static final String SOL_SSL_TRUST_STORE_FORMAT = "sol.ssl_trust_store_format"; public static final String SOL_SSL_TRUSTED_COMMON_NAME_LIST = "sol.ssl_trusted_common_name_list"; public static final String SOL_SSL_KEY_STORE = "sol.ssl_key_store"; @@ -103,7 +106,7 @@ public class SolaceSourceConstants { = "sol.channel_properties.keep_alive_limit"; public static final String SOL_CHANNEL_PROPERTY_sendBuffer = "sol.channel_properties.send_buffer"; public static final String SOL_CHANNEL_PROPERTY_receiveBuffer - = "sol.channle_properties.receive_buffer"; + = "sol.channel_properties.receive_buffer"; public static final String SOL_CHANNEL_PROPERTY_tcpNoDelay = "sol.channel_properties.tcp_no_delay"; public static final String SOL_CHANNEL_PROPERTY_compressionLevel @@ -111,7 +114,7 @@ public class SolaceSourceConstants { // Low Importance Persistent Message Properties public static final String SOL_SUB_ACK_WINDOW_SIZE = "sol.sub_ack_window_size"; - public static final String SOL_PUB_ACK_WINDOW_SIZE = "sol.sub_ack_window_size"; + public static final String SOL_PUB_ACK_WINDOW_SIZE = "sol.pub_ack_window_size"; public static final String SOL_SUB_ACK_TIME = "sol.sub_ack_time"; public static final String SOL_PUB_ACK_TIME = "sol.pub_ack_time"; public static final String SOL_SUB_ACK_WINDOW_THRESHOLD = "sol.sub_ack_window_threshold"; diff --git a/src/main/java/com/solace/source/connector/SolaceSourceQueueConsumer.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java similarity index 66% rename from src/main/java/com/solace/source/connector/SolaceSourceQueueConsumer.java rename to src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java index ebc6fed..643f33b 100644 --- 
a/src/main/java/com/solace/source/connector/SolaceSourceQueueConsumer.java
+++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceQueueConsumer.java
@@ -17,7 +17,7 @@
 * under the License.
 */
-package com.solace.source.connector;
+package com.solace.connector.kafka.connect.source;
 
 import com.solacesystems.jcsmp.BytesXMLMessage;
 import com.solacesystems.jcsmp.ConsumerFlowProperties;
@@ -26,7 +26,6 @@
 import com.solacesystems.jcsmp.JCSMPException;
 import com.solacesystems.jcsmp.JCSMPFactory;
 import com.solacesystems.jcsmp.JCSMPProperties;
-import com.solacesystems.jcsmp.JCSMPSession;
 import com.solacesystems.jcsmp.Queue;
 
 import java.util.concurrent.BlockingQueue;
@@ -34,60 +33,51 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
-
 public class SolaceSourceQueueConsumer {
   private static final Logger log = LoggerFactory.getLogger(SolaceSourceQueueConsumer.class);
-  private SolaceSourceConfig lconfig;
-  private BlockingQueue<BytesXMLMessage> squeue;
+  private SolaceSourceConnectorConfig lconfig;
   private Queue solQueue;
   private FlowReceiver recv;
+  private SolMessageQueueCallbackHandler callbackhandler;
+  private SolSessionHandler solSessionHandler;
 
-  // SolaceSourceQueueConsumer(SolaceSourceConfig lconfig,
-  //     BlockingQueue<BytesXMLMessage> squeue) {
-  SolaceSourceQueueConsumer(SolaceSourceConfig lconfig, BlockingQueue<BytesXMLMessage> squeue) {
+  SolaceSourceQueueConsumer(SolaceSourceConnectorConfig lconfig, SolSessionHandler solSessionHandler) {
    this.lconfig = lconfig;
-    this.squeue = squeue;
+    this.solSessionHandler = solSessionHandler;
  }
 
-  /**
-   * Initializes the JCSMP Session.
-   */
-  public boolean init(JCSMPSession session) {
-    solQueue = JCSMPFactory.onlyInstance()
-        .createQueue(lconfig.getString(SolaceSourceConstants.SOl_QUEUE));
+  public boolean init(BlockingQueue<BytesXMLMessage> squeue) {
+    solQueue = JCSMPFactory.onlyInstance().createQueue(lconfig.getString(SolaceSourceConstants.SOL_QUEUE));
    final ConsumerFlowProperties flow_prop = new ConsumerFlowProperties();
    flow_prop.setEndpoint(solQueue);
-    flow_prop.setAckMode(JCSMPProperties.SUPPORTED_MESSAGE_ACK_CLIENT);
+    flow_prop.setAckMode(JCSMPProperties.SUPPORTED_MESSAGE_ACK_CLIENT); // Will explicitly ack at commit
    flow_prop.setStartState(true);
    EndpointProperties endpointProps = new EndpointProperties();
    endpointProps.setAccessType(EndpointProperties.ACCESSTYPE_NONEXCLUSIVE);
    try {
-
-      recv = session.createFlow(new SolMessageQueueCallbackHandler(squeue),
-          flow_prop,
-          endpointProps,
+      callbackhandler = new SolMessageQueueCallbackHandler(squeue);
+      recv = solSessionHandler.getSession().createFlow(callbackhandler, flow_prop, endpointProps,
          new SolFlowEventCallBackHandler());
      recv.start();
    } catch (JCSMPException je) {
-      log.info("===========JCSMP Exception while creating Solace Flow to Queue "
-          + "in SolaceSourceQueueConsumer {} \n",
+      log.info("=========== JCSMP Exception while creating Solace Flow to Queue " + "in SolaceSourceQueueConsumer {} \n",
          je.getLocalizedMessage());
    }
-
    return true;
  }
 
-  /**
-   * Shuts down the Solace Receiver.
-   *
-   *

- /** - * Shuts down the Solace Receiver. - * - * @return - */ - public boolean shutdown() { + public void stop() { if (recv != null) { - recv.close(); + recv.stop(); } - return true; + } + public void shutdown() { + if (recv != null) { + recv.close(); + } + if (callbackhandler != null) { + callbackhandler.shutdown(); // Must remove reference to squeue + } } } diff --git a/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java new file mode 100644 index 0000000..8c1a48e --- /dev/null +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTask.java @@ -0,0 +1,172 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.solace.connector.kafka.connect.source; + +import com.solacesystems.jcsmp.BytesXMLMessage; +import com.solacesystems.jcsmp.DeliveryMode; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; +import com.solacesystems.jcsmp.JCSMPSessionStats; +import com.solacesystems.jcsmp.statistics.StatType; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Enumeration; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.kafka.connect.source.SourceRecord; +import org.apache.kafka.connect.source.SourceTask; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + + +public class SolaceSourceTask extends SourceTask { // implements XMLMessageListener{ + + private static final Logger log = LoggerFactory.getLogger(SolaceSourceTask.class); + + final JCSMPProperties properties = new JCSMPProperties(); + + SolaceSourceConnectorConfig connectorConfig; + private SolSessionHandler solSessionHandler = null; + BlockingQueue<BytesXMLMessage> ingressMessages + = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for any incoming message from PS+ topics and queue + BlockingQueue<BytesXMLMessage> outstandingAckList + = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for Solace Flow messages + String skafkaTopic; + SolaceSourceTopicListener topicListener = null; + SolaceSourceQueueConsumer queueConsumer = null; + + private volatile boolean shuttingDown = false; + + // private Class cProcessor; + private SolMessageProcessorIF processor; + + @Override + public String version() { + return VersionUtil.getVersion(); + } + + @Override + public void start(Map<String, String> props) { + + connectorConfig = new SolaceSourceConnectorConfig(props); + skafkaTopic = connectorConfig.getString(SolaceSourceConstants.KAFKA_TOPIC); + solSessionHandler = new SolSessionHandler(connectorConfig); + try { +
solSessionHandler.configureSession(); + solSessionHandler.connectSession(); + } catch (JCSMPException e) { + log.info("Received Solace exception {}, with the " + + "following: {} ", e.getCause(), e.getStackTrace()); + log.info("================ Failed to create JCSMP Session"); + stop(); + } + log.info("================ JCSMPSession Connected"); + if (connectorConfig.getString(SolaceSourceConstants.SOL_TOPICS) != null) { + topicListener = new SolaceSourceTopicListener(connectorConfig, solSessionHandler); + if (!topicListener.init(ingressMessages)) { + log.info("================ Failed to start topic consumer ... shutting down"); + stop(); + } + } + if (connectorConfig.getString(SolaceSourceConstants.SOL_QUEUE) != null) { + queueConsumer = new SolaceSourceQueueConsumer(connectorConfig, solSessionHandler); + if (!queueConsumer.init(ingressMessages)) { + log.info("================ Failed to start queue consumer ... shutting down"); + stop(); + } + } + } + + @Override + public synchronized List<SourceRecord> poll() throws InterruptedException { + + if (shuttingDown || ingressMessages.size() == 0) { + return null; // Nothing to do, return control + } + // There is at least one message to process + List<SourceRecord> records = new ArrayList<>(); + int processedInThisBatch = 0; + int count = 0; + int arraySize = ingressMessages.size(); + while (count < arraySize) { + BytesXMLMessage msg = ingressMessages.take(); + processor = connectorConfig + .getConfiguredInstance(SolaceSourceConstants + .SOL_MESSAGE_PROCESSOR, SolMessageProcessorIF.class) + .process(connectorConfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); + Collections.addAll(records, processor.getRecords(skafkaTopic)); + count++; + processedInThisBatch++; + if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT + || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { + outstandingAckList.add(msg); // enqueue messages received from guaranteed messaging endpoint for later ack + } + } + log.debug("Processed {} records in this batch.", processedInThisBatch); + return records; + }
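The poll() loop above instantiates whichever processor class is configured under SOL_MESSAGE_PROCESSOR and only ever calls `process(...)` followed by `getRecords(...)`, so a custom processor needs little more than those two methods. A minimal sketch under those assumptions (the class below is illustrative and not part of this change set; the `SourceRecord[]` return type is implied by the `Collections.addAll` call above, and the payload handling mirrors SolSampleSimpleMessageProcessor further down in this diff):

```java
package com.solace.connector.kafka.connect.source.msgprocessors;

import com.solace.connector.kafka.connect.source.SolMessageProcessorIF;
import com.solacesystems.jcsmp.BytesXMLMessage;
import com.solacesystems.jcsmp.TextMessage;

import java.nio.charset.StandardCharsets;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;

// Illustrative sketch: emits the PubSub+ payload as a raw byte[] record value.
public class PlainBytesMessageProcessor implements SolMessageProcessorIF {
  private byte[] messageOut;

  @Override
  public SolMessageProcessorIF process(String skey, BytesXMLMessage msg) {
    if (msg instanceof TextMessage) {
      messageOut = ((TextMessage) msg).getText().getBytes(StandardCharsets.UTF_8);
    } else {
      messageOut = msg.getAttachmentByteBuffer().array(); // binary attachment payload
    }
    return this;
  }

  @Override
  public SourceRecord[] getRecords(String kafkaTopic) {
    // No source partition/offset maps: delivery state is tracked on the broker side
    return new SourceRecord[] {
        new SourceRecord(null, null, kafkaTopic, Schema.BYTES_SCHEMA, messageOut)
    };
  }
}
```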
+ + /** + * Kafka Connect method invoked after the records returned by poll() have been committed; + * acknowledges the outstanding guaranteed messages back to PubSub+. + */ + public synchronized void commit() throws InterruptedException { + log.trace("Committing records"); + int currentLoad = outstandingAckList.size(); + int count = 0; + while (count != currentLoad) { + outstandingAckList.take().ackMessage(); + count++; + } + } + + @Override + public synchronized void stop() { + log.info("================ Shutting down PubSub+ Source Connector"); + shuttingDown = true; + if (topicListener != null) { + topicListener.shutdown(); + } + if (queueConsumer != null) { + queueConsumer.shutdown(); + } + if (solSessionHandler != null) { + log.info("Final Statistics summary:\n"); + solSessionHandler.printStats(); + solSessionHandler.shutdown(); + } + solSessionHandler = null; // At this point filling the ingress queue is stopped + ingressMessages.clear(); // Remove all remaining ingressed messages, these will no longer be imported to Kafka + log.info("PubSub+ Source Connector stopped"); + } + + // For testing only + public JCSMPSession getSolSession() { + return solSessionHandler.getSession(); + } + +} diff --git a/src/main/java/com/solace/source/connector/SolaceSourceTopicListener.java b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java similarity index 73% rename from src/main/java/com/solace/source/connector/SolaceSourceTopicListener.java rename to src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java index 886ba3d..1a4a30e 100644 --- a/src/main/java/com/solace/source/connector/SolaceSourceTopicListener.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/SolaceSourceTopicListener.java @@ -17,7 +17,7 @@ * under the License. */ -package com.solace.source.connector; +package com.solace.connector.kafka.connect.source; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.JCSMPException; @@ -36,37 +36,28 @@ public class SolaceSourceTopicListener { private static final Logger log = LoggerFactory.getLogger(SolaceSourceTopicListener.class); - private SolaceSourceConfig lconfig; + private SolaceSourceConnectorConfig lconfig; private String solaceTopics; private String[] topics; private XMLMessageConsumer cons; + SolMessageTopicCallbackHandler callbackhandler; + private SolSessionHandler solSessionHandler; - public BlockingQueue<BytesXMLMessage> squeue; - - /** - * Constructor. - */ - public SolaceSourceTopicListener(SolaceSourceConfig lconfig, - BlockingQueue<BytesXMLMessage> squeue) { + public SolaceSourceTopicListener(SolaceSourceConnectorConfig lconfig, SolSessionHandler solSessionHandler) { this.lconfig = lconfig; - this.squeue = squeue; - + this.solSessionHandler = solSessionHandler; } - /** - * Initializes JCSMPSession. 
- */ - public boolean init(JCSMPSession session) { - + public boolean init(BlockingQueue<BytesXMLMessage> squeue) { boolean topicListenerStarted = true; solaceTopics = lconfig.getString(SolaceSourceConstants.SOL_TOPICS); topics = solaceTopics.split(","); try { - cons = session.getMessageConsumer(new SolReconnectCallbackHandler(), - new SolMessageTopicCallbackHandler(lconfig, squeue)); + callbackhandler = new SolMessageTopicCallbackHandler(lconfig, squeue); + cons = solSessionHandler.getSession().getMessageConsumer(new SolReconnectCallbackHandler(), callbackhandler); } catch (JCSMPException je) { log.info("JCSMP Exception in SolaceSourceTopicListener {} \n", je.getLocalizedMessage()); } @@ -77,11 +68,11 @@ public boolean init(JCSMPSession session) { while (topics.length > counter) { log.info("Adding subscription for topic {} ", topics[counter].trim()); TopicProperties tproperties = new TopicProperties(); - tproperties.setRxAllDeliverToOne(lconfig - .getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE)); tproperties.setName(topics[counter].trim()); + // Only used for legacy PubSub+ versions + tproperties.setRxAllDeliverToOne(lconfig.getBoolean(SolaceSourceConstants.SOL_SUBSCRIBER_DTO_OVERRIDE)); topic = JCSMPFactory.onlyInstance().createTopic(tproperties); - session.addSubscription(topic, true); + solSessionHandler.getSession().addSubscription(topic, true); counter++; } } catch (JCSMPException je) { @@ -100,16 +91,13 @@ public boolean init(JCSMPSession session) { } - /** - * Stops JCSMPSession. - * @return - */ - public boolean shutdown() { + public void shutdown() { if (cons != null) { cons.close(); } - return true; - + if (callbackhandler != null) { + callbackhandler.shutdown(); // Must remove reference to squeue + } } } diff --git a/src/main/java/com/solace/source/connector/VersionUtil.java b/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java similarity index 67% rename from src/main/java/com/solace/source/connector/VersionUtil.java rename to src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java index 19a98bc..4aec262 100644 --- a/src/main/java/com/solace/source/connector/VersionUtil.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/VersionUtil.java @@ -1,4 +1,4 @@ -package com.solace.source.connector; +package com.solace.connector.kafka.connect.source; public class VersionUtil { @@ -7,7 +7,7 @@ public class VersionUtil { */ public static String getVersion() { - return "1.0.2"; + return "2.0.0"; } diff --git a/src/main/java/com/solace/source/connector/msgprocessors/SolSampleSimpleMessageProcessor.java b/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolSampleSimpleMessageProcessor.java similarity index 89% rename from src/main/java/com/solace/source/connector/msgprocessors/SolSampleSimpleMessageProcessor.java rename to src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolSampleSimpleMessageProcessor.java index 6eb0cf4..50dfe47 100644 --- a/src/main/java/com/solace/source/connector/msgprocessors/SolSampleSimpleMessageProcessor.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolSampleSimpleMessageProcessor.java @@ -17,10 +17,9 @@ * under the License. 
*/ -package com.solace.source.connector.msgprocessors; - -import com.solace.source.connector.SolMessageProcessor; +package com.solace.connector.kafka.connect.source.msgprocessors; +import com.solace.connector.kafka.connect.source.SolMessageProcessorIF; import com.solacesystems.jcsmp.BytesXMLMessage; //import com.solacesystems.jcsmp.DeliveryMode; import com.solacesystems.jcsmp.TextMessage; @@ -34,7 +33,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SolSampleSimpleMessageProcessor implements SolMessageProcessor { +public class SolSampleSimpleMessageProcessor implements SolMessageProcessorIF { private static final Logger log = LoggerFactory.getLogger(SolSampleSimpleMessageProcessor.class); private Object smsg; @@ -44,10 +43,10 @@ public class SolSampleSimpleMessageProcessor implements SolMessageProcessor { @Override - public SolMessageProcessor process(String skey, BytesXMLMessage msg) { + public SolMessageProcessorIF process(String skey, BytesXMLMessage msg) { this.smsg = msg; if (msg instanceof TextMessage) { - log.debug("Text Mesasge received {}", ((TextMessage) msg).getText()); + log.debug("Text Message received {}", ((TextMessage) msg).getText()); String smsg = ((TextMessage) msg).getText(); messageOut = smsg.getBytes(StandardCharsets.UTF_8); } else { @@ -57,7 +56,6 @@ public SolMessageProcessor process(String skey, BytesXMLMessage msg) { } else { // Binary attachment pay load messageOut = msg.getAttachmentByteBuffer().array(); } - } log.debug("Message Dump:{}", msg.dump()); diff --git a/src/main/java/com/solace/source/connector/msgprocessors/SolaceSampleKeyedMessageProcessor.java b/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolaceSampleKeyedMessageProcessor.java similarity index 94% rename from src/main/java/com/solace/source/connector/msgprocessors/SolaceSampleKeyedMessageProcessor.java rename to src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolaceSampleKeyedMessageProcessor.java index bff0c2a..7c72abc 100644 --- a/src/main/java/com/solace/source/connector/msgprocessors/SolaceSampleKeyedMessageProcessor.java +++ b/src/main/java/com/solace/connector/kafka/connect/source/msgprocessors/SolaceSampleKeyedMessageProcessor.java @@ -17,10 +17,9 @@ * under the License. 
*/ -package com.solace.source.connector.msgprocessors; - -import com.solace.source.connector.SolMessageProcessor; +package com.solace.connector.kafka.connect.source.msgprocessors; +import com.solace.connector.kafka.connect.source.SolMessageProcessorIF; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.TextMessage; @@ -34,7 +33,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class SolaceSampleKeyedMessageProcessor implements SolMessageProcessor { +public class SolaceSampleKeyedMessageProcessor implements SolMessageProcessorIF { private static final Logger log = LoggerFactory.getLogger(SolaceSampleKeyedMessageProcessor.class); @@ -51,7 +50,7 @@ public enum KeyHeader { protected KeyHeader keyheader = KeyHeader.NONE; @Override - public SolMessageProcessor process(String skey, BytesXMLMessage msg) { + public SolMessageProcessorIF process(String skey, BytesXMLMessage msg) { this.msg = msg; this.skey = skey.toUpperCase(); diff --git a/src/main/java/com/solace/source/connector/SolSessionCreate.java b/src/main/java/com/solace/source/connector/SolSessionCreate.java deleted file mode 100644 index f6b59be..0000000 --- a/src/main/java/com/solace/source/connector/SolSessionCreate.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package com.solace.source.connector; - -import com.solacesystems.jcsmp.InvalidPropertiesException; -import com.solacesystems.jcsmp.JCSMPChannelProperties; -import com.solacesystems.jcsmp.JCSMPException; -import com.solacesystems.jcsmp.JCSMPFactory; -import com.solacesystems.jcsmp.JCSMPProperties; -import com.solacesystems.jcsmp.JCSMPSession; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SolSessionCreate { - private static final Logger log = LoggerFactory.getLogger(SolSessionCreate.class); - - private SolaceSourceConfig lconfig; - - final JCSMPProperties properties = new JCSMPProperties(); - final JCSMPChannelProperties chanProperties = new JCSMPChannelProperties(); - private JCSMPSession session; - - private enum KeyHeader { - NONE, DESTINATION, CORRELATION_ID, CORRELATION_ID_AS_BYTES - } - - protected KeyHeader keyheader = KeyHeader.NONE; - - public SolSessionCreate(SolaceSourceConfig lconfig) { - this.lconfig = lconfig; - } - - /** - * Create JCSMPProperties to configure Solace JCSMPSession. 
- */ - public void configureSession() { - // Required Properties - properties.setProperty(JCSMPProperties.USERNAME, - lconfig.getString(SolaceSourceConstants.SOL_USERNAME)); - properties.setProperty(JCSMPProperties.PASSWORD, - lconfig.getString(SolaceSourceConstants.SOL_PASSWORD)); - properties.setProperty(JCSMPProperties.VPN_NAME, - lconfig.getString(SolaceSourceConstants.SOL_VPN_NAME)); - properties.setProperty(JCSMPProperties.HOST, lconfig.getString(SolaceSourceConstants.SOL_HOST)); - - // Channel Properties - chanProperties.setConnectTimeoutInMillis(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_connectTimeoutInMillis)); - chanProperties.setReadTimeoutInMillis(lconfig - .getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_readTimeoutInMillis)); - chanProperties.setConnectRetries(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_connectRetries)); - chanProperties.setReconnectRetries(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_reconnectRetries)); - ; - chanProperties.setConnectRetriesPerHost(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_connectRetriesPerHost)); - chanProperties.setReconnectRetryWaitInMillis( - lconfig.getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_reconnectRetryWaitInMillis)); - chanProperties.setKeepAliveIntervalInMillis( - lconfig.getInt(SolaceSourceConstants.SOL_CHANNEL_PROPERTY_keepAliveIntervalInMillis)); - chanProperties.setKeepAliveLimit(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_keepAliveLimit)); - chanProperties.setSendBuffer(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_sendBuffer)); - chanProperties.setReceiveBuffer(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_receiveBuffer)); - chanProperties.setTcpNoDelay(lconfig.getBoolean(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_tcpNoDelay)); - chanProperties.setCompressionLevel(lconfig.getInt(SolaceSourceConstants - .SOL_CHANNEL_PROPERTY_compressionLevel)); - // Add channel properties to Session Properties - properties.setProperty(JCSMPProperties.CLIENT_CHANNEL_PROPERTIES, chanProperties); - - properties.setProperty(JCSMPProperties.REAPPLY_SUBSCRIPTIONS, - lconfig.getBoolean(SolaceSourceConstants.SOL_REAPPLY_SUBSCRIPTIONS)); - properties.setBooleanProperty(JCSMPProperties.GENERATE_SEND_TIMESTAMPS, - lconfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_SEND_TIMESTAMPS)); - properties.setBooleanProperty(JCSMPProperties.GENERATE_RCV_TIMESTAMPS, - lconfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_RCV_TIMESTAMPS)); - properties.setIntegerProperty(JCSMPProperties.SUB_ACK_WINDOW_SIZE, - lconfig.getInt(SolaceSourceConstants.SOL_SUB_ACK_WINDOW_SIZE)); - properties.setBooleanProperty(JCSMPProperties.GENERATE_SEQUENCE_NUMBERS, - lconfig.getBoolean(SolaceSourceConstants.SOL_GENERATE_SEQUENCE_NUMBERS)); - properties.setBooleanProperty(JCSMPProperties.CALCULATE_MESSAGE_EXPIRATION, - lconfig.getBoolean(SolaceSourceConstants.SOL_CALCULATE_MESSAGE_EXPIRATION)); - properties.setBooleanProperty(JCSMPProperties.PUB_MULTI_THREAD, - lconfig.getBoolean(SolaceSourceConstants.SOL_PUB_MULTI_THREAD)); - properties.setBooleanProperty(JCSMPProperties.MESSAGE_CALLBACK_ON_REACTOR, - lconfig.getBoolean(SolaceSourceConstants.SOL_MESSAGE_CALLBACK_ON_REACTOR)); - properties.setBooleanProperty(JCSMPProperties.IGNORE_DUPLICATE_SUBSCRIPTION_ERROR, - lconfig.getBoolean(SolaceSourceConstants.SOL_IGNORE_DUPLICATE_SUBSCRIPTION_ERROR)); - properties.setBooleanProperty(JCSMPProperties.IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR, - 
lconfig.getBoolean(SolaceSourceConstants.SOL_IGNORE_SUBSCRIPTION_NOT_FOUND_ERROR)); - properties.setBooleanProperty(JCSMPProperties - .NO_LOCAL, lconfig.getBoolean(SolaceSourceConstants.SOL_NO_LOCAL)); - properties.setProperty(JCSMPProperties.AUTHENTICATION_SCHEME, - lconfig.getString(SolaceSourceConstants.SOl_AUTHENTICATION_SCHEME)); - properties.setProperty(JCSMPProperties.KRB_SERVICE_NAME, - lconfig.getString(SolaceSourceConstants.SOL_KRB_SERVICE_NAME)); - properties.setProperty(JCSMPProperties.SSL_CONNECTION_DOWNGRADE_TO, - lconfig.getString(SolaceSourceConstants.SOL_SSL_CONNECTION_DOWNGRADE_TO)); - properties.setIntegerProperty(JCSMPProperties.SUBSCRIBER_LOCAL_PRIORITY, - lconfig.getInt(SolaceSourceConstants.SOL_SUBSCRIBER_LOCAL_PRIORITY)); - properties.setIntegerProperty(JCSMPProperties.SUBSCRIBER_NETWORK_PRIORITY, - lconfig.getInt(SolaceSourceConstants.SOL_SUBSCRIBER_NETWORK_PRIORITY)); - - // Use SSL for connection, make sure to use the SSL port for the Solace PubSub+ - // broker connection URL - log.info("=============Attempting to use SSL for PubSub+ connection"); - if (!(lconfig.getString(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES).equals(""))) { - properties.setProperty(JCSMPProperties.SSL_CIPHER_SUITES, - lconfig.getString(SolaceSourceConstants.SOL_SSL_CIPHER_SUITES)); - } - properties.setProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE, - lconfig.getBoolean(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE)); - properties.setProperty(JCSMPProperties.SSL_VALIDATE_CERTIFICATE_DATE, - lconfig.getBoolean(SolaceSourceConstants.SOL_SSL_VALIDATE_CERTIFICATE_DATE)); - properties.setProperty(JCSMPProperties.SSL_TRUST_STORE, - lconfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE)); - properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_PASSWORD, - lconfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_PASSWORD)); - properties.setProperty(JCSMPProperties.SSL_TRUST_STORE_FORMAT, - lconfig.getString(SolaceSourceConstants.SOL_SSL_TRUST_STORE_FORMAT)); - properties.setProperty(JCSMPProperties.SSL_TRUSTED_COMMON_NAME_LIST, - lconfig.getString(SolaceSourceConstants.SOL_SSL_TRUSTED_COMMON_NAME_LIST)); - properties.setProperty(JCSMPProperties - .SSL_KEY_STORE, lconfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE)); - properties.setProperty(JCSMPProperties.SSL_KEY_STORE_PASSWORD, - lconfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_PASSWORD)); - properties.setProperty(JCSMPProperties.SSL_KEY_STORE_FORMAT, - lconfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_FORMAT)); - properties.setProperty(JCSMPProperties.SSL_KEY_STORE_NORMALIZED_FORMAT, - lconfig.getString(SolaceSourceConstants.SOL_SSL_KEY_STORE_NORMALIZED_FORMAT)); - properties.setProperty(JCSMPProperties.SSL_PRIVATE_KEY_PASSWORD, - lconfig.getString(SolaceSourceConstants.SOL_SSL_PRIVATE_KEY_PASSWORD)); - - // } - } - - /** - * Connect JCSMPSession. 
- * @return boolean result - */ - public boolean connectSession() { - - System.setProperty("java.security.auth.login.config", - lconfig.getString(SolaceSourceConstants.SOL_KERBEROS_LOGIN_CONFIG)); - System.setProperty("java.security.krb5.conf", - lconfig.getString(SolaceSourceConstants.SOL_KERBEROS_KRB5_CONFIG)); - - boolean connected = false; - try { - session = JCSMPFactory.onlyInstance( - ).createSession(properties, null, new SolSessionEventCallbackHandler()); - } catch (InvalidPropertiesException e) { - connected = false; - log.info("Received Solace excepetion {}, with the " - + "following: {} ", e.getCause(), e.getStackTrace()); - } - try { - session.connect(); - connected = true; - } catch (JCSMPException e) { - log.info("Received Solace excepetion {}, with the " - + "following: {} ", e.getCause(), e.getStackTrace()); - connected = false; - } - return connected; - - } - - public JCSMPSession getSession() { - return session; - } - - /** - * Shutdown the session. - * @return return shutdown boolean result - */ - public boolean shutdown() { - - session.closeSession(); - return true; - - } - -} diff --git a/src/main/java/com/solace/source/connector/SolaceSourceTask.java b/src/main/java/com/solace/source/connector/SolaceSourceTask.java deleted file mode 100644 index 01663d2..0000000 --- a/src/main/java/com/solace/source/connector/SolaceSourceTask.java +++ /dev/null @@ -1,235 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package com.solace.source.connector; - -import com.solacesystems.jcsmp.BytesXMLMessage; -import com.solacesystems.jcsmp.DeliveryMode; -import com.solacesystems.jcsmp.JCSMPProperties; -import com.solacesystems.jcsmp.JCSMPSession; -import com.solacesystems.jcsmp.JCSMPSessionStats; -import com.solacesystems.jcsmp.statistics.StatType; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.Enumeration; -import java.util.List; -import java.util.Map; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; - -import org.apache.kafka.connect.source.SourceRecord; -import org.apache.kafka.connect.source.SourceTask; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - - - -public class SolaceSourceTask extends SourceTask { // implements XMLMessageListener{ - - private static final Logger log = LoggerFactory.getLogger(SolaceSourceTask.class); - - final JCSMPProperties properties = new JCSMPProperties(); - - SolaceSourceConfig sconfig; - SolaceSourceTopicListener listener; - SolaceSourceQueueConsumer consumer; - BlockingQueue<BytesXMLMessage> squeue - = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for Solace Topic - // subscription messages - BlockingQueue<BytesXMLMessage> gqueue - = new LinkedBlockingQueue<>(); // LinkedBlockingQueue for Solace Flow messages - // from Solace Queue - String skafkaTopic; - boolean topicListenStarted = true; - boolean queueConsumerStarted = true; - - private SolSessionCreate sessionRef; - private JCSMPSession session; - - // private Class cProcessor; - private SolMessageProcessor processor; - - private static int BATCH_SIZE = 500; - private int processed = 0; - private int fmsgProcessed = 0; - - @Override - public String version() { - return VersionUtil.getVersion(); - } - - @Override - public void start(Map<String, String> props) { - - sconfig = new SolaceSourceConfig(props); - skafkaTopic = sconfig.getString(SolaceSourceConstants.KAFKA_TOPIC); - - sessionRef = new SolSessionCreate(sconfig); - sessionRef.configureSession(); - boolean connected = sessionRef.connectSession(); - if (!connected) { - log.info("============Failed to create Solace Session"); - stop(); - } - session = sessionRef.getSession(); - if (session != null) { - log.info("======================JCSMPSession Connected"); - } else { - log.info("======================Failed to create JCSMPSession"); - stop(); - } - - if (sconfig.getString(SolaceSourceConstants.SOL_TOPICS) != null) { - listener = new SolaceSourceTopicListener(sconfig, squeue); - topicListenStarted = listener.init(session); - if (topicListenStarted == false) { - log.info("===============Failed to start topic consumer ... shutting down"); - stop(); - } - } - - if (sconfig.getString(SolaceSourceConstants.SOl_QUEUE) != null) { - consumer = new SolaceSourceQueueConsumer(sconfig, squeue); - queueConsumerStarted = consumer.init(session); - if (queueConsumerStarted == false) { - log.info("===============Failed to start queue consumer ... 
shutting down"); - stop(); - } - } - - } - - @Override - public List<SourceRecord> poll() throws InterruptedException { - - List<SourceRecord> records = new ArrayList<>(); - int arraySize = squeue.size(); - - // Block waiting for a record to arrive or process in batches depending on the - // number of records in array to process - if (squeue.size() == 0) { - BytesXMLMessage msg = squeue.take(); // Blocks here until there is a message - processor = sconfig.getConfiguredInstance(SolaceSourceConstants - .SOL_MESSAGE_PROCESSOR, SolMessageProcessor.class) - .process(sconfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); - Collections.addAll(records, processor.getRecords(skafkaTopic)); - processed++; - if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT - || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { - gqueue.add(msg); - fmsgProcessed++; - } - - } else if (squeue.size() < BATCH_SIZE) { - int count = 0; - arraySize = squeue.size(); - while (count < arraySize) { - BytesXMLMessage msg = squeue.take(); - processor = sconfig - .getConfiguredInstance(SolaceSourceConstants - .SOL_MESSAGE_PROCESSOR, SolMessageProcessor.class) - .process(sconfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); - Collections.addAll(records, processor.getRecords(skafkaTopic)); - count++; - processed++; - if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT - || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { - gqueue.add(msg); - fmsgProcessed++; - } - } - } else if (squeue.size() >= BATCH_SIZE) { - int count = 0; - int currentLoad = squeue.size(); - while (count < currentLoad) { - BytesXMLMessage msg = squeue.take(); - processor = sconfig - .getConfiguredInstance(SolaceSourceConstants - .SOL_MESSAGE_PROCESSOR, SolMessageProcessor.class) - .process(sconfig.getString(SolaceSourceConstants.SOL_KAFKA_MESSAGE_KEY), msg); - Collections.addAll(records, processor.getRecords(skafkaTopic)); - count++; - processed++; - if (msg.getDeliveryMode() == DeliveryMode.NON_PERSISTENT - || msg.getDeliveryMode() == DeliveryMode.PERSISTENT) { - gqueue.add(msg); - fmsgProcessed++; - } - } - - } - - if (fmsgProcessed > 0) { - commit(); - - } - - log.debug("Processed {} records in this batch.", processed); - processed = 0; - return records; - - } - - /** - * Kakfa Connect method that write records to disk. - */ - public synchronized void commit() throws InterruptedException { - log.trace("Committing records"); - int currentLoad = gqueue.size(); - int count = 0; - while (count != currentLoad) { - gqueue.take().ackMessage(); - count++; - } - fmsgProcessed = 0; - - } - - @Override - public void stop() { - if (session != null) { - JCSMPSessionStats lastStats = session.getSessionStats(); - Enumeration<StatType> estats = StatType.elements(); - log.info("Final Statistics summary:"); - - while (estats.hasMoreElements()) { - StatType statName = estats.nextElement(); - System.out.println("\t" + statName.getLabel() + ": " + lastStats.getStat(statName)); - } - log.info("\n"); - } - boolean ok = true; - log.info("==================Shutting down Solace Source Connector"); - if (listener != null) { - ok = listener.shutdown(); - } - if (consumer != null) { - ok = consumer.shutdown(); - } - - ok = sessionRef.shutdown(); - - if (!(ok)) { - log.info("Solace session failed to shutdown"); - } - - } - -}
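A point worth noting in this deletion: the old task called commit() from inside poll() whenever guaranteed messages had been processed, decoupling the PubSub+ acknowledgment from Kafka Connect's own commit cycle. The replacement task instead parks guaranteed messages in outstandingAckList and acknowledges them only when the framework calls commit(). Distilled into a standalone sketch (class and method names below are assumed for illustration; `ackMessage()` is the JCSMP client-ack call used throughout this diff):

```java
import com.solacesystems.jcsmp.BytesXMLMessage;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Illustrative sketch of the deferred-ack pattern in the new SolaceSourceTask.
public class DeferredAckSketch {
  private final BlockingQueue<BytesXMLMessage> outstandingAckList = new LinkedBlockingQueue<>();

  // Called from poll() for every guaranteed (PERSISTENT / NON_PERSISTENT) message.
  void park(BytesXMLMessage msg) {
    outstandingAckList.add(msg);
  }

  // Called from commit(): ack exactly the messages outstanding at this point.
  void ackOutstanding() throws InterruptedException {
    int outstanding = outstandingAckList.size();
    for (int i = 0; i < outstanding; i++) {
      outstandingAckList.take().ackMessage(); // client-ack back to the PubSub+ flow
    }
  }
}
```

Because nothing is acked until commit(), a task that dies between poll() and commit() leaves its messages un-acked on the broker, and they are redelivered on reconnect, giving at-least-once delivery into Kafka.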