From d3de6fda248ec6f6aa6d817f3c7b3b080e8f7665 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 24 Nov 2021 10:48:29 -0500 Subject: [PATCH 01/12] Bump sol-jcsmp from 10.12.0 to 10.12.1 (#41) Bumps sol-jcsmp from 10.12.0 to 10.12.1. --- updated-dependencies: - dependency-name: com.solacesystems:sol-jcsmp dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index db66b72..5878463 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ plugins { ext { kafkaVersion = '2.8.1' - solaceJavaAPIVersion = '10.12.0' + solaceJavaAPIVersion = '10.12.1' } repositories { From 868caf83f418d1fde09b84094e9d087cdfee5697 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Mon, 29 Nov 2021 16:16:27 -0500 Subject: [PATCH 02/12] add transacted publish to topics --- .gitignore | 5 +- README.md | 12 +- build.gradle | 2 +- etc/solace_sink.properties | 5 +- solace-integration-test-support | 2 +- .../connect/sink/SolaceProducerHandlerIT.java | 255 ++++++++++++++++++ .../connect/sink/it/SinkConnectorIT.java | 43 ++- .../connect/sink/it/SolaceSinkTaskIT.java | 16 +- .../connect/sink/SolProducerHandler.java | 182 +++++++++++++ .../kafka/connect/sink/SolSessionHandler.java | 20 +- .../sink/SolaceSinkConnectorConfig.java | 12 +- .../connect/sink/SolaceSinkConstants.java | 3 +- .../kafka/connect/sink/SolaceSinkSender.java | 103 ++----- .../kafka/connect/sink/SolaceSinkTask.java | 25 +- .../sink/SolaceSinkConnectorConfigTest.java | 26 +- .../connect/sink/SolaceSinkSenderTest.java | 61 ++++- 16 files changed, 646 insertions(+), 126 deletions(-) create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java create mode 100644 src/main/java/com/solace/connector/kafka/connect/sink/SolProducerHandler.java diff --git a/.gitignore b/.gitignore index d2e5889..4d2299d 100644 --- a/.gitignore +++ b/.gitignore @@ -45,4 +45,7 @@ local.properties *.ipr # Unzipped test connector -src/integrationTest/resources/pubsubplus-connector-kafka*/ \ No newline at end of file +src/integrationTest/resources/pubsubplus-connector-kafka*/ + +# Local testing +solace.properties diff --git a/README.md b/README.md index 3732bce..a4aadc3 100644 --- a/README.md +++ b/README.md @@ -258,17 +258,25 @@ A value of 0 results in the replay of the entire Kafka Topic. A positive value r We recommend using PubSub+ Topics if high throughput is required and the Kafka Topic is configured for high performance. Message duplication and loss mimics the underlying reliability and QoS configured for the Kafka topic. +By default, messages are published to topics using direct messaging. To publish persistent messages to topics using a local transaction, set `sol.use_transactions_for_topics` to `true`. See [Sending with Local Transactions](#sending-with-local-transactions) for more info. + #### Sending to PubSub+ Queue When Kafka records reliability is critical, we recommend configuring the Sink Connector to send records to the Event Mesh using PubSub+ queues at the cost of reduced throughput. 
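Both the topic- and queue-transaction settings above can also be assembled programmatically, much as the integration tests in this patch do. A minimal sketch, assuming placeholder destination names (the property keys are the ones defined in `SolaceSinkConstants`):

```java
import java.util.HashMap;
import java.util.Map;

/** Minimal sketch: sink settings for transacted, persistent publishing. */
public class TransactedSinkConfigSketch {
  public static Map<String, String> sinkProps() {
    Map<String, String> props = new HashMap<>();
    props.put("sol.queue", "kafka-sink-queue");           // placeholder queue name
    props.put("sol.topics", "sink/topic/a,sink/topic/b"); // placeholder comma-separated topic list
    props.put("sol.use_transactions_for_queue", "true");  // default: true
    props.put("sol.use_transactions_for_topics", "true"); // default: false
    props.put("sol.autoflush.size", "200");               // auto-commit threshold; must be < 255
    return props;
  }
}
```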
A PubSub+ queue guarantees order of delivery, provides High Availability and Disaster Recovery (depending on the setup of the PubSub+ brokers) and provides an acknowledgment to the connector when the event is stored in all HA and DR members and flushed to disk. This is a higher guarantee than is provided by Kafka, even for Kafka idempotent delivery. -The connector uses local transactions to deliver to the queue by default. The transaction is committed if messages are flushed by Kafka Connect (see below how to tune flush interval) or the outstanding messages size reaches the `sol.autoflush.size` (default 200) configuration. +The connector uses local transactions to deliver to the queue by default. See [Sending with Local Transactions](#sending-with-local-transactions) for more info. Note that generally one connector can send to only one queue. -##### Recovery from Kafka Connect API or Kafka Broker Failure +#### Sending with Local Transactions + +By default, only sending to a queue uses local transactions. To use the transacted session to send persistent messages to topics, set `sol.use_transactions_for_topics` to `true`. + +The transaction is committed if messages are flushed by Kafka Connect (see [below how to tune flush interval](#recovery-from-kafka-connect-api-or-kafka-broker-failure)) or the outstanding messages size reaches the `sol.autoflush.size` (default 200) configuration. + +#### Recovery from Kafka Connect API or Kafka Broker Failure Operators are expected to monitor their connector for failures since errors will cause it to stop. If any are found and the connector was stopped, the operator must explicitly restart it again once the error condition has been resolved. diff --git a/build.gradle b/build.gradle index 5878463..95805bd 100644 --- a/build.gradle +++ b/build.gradle @@ -38,7 +38,7 @@ dependencies { integrationTestImplementation 'org.testcontainers:testcontainers:1.16.0' integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0' integrationTestImplementation 'org.testcontainers:kafka:1.16.0' - integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.5.0' + integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.6.0' integrationTestImplementation 'org.slf4j:slf4j-api:1.7.32' integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6' diff --git a/etc/solace_sink.properties b/etc/solace_sink.properties index ae26863..b757417 100644 --- a/etc/solace_sink.properties +++ b/etc/solace_sink.properties @@ -52,7 +52,10 @@ sol.record_processor_class=com.solace.connector.kafka.connect.sink.recordprocess # Whether to use transacted session and transactions to publish messages to PubSub+ queue #sol.use_transactions_for_queue=true -# Max outstanding number of transacted messages if using transactions to reliably publish records to a queue. Must be <255 +# When true, messages published to topics will use persistent delivery type using transactions. +#sol.use_transactions_for_topics=false + +# Max outstanding number of transacted messages if using transactions to reliably publish records to a queue or topic. Must be <255 # If outstanding messages limit is reached will auto-commit - will not wait for Kafka Connect "flush" initiated. 
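# For example, with sol.autoflush.size=2 the connector auto-commits after every second transacted message
# (the outstanding-message counter resets to 0 on each commit)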
#sol.autoflush.size=200 diff --git a/solace-integration-test-support b/solace-integration-test-support index c411ac2..f42ff27 160000 --- a/solace-integration-test-support +++ b/solace-integration-test-support @@ -1 +1 @@ -Subproject commit c411ac2e0f82af25ece2994691352cb0d6235142 +Subproject commit f42ff27fd8bdb2e350d8a6939e4edd52a1fecdf3 diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java new file mode 100644 index 0000000..fd0e966 --- /dev/null +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java @@ -0,0 +1,255 @@ +package com.solace.connector.kafka.connect.sink; + +import com.solace.connector.kafka.connect.sink.it.util.extensions.NetworkPubSubPlusExtension; +import com.solacesystems.jcsmp.BytesXMLMessage; +import com.solacesystems.jcsmp.ConsumerFlowProperties; +import com.solacesystems.jcsmp.DeliveryMode; +import com.solacesystems.jcsmp.Destination; +import com.solacesystems.jcsmp.Endpoint; +import com.solacesystems.jcsmp.FlowReceiver; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPFactory; +import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; +import com.solacesystems.jcsmp.Queue; +import com.solacesystems.jcsmp.TextMessage; +import com.solacesystems.jcsmp.Topic; +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junitpioneer.jupiter.CartesianProductTest; +import org.junitpioneer.jupiter.CartesianValueSource; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; + +@ExtendWith(NetworkPubSubPlusExtension.class) +public class SolaceProducerHandlerIT { + private Map properties; + private SolSessionHandler sessionHandler; + private SolProducerHandler producerHandler; + + private static final Logger logger = LoggerFactory.getLogger(SolaceProducerHandlerIT.class); + + @BeforeEach + void setUp(JCSMPProperties jcsmpProperties) { + properties = new HashMap<>(); + properties.put(SolaceSinkConstants.SOL_HOST, jcsmpProperties.getStringProperty(JCSMPProperties.HOST)); + properties.put(SolaceSinkConstants.SOL_VPN_NAME, jcsmpProperties.getStringProperty(JCSMPProperties.VPN_NAME)); + properties.put(SolaceSinkConstants.SOL_USERNAME, jcsmpProperties.getStringProperty(JCSMPProperties.USERNAME)); + properties.put(SolaceSinkConstants.SOL_PASSWORD, jcsmpProperties.getStringProperty(JCSMPProperties.PASSWORD)); + } + + @AfterEach + void tearDown() { + if (producerHandler != null) { + producerHandler.close(); + } + + if (sessionHandler != null) { + sessionHandler.shutdown(); + } + } + + @CartesianProductTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, 
sendToTopic={3}") + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + public void testStaticDestinations(boolean txnQueue, boolean txnTopic, boolean sendToQueue, boolean sendToTopic, + JCSMPSession session, Queue queue) throws Exception { + properties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + properties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(txnQueue)); + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(txnTopic)); + + SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig(properties); + sessionHandler = new SolSessionHandler(config); + sessionHandler.configureSession(); + sessionHandler.connectSession(); + producerHandler = new SolProducerHandler(config, sessionHandler, null); + + assertNotNull(producerHandler.producer); + assertNotNull(producerHandler.queueProducer); + assertNotNull(producerHandler.topicProducer); + assertEquals(txnTopic ? producerHandler.transactedProducer : producerHandler.producer, producerHandler.topicProducer); + + if (txnQueue || txnTopic) { + assertNotNull(sessionHandler.getTxSession()); + assertNotNull(producerHandler.transactedProducer); + assertNotEquals(producerHandler.producer, producerHandler.transactedProducer); + } else { + assertNull(sessionHandler.getTxSession()); + assertNull(producerHandler.transactedProducer); + } + + if (txnQueue) { + assertEquals(producerHandler.transactedProducer, producerHandler.queueProducer); + } else { + assertNotEquals(producerHandler.transactedProducer, producerHandler.queueProducer); + } + + if (sendToQueue) { + sendAndAssert(session, properties.get(SolaceSinkConstants.SOl_QUEUE), Queue.class, queue, txnQueue); + } + + if (sendToTopic) { + sendAndAssert(session, properties.get(SolaceSinkConstants.SOL_TOPICS), Topic.class, queue, txnTopic); + } + } + + @CartesianProductTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, sendToTopic={3}") + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + @CartesianValueSource(booleans = {true, false}) + public void testLazyInit(boolean txnQueue, boolean txnTopic, boolean sendToQueue, boolean sendToTopic, + JCSMPSession session, Queue queue) throws Exception { + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(txnQueue)); + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(txnTopic)); + + SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig(properties); + sessionHandler = new SolSessionHandler(config); + sessionHandler.configureSession(); + sessionHandler.connectSession(); + producerHandler = new SolProducerHandler(config, sessionHandler, null); + + assertNotNull(producerHandler.producer); + assertNull(sessionHandler.getTxSession()); + assertNull(producerHandler.transactedProducer); + assertNull(producerHandler.topicProducer); + assertNull(producerHandler.queueProducer); + + if (sendToQueue) { + sendAndAssert(session, queue.getName(), Queue.class, queue, txnQueue); + } + + if (sendToTopic) { + sendAndAssert(session, RandomStringUtils.randomAlphanumeric(100), Topic.class, queue, txnTopic); + } + + if ((sendToQueue && txnQueue) || (sendToTopic && txnTopic)) { + 
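// A transacted send occurred, so the shared transacted session and producer should now have been lazily created.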
assertNotNull(sessionHandler.getTxSession()); + assertNotNull(producerHandler.transactedProducer); + assertNotEquals(producerHandler.producer, producerHandler.transactedProducer); + } else { + assertNull(sessionHandler.getTxSession()); + assertNull(producerHandler.transactedProducer); + } + + if (sendToQueue) { + assertNotNull(producerHandler.queueProducer); + if (txnQueue) { + assertEquals(producerHandler.transactedProducer, producerHandler.queueProducer); + } else { + assertNotEquals(producerHandler.transactedProducer, producerHandler.queueProducer); + } + } else { + assertNull(producerHandler.queueProducer); + } + + if (sendToTopic) { + assertNotNull(producerHandler.topicProducer); + assertEquals(txnTopic ? producerHandler.transactedProducer : producerHandler.producer, producerHandler.topicProducer); + } else { + assertNull(producerHandler.topicProducer); + } + } + + @Test + public void testAutoFlush(JCSMPSession session, Queue queue) throws Exception { + List topics = IntStream.range(0, 9) + .mapToObj(i -> RandomStringUtils.randomAlphanumeric(100)) + .collect(Collectors.toList()); + properties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + properties.put(SolaceSinkConstants.SOL_TOPICS, String.join(",", topics)); + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + properties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(2)); + final int expectedNumCommits = (topics.size() + 1) / + Integer.parseInt(properties.get(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE)); + + SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig(properties); + sessionHandler = new SolSessionHandler(config); + sessionHandler.configureSession(); + sessionHandler.connectSession(); + AtomicInteger numCommits = new AtomicInteger(); + producerHandler = new SolProducerHandler(config, sessionHandler, () -> { + try { + sessionHandler.getTxSession().commit(); + producerHandler.getTxMsgCount().set(0); + } catch (JCSMPException e) { + throw new RuntimeException(e); + } + logger.info("Commit #{}", numCommits.incrementAndGet()); + }); + + sendAndAssert(session, queue.getName(), Queue.class, queue, true, false, false); + for (String topic : topics) { + sendAndAssert(session, topic, Topic.class, queue, true, false, false); + } + + assertEquals(expectedNumCommits, numCommits.get()); + } + + private void sendAndAssert(JCSMPSession session, String destination, Class destinationType, + Endpoint receiveEndpoint, boolean isTransacted) throws JCSMPException { + sendAndAssert(session, destination, destinationType, receiveEndpoint, isTransacted, isTransacted, true); + } + + private void sendAndAssert(JCSMPSession session, String destination, Class destinationType, + Endpoint receiveEndpoint, boolean isTransacted, boolean doCommit, boolean doConsume) + throws JCSMPException { + Destination sendDestination; + if (destinationType.isAssignableFrom(Queue.class)) { + sendDestination = JCSMPFactory.onlyInstance().createQueue(destination); + } else { + Topic topic = JCSMPFactory.onlyInstance().createTopic(destination); + session.addSubscription(receiveEndpoint, topic, JCSMPSession.WAIT_FOR_CONFIRM); + sendDestination = topic; + } + + TextMessage message = Mockito.spy(JCSMPFactory.onlyInstance().createMessage(TextMessage.class)); + message.setText(RandomStringUtils.randomAlphanumeric(100)); + producerHandler.send(message, sendDestination); + if (doCommit) { + 
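// Messages sent through a transacted producer are not visible to consumers until the transaction is committed.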
sessionHandler.getTxSession().commit(); + } + + if (doConsume) { + ConsumerFlowProperties consumerFlowProperties = new ConsumerFlowProperties(); + consumerFlowProperties.setEndpoint(receiveEndpoint); + consumerFlowProperties.setStartState(true); + FlowReceiver flowReceiver = session.createFlow(null, consumerFlowProperties); + BytesXMLMessage receivedMessage; + try { + receivedMessage = flowReceiver.receive(Math.toIntExact(TimeUnit.SECONDS.toMillis(30))); + } finally { + flowReceiver.close(); + } + + assertInstanceOf(TextMessage.class, receivedMessage); + assertEquals(message.getText(), ((TextMessage) receivedMessage).getText()); + assertEquals(destination, receivedMessage.getDestination().getName()); + } + + Mockito.verify(message).setDeliveryMode(destinationType.isAssignableFrom(Queue.class) || isTransacted ? + DeliveryMode.PERSISTENT : DeliveryMode.DIRECT); + } +} diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java index b439d30..8a81c35 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java @@ -12,7 +12,9 @@ import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; import com.solace.test.integration.semp.v2.SempV2Api; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueue; +import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueueSubscription; import com.solacesystems.jcsmp.BytesXMLMessage; +import com.solacesystems.jcsmp.Destination; import com.solacesystems.jcsmp.EndpointProperties; import com.solacesystems.jcsmp.JCSMPException; import com.solacesystems.jcsmp.JCSMPFactory; @@ -21,6 +23,7 @@ import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.SDTException; import com.solacesystems.jcsmp.SDTMap; +import org.apache.commons.lang3.RandomStringUtils; import org.apache.kafka.clients.producer.RecordMetadata; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Assertions; @@ -50,10 +53,13 @@ import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import static org.apache.commons.lang3.RandomStringUtils.randomAlphanumeric; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.hasItems; +import static org.hamcrest.Matchers.hasSize; import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -137,8 +143,9 @@ RecordMetadata sendMessagetoKafka(TestKafkaProducer producer, String kafkaKey, S return metadata; } - void assertMessageReceived(String expectedSolaceQueue, String[] expectedSolaceTopics, RecordMetadata metadata, - Map additionalChecks) throws SDTException, InterruptedException { + List assertMessageReceived(String expectedSolaceQueue, String[] expectedSolaceTopics, + RecordMetadata metadata, Map additionalChecks) + throws SDTException, InterruptedException { List receivedMessages = new ArrayList<>(); // Wait for PubSub+ to report messages - populate queue and topics if provided @@ -188,6 +195,8 @@ void assertMessageReceived(String expectedSolaceQueue, String[] 
expectedSolaceTo } } } + + return receivedMessages; } //////////////////////////////////////////////////// @@ -440,6 +449,12 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, @ExecSvc(poolSize = 2, scheduled = true) ScheduledExecutorService executorService) throws Exception { Queue queue = JCSMPFactory.onlyInstance().createQueue(randomAlphanumeric(100)); + connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + connectorProps.setProperty("errors.retry.timeout", Long.toString(-1)); try (TestSolaceQueueConsumer solaceConsumer1 = new TestSolaceQueueConsumer(jcsmpSession)) { EndpointProperties endpointProperties = new EndpointProperties(); @@ -447,15 +462,14 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, solaceConsumer1.provisionQueue(queue.getName(), endpointProperties); solaceConsumer1.start(); - connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, queue.getName()); - connectorProps.setProperty(SolaceSinkConstants.SOL_QUEUE_MESSAGES_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 1 : 100)); - connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); - connectorProps.setProperty("errors.retry.timeout", Long.toString(-1)); + sempV2Api.config().createMsgVpnQueueSubscription(SOL_VPN, queue.getName(), new ConfigMsgVpnQueueSubscription() + .subscriptionTopic((String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)), null); + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); clearReceivedMessages(); String recordValue = randomAlphanumeric(100); - Future recordMetadata = executorService.schedule(() -> + Future recordMetadataFuture = executorService.schedule(() -> sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), recordValue), 5, TimeUnit.SECONDS); @@ -477,8 +491,19 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, .getAsJsonArray("tasks").get(0).getAsJsonObject().get("state").getAsString()); sempV2Api.config().updateMsgVpnQueue(SOL_VPN, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(10000000), null); - assertMessageReceived(queue.getName(), new String[0], recordMetadata.get(30, TimeUnit.SECONDS), - ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, recordValue)); + List receivedMsgDestinations = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + logger.info("Checking consumption of message {}", i); + receivedMsgDestinations.addAll(assertMessageReceived(queue.getName(), new String[0], + recordMetadataFuture.get(30, TimeUnit.SECONDS), + ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, recordValue)) + .stream() + .map(BytesXMLMessage::getDestination) + .collect(Collectors.toList())); + } + assertThat(receivedMsgDestinations, hasSize(2)); + assertThat(receivedMsgDestinations, hasItems(queue, JCSMPFactory.onlyInstance() + .createTopic((String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)))); } finally { jcsmpSession.deprovision(queue, JCSMPSession.FLAG_IGNORE_DOES_NOT_EXIST); } diff --git 
a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java index c38b12a..e01cbd0 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java @@ -14,6 +14,7 @@ import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnClientProfile; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnClientUsername; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueue; +import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueueSubscription; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.ClosedFacilityException; import com.solacesystems.jcsmp.JCSMPException; @@ -155,7 +156,6 @@ public void testFailTransactedSessionCreation(SempV2Api sempV2Api, Queue queue) new ConfigMsgVpnClientProfile().allowTransactedSessionsEnabled(false), null); ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.start(connectorProperties)); - assertThat(thrown.getMessage(), containsString("Failed to create Transacted Session")); assertThat(thrown.getCause(), instanceOf(JCSMPException.class)); assertThat(thrown.getCause().getMessage(), containsString("Router does not support transacted sessions")); } @@ -285,9 +285,13 @@ public void testRecordProcessorError(boolean ignoreRecordProcessorError, @Test public void testCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exception { connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME); + sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription() + .subscriptionTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)), null); sempV2Api.config().updateMsgVpnQueue(vpnName, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(1), null); assertTimeoutPreemptively(Duration.ofSeconds(20), () -> { @@ -312,15 +316,21 @@ public void testCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exceptio assertThat(thrown.getMessage(), containsString("Error in committing transaction")); assertThat(thrown.getCause(), instanceOf(RollbackException.class)); assertThat(thrown.getCause().getMessage(), containsString("Document Is Too Large")); + assertEquals(2, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) + .getData().getMaxMsgSizeExceededDiscardedMsgCount()); } @Test public void testAutoFlushCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exception { connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); - connectorProperties.put(SolaceSinkConstants.SOL_QUEUE_MESSAGES_AUTOFLUSH_SIZE, Integer.toString(1)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + 
connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(2)); String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME); + sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription() + .subscriptionTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)), null); sempV2Api.config().updateMsgVpnQueue(vpnName, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(1), null); assertTimeoutPreemptively(Duration.ofSeconds(20), () -> { @@ -341,6 +351,8 @@ public void testAutoFlushCommitRollback(SempV2Api sempV2Api, Queue queue) throws assertThat(thrown.getMessage(), containsString("Error in committing transaction")); assertThat(thrown.getCause(), instanceOf(RollbackException.class)); assertThat(thrown.getCause().getMessage(), containsString("Document Is Too Large")); + assertEquals(2, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) + .getData().getMaxMsgSizeExceededDiscardedMsgCount()); } public static class BadRecordProcessor implements SolRecordProcessorIF { diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolProducerHandler.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolProducerHandler.java new file mode 100644 index 0000000..41bf46f --- /dev/null +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolProducerHandler.java @@ -0,0 +1,182 @@ +package com.solace.connector.kafka.connect.sink; + +import com.solacesystems.jcsmp.DeliveryMode; +import com.solacesystems.jcsmp.Destination; +import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.ProducerFlowProperties; +import com.solacesystems.jcsmp.Queue; +import com.solacesystems.jcsmp.XMLMessage; +import com.solacesystems.jcsmp.XMLMessageProducer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +public class SolProducerHandler implements AutoCloseable { + private final SolaceSinkConnectorConfig config; + private final SolSessionHandler sessionHandler; + final XMLMessageProducer producer; + private final AtomicInteger txMsgCount = new AtomicInteger(); + private final Runnable txAutoFlushCallback; + private final ReadWriteLock lock = new ReentrantReadWriteLock(); + volatile XMLMessageProducer transactedProducer; + volatile XMLMessageProducer topicProducer; + volatile XMLMessageProducer queueProducer; + + private static final Logger log = LoggerFactory.getLogger(SolProducerHandler.class); + + public SolProducerHandler(final SolaceSinkConnectorConfig config, + final SolSessionHandler sessionHandler, + final Runnable txAutoFlushCallback) throws JCSMPException { + this.config = config; + this.sessionHandler = sessionHandler; + this.txAutoFlushCallback = txAutoFlushCallback; + this.producer = sessionHandler.getSession().getMessageProducer(new SolStreamingMessageCallbackHandler()); + + if (config.getString(SolaceSinkConstants.SOl_QUEUE) != null) { + // pre-init producer if queues are statically defined + initQueueProducer(); + } + + if (config.getTopics().length > 0) { + // pre-init producer if topics are statically defined + initTopicProducer(); + } + } + + public void send(final XMLMessage message, final Destination destination) throws JCSMPException { + if (destination instanceof Queue) { + if (queueProducer == null) { + initQueueProducer(); + } + } else { + if 
(topicProducer == null) { + initTopicProducer(); + } + } + + Lock readLock = this.lock.readLock(); + readLock.lock(); + try { + if (destination instanceof Queue) { + message.setDeliveryMode(DeliveryMode.PERSISTENT); + queueProducer.send(message, destination); + if (config.getBoolean(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE)) { + autoFlush(); + } + } else { + if (config.getBoolean(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS)) { + message.setDeliveryMode(DeliveryMode.PERSISTENT); + topicProducer.send(message, destination); + autoFlush(); + } else { + message.setDeliveryMode(DeliveryMode.DIRECT); + topicProducer.send(message, destination); + } + } + } finally { + readLock.unlock(); + } + } + + public AtomicInteger getTxMsgCount() { + return txMsgCount; + } + + private void autoFlush() { + int txMsgCnt = txMsgCount.incrementAndGet(); + log.trace("================ Count of TX message is now: {}", txMsgCnt); + if (txMsgCnt > config.getInt(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE)-1) { + txAutoFlushCallback.run(); + } + } + + private void initTopicProducer() throws JCSMPException { + Lock writeLock = this.lock.writeLock(); + writeLock.lock(); + try { + if (topicProducer == null) { + if (config.getBoolean(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS)) { + this.topicProducer = createTransactedProducer(); + } else { + this.topicProducer = producer; + } + } + } finally { + writeLock.unlock(); + } + } + + private void initQueueProducer() throws JCSMPException { + Lock writeLock = this.lock.writeLock(); + writeLock.lock(); + try { + if (queueProducer == null) { + if (config.getBoolean(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE)) { + // Using transacted session for queue + queueProducer = createTransactedProducer(); + } else { + // Not using transacted session for queue + queueProducer = sessionHandler.getSession().createProducer(createProducerFlowProperties(), + new SolStreamingMessageCallbackHandler(), new SolProducerEventCallbackHandler()); + } + } + } finally { + writeLock.unlock(); + } + } + + private XMLMessageProducer createTransactedProducer() throws JCSMPException { + if (transactedProducer == null) { + Lock writeLock = this.lock.writeLock(); + writeLock.lock(); + try { + if (transactedProducer == null) { + sessionHandler.createTxSession(); + transactedProducer = sessionHandler.getTxSession().createProducer(createProducerFlowProperties(), + new SolStreamingMessageCallbackHandler(), new SolProducerEventCallbackHandler()); + log.info("================ txSession status: {}", + sessionHandler.getTxSession().getStatus().toString()); + } + } finally { + writeLock.unlock(); + } + } + return transactedProducer; + } + + private ProducerFlowProperties createProducerFlowProperties() { + ProducerFlowProperties flowProps = new ProducerFlowProperties(); + flowProps.setAckEventMode(config.getString(SolaceSinkConstants.SOL_ACK_EVENT_MODE)); + flowProps.setWindowSize(config.getInt(SolaceSinkConstants.SOL_PUBLISHER_WINDOW_SIZE)); + return flowProps; + } + + @Override + public void close() { + Lock writeLock = lock.writeLock(); + writeLock.lock(); + try { + if (queueProducer != null && !queueProducer.isClosed()) { + queueProducer.close(); + } + + if (topicProducer != null && !topicProducer.isClosed()) { + topicProducer.close(); + } + + if (transactedProducer != null && !transactedProducer.isClosed()) { + transactedProducer.close(); + } + + if (producer != null && !producer.isClosed()) { + producer.close(); + } + } finally { + writeLock.unlock(); + } + } +} diff --git 
a/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java index 9ce427c..2f1c37a 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java @@ -30,6 +30,8 @@ import java.util.Enumeration; import java.util.Optional; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import org.apache.kafka.common.config.types.Password; import org.slf4j.Logger; @@ -39,12 +41,13 @@ public class SolSessionHandler { private static final Logger log = LoggerFactory.getLogger(SolSessionHandler.class); - private SolaceSinkConnectorConfig lconfig; + private final SolaceSinkConnectorConfig lconfig; final JCSMPProperties properties = new JCSMPProperties(); final JCSMPChannelProperties chanProperties = new JCSMPChannelProperties(); private JCSMPSession session = null; - private TransactedSession txSession = null; + private volatile TransactedSession txSession = null; + private final Lock lock = new ReentrantLock(); public SolSessionHandler(SolaceSinkConnectorConfig lconfig) { this.lconfig = lconfig; @@ -180,11 +183,20 @@ public void connectSession() throws JCSMPException { /** * Create transacted session - * @return TransactedSession * @throws JCSMPException */ public void createTxSession() throws JCSMPException { - txSession = session.createTransactedSession(); + if (txSession == null) { + lock.lock(); + try { + if (txSession == null) { + txSession = session.createTransactedSession(); + log.info("Transacted Session has been created"); + } + } finally { + lock.unlock(); + } + } } public JCSMPSession getSession() { diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfig.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfig.java index f6db799..8e86900 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfig.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfig.java @@ -20,6 +20,7 @@ package com.solace.connector.kafka.connect.sink; import java.util.Map; +import java.util.Optional; import org.apache.kafka.common.config.AbstractConfig; import org.apache.kafka.common.config.ConfigDef; @@ -118,7 +119,7 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSinkConstants.SOL_SUB_ACK_WINDOW_SIZE, Type.INT, 255, Importance.LOW, "The size of the sliding subscriber ACK window. The valid range is 1-255") - .define(SolaceSinkConstants.SOL_QUEUE_MESSAGES_AUTOFLUSH_SIZE, + .define(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Type.INT, 200, Importance.LOW, "Number of outstanding transacted messages before autoflush. Must be lower than " + "max PubSub+ transaction size (255). 
The valid range is 1-200") @@ -137,6 +138,9 @@ public static ConfigDef solaceConfigDef() { .define(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Type.BOOLEAN, true, Importance.LOW, "Specifies if writing messages to queue destination shall use transactions.") + .define(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, + Type.BOOLEAN, false, Importance.LOW, + "When true, messages published to topics will use persistent delivery type using transactions.") .define(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_connectTimeoutInMillis, Type.INT, 30000, Importance.MEDIUM, "Timeout value (in ms) for creating an initial connection to Solace") @@ -268,6 +272,12 @@ public static ConfigDef solaceConfigDef() { } + public String[] getTopics() { + return Optional.ofNullable(getString(SolaceSinkConstants.SOL_TOPICS)) + .map(s -> s.split(",")) + .orElse(new String[0]); + } + public boolean isEmitKafkaRecordHeadersEnabled() { return getBoolean(SolaceSinkConstants.SOL_EMIT_KAFKA_RECORD_HEADERS_ENABLED); } diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConstants.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConstants.java index ad3fa5b..acf16a8 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConstants.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkConstants.java @@ -66,6 +66,7 @@ public class SolaceSinkConstants { public static final String SOL_KRB_SERVICE_NAME = "sol.krb_service_name"; public static final String SOL_SSL_CONNECTION_DOWNGRADE_TO = "sol.ssl_connection_downgrade_to"; public static final String SOl_USE_TRANSACTIONS_FOR_QUEUE = "sol.use_transactions_for_queue"; + public static final String SOl_USE_TRANSACTIONS_FOR_TOPICS = "sol.use_transactions_for_topics"; // Low Importance Solace TLS Protocol properties // public static final String SOL_SSL_PROTOCOL = "sol.ssl_protocol"; @@ -114,7 +115,7 @@ public class SolaceSinkConstants { // Low Importance Persistent Message Properties public static final String SOL_SUB_ACK_WINDOW_SIZE = "sol.sub_ack_window_size"; public static final String SOL_PUB_ACK_WINDOW_SIZE = "sol.pub_ack_window_size"; - public static final String SOL_QUEUE_MESSAGES_AUTOFLUSH_SIZE = "sol.autoflush.size"; + public static final String SOL_AUTOFLUSH_SIZE = "sol.autoflush.size"; public static final String SOL_SUB_ACK_TIME = "sol.sub_ack_time"; public static final String SOL_PUB_ACK_TIME = "sol.pub_ack_time"; public static final String SOL_SUB_ACK_WINDOW_THRESHOLD = "sol.sub_ack_window_threshold"; diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java index 85b789b..2a854ae 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java @@ -20,16 +20,13 @@ package com.solace.connector.kafka.connect.sink; import com.solacesystems.jcsmp.BytesXMLMessage; -import com.solacesystems.jcsmp.DeliveryMode; import com.solacesystems.jcsmp.Destination; import com.solacesystems.jcsmp.JCSMPException; import com.solacesystems.jcsmp.JCSMPFactory; -import com.solacesystems.jcsmp.ProducerFlowProperties; import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.SDTException; import com.solacesystems.jcsmp.SDTMap; import com.solacesystems.jcsmp.Topic; -import com.solacesystems.jcsmp.XMLMessageProducer; import org.apache.kafka.clients.consumer.OffsetAndMetadata; import 
org.apache.kafka.common.TopicPartition; import org.apache.kafka.connect.errors.ConnectException; @@ -43,21 +40,17 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.concurrent.atomic.AtomicInteger; public class SolaceSinkSender { private static final Logger log = LoggerFactory.getLogger(SolaceSinkSender.class); private final SolaceSinkConnectorConfig sconfig; - private final XMLMessageProducer topicProducer; - private XMLMessageProducer queueProducer; private final SolSessionHandler sessionHandler; + final SolProducerHandler producerHandler; private final List topics = new ArrayList<>(); private Queue solQueue = null; - private boolean useTxforQueue = false; private final SolRecordProcessorIF processor; private final String kafkaKey; - private final AtomicInteger txMsgCounter = new AtomicInteger(); private final SolaceSinkTask sinkTask; private final Map offsets = new HashMap<>(); @@ -65,53 +58,25 @@ public class SolaceSinkSender { * Class that sends Solace Messages from Kafka Records. * @param sconfig JCSMP Configuration * @param sessionHandler SolSessionHandler - * @param useTxforQueue * @param sinkTask Connector Sink Task * @throws JCSMPException */ public SolaceSinkSender(final SolaceSinkConnectorConfig sconfig, final SolSessionHandler sessionHandler, - final boolean useTxforQueue, final SolaceSinkTask sinkTask) throws JCSMPException { this.sconfig = sconfig; this.sessionHandler = sessionHandler; - this.useTxforQueue = useTxforQueue; this.sinkTask = sinkTask; this.kafkaKey = sconfig.getString(SolaceSinkConstants.SOL_KAFKA_MESSAGE_KEY); - this.topicProducer = sessionHandler.getSession().getMessageProducer(new SolStreamingMessageCallbackHandler()); + this.producerHandler = new SolProducerHandler(sconfig, sessionHandler, this::txAutoFlushHandler); this.processor = sconfig.getConfiguredInstance(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolRecordProcessorIF.class); - } - /** - * Generate PubSub+ topics from topic string - */ - public void setupDestinationTopics() { - String solaceTopics = sconfig.getString(SolaceSinkConstants.SOL_TOPICS); - String[] stopics = solaceTopics.split(","); - int counter = 0; - while (stopics.length > counter) { - topics.add(JCSMPFactory.onlyInstance().createTopic(stopics[counter].trim())); - counter++; + for (String topic : sconfig.getTopics()) { + this.topics.add(JCSMPFactory.onlyInstance().createTopic(topic.trim())); } - } - /** - * Generate PubSub queue - */ - public void setupDestinationQueue() throws JCSMPException { - solQueue = JCSMPFactory.onlyInstance().createQueue(sconfig.getString(SolaceSinkConstants.SOl_QUEUE)); - ProducerFlowProperties flowProps = new ProducerFlowProperties(); - flowProps.setAckEventMode(sconfig.getString(SolaceSinkConstants.SOL_ACK_EVENT_MODE)); - flowProps.setWindowSize(sconfig.getInt(SolaceSinkConstants.SOL_PUBLISHER_WINDOW_SIZE)); - if (useTxforQueue) { - // Using transacted session for queue - queueProducer = sessionHandler.getTxSession().createProducer(flowProps, new SolStreamingMessageCallbackHandler(), - new SolProducerEventCallbackHandler()); - log.info("================ txSession status: {}", sessionHandler.getTxSession().getStatus().toString()); - } else { - // Not using transacted session for queue - queueProducer = sessionHandler.getSession().createProducer(flowProps, new SolStreamingMessageCallbackHandler(), - new SolProducerEventCallbackHandler()); + if (sconfig.getString(SolaceSinkConstants.SOl_QUEUE) != null) { + solQueue = 
JCSMPFactory.onlyInstance().createQueue(sconfig.getString(SolaceSinkConstants.SOl_QUEUE)); } } @@ -125,8 +90,7 @@ public void sendRecord(SinkRecord record) { message = processor.processRecord(kafkaKey, record); offsets.put(new TopicPartition(record.topic(), record.kafkaPartition()), new OffsetAndMetadata(record.kafkaOffset())); - log.trace("================ Processed record details, topic: {}, Partition: {}, " - + "Offset: {}", record.topic(), + log.trace("================ Processed record details, topic: {}, Partition: {}, Offset: {}", record.topic(), record.kafkaPartition(), record.kafkaOffset()); } catch (Exception e) { if (sconfig.getBoolean(SolaceSinkConstants.SOL_RECORD_PROCESSOR_IGNORE_ERROR)) { @@ -160,7 +124,7 @@ public void sendRecord(SinkRecord record) { } } try { - topicProducer.send(message, dest); + producerHandler.send(message, dest); } catch (IllegalArgumentException e) { throw new ConnectException(String.format("Received exception while sending message to topic %s", dest != null ? dest.getName() : null), e); @@ -172,12 +136,7 @@ public void sendRecord(SinkRecord record) { // Process when Dynamic destination is not set if (solQueue != null) { try { - message.setDeliveryMode(DeliveryMode.PERSISTENT); - queueProducer.send(message, solQueue); - if (useTxforQueue) { - txMsgCounter.getAndIncrement(); - log.trace("================ Count of TX message is now: {}", txMsgCounter.get()); - } + producerHandler.send(message, solQueue); } catch (IllegalArgumentException e) { throw new ConnectException(String.format("Received exception while sending message to queue %s", solQueue.getName()), e); @@ -187,34 +146,30 @@ public void sendRecord(SinkRecord record) { } } if (topics.size() != 0 && message.getDestination() == null) { - message.setDeliveryMode(DeliveryMode.DIRECT); - int count = 0; - while (topics.size() > count) { + for (Topic topic : topics) { try { - topicProducer.send(message, topics.get(count)); + producerHandler.send(message, topic); } catch (IllegalArgumentException e) { throw new ConnectException(String.format("Received exception while sending message to topic %s", - topics.get(count).getName()), e); + topic.getName()), e); } catch (JCSMPException e) { throw new RetriableException(String.format("Received exception while sending message to topic %s", - topics.get(count).getName()), e); + topic.getName()), e); } - count++; } } } + } - // Solace limits transaction size to 255 messages so need to force commit - if ( useTxforQueue && txMsgCounter.get() > sconfig.getInt(SolaceSinkConstants.SOL_QUEUE_MESSAGES_AUTOFLUSH_SIZE)-1 ) { - log.debug("================ Queue transaction autoflush size reached, flushing offsets from connector"); - try { - sinkTask.flush(offsets); - } catch (ConnectException e) { - if (e.getCause() instanceof JCSMPException) { - throw new RetriableException(e.getMessage(), e.getCause()); - } else { - throw e; - } + private void txAutoFlushHandler() { + log.debug("================ Queue transaction autoflush size reached, flushing offsets from connector"); + try { + sinkTask.flush(offsets); + } catch (ConnectException e) { + if (e.getCause() instanceof JCSMPException) { + throw new RetriableException(e.getMessage(), e.getCause()); + } else { + throw e; } } } @@ -244,10 +199,9 @@ void mayEnrichUserPropertiesWithKafkaRecordHeaders(final SinkRecord record, * Commit Solace and Kafka records. 
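* The commit is skipped when no transacted messages are outstanding.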
*/ public synchronized void commit() throws JCSMPException { - if (useTxforQueue) { + if (producerHandler.getTxMsgCount().getAndSet(0) > 0) { sessionHandler.getTxSession().commit(); - txMsgCounter.set(0); - log.debug("Comitted Solace records for transaction with status: {}", + log.debug("Committed Solace records for transaction with status: {}", sessionHandler.getTxSession().getStatus().name()); } } @@ -257,12 +211,7 @@ public synchronized void commit() throws JCSMPException { * @return */ public void shutdown() { - if (queueProducer != null) { - queueProducer.close(); - } - if (topicProducer != null) { - topicProducer.close(); - } + producerHandler.close(); } } diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkTask.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkTask.java index 5d56790..7dd3750 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkTask.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkTask.java @@ -38,7 +38,6 @@ public class SolaceSinkTask extends SinkTask { private static final Logger log = LoggerFactory.getLogger(SolaceSinkTask.class); private SolSessionHandler solSessionHandler; private SolaceSinkSender solSender; - private boolean useTxforQueue = false; private SinkTaskContext context; SolaceSinkConnectorConfig connectorConfig; @@ -60,28 +59,8 @@ public void start(Map props) { } log.info("================ JCSMPSession Connected"); - if (connectorConfig.getString(SolaceSinkConstants.SOl_QUEUE) != null) { - // Use transactions for queue destination - useTxforQueue = connectorConfig.getBoolean(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE); - if (useTxforQueue) { - try { - solSessionHandler.createTxSession(); - log.info("================ Transacted Session has been Created for PubSub+ queue destination"); - } catch (JCSMPException e) { - throw new ConnectException("Failed to create Transacted Session for PubSub+ queue destination, " + - "make sure transacted sessions are enabled", e); - } - } - } - try { - solSender = new SolaceSinkSender(connectorConfig, solSessionHandler, useTxforQueue, this); - if (connectorConfig.getString(SolaceSinkConstants.SOL_TOPICS) != null) { - solSender.setupDestinationTopics(); - } - if (connectorConfig.getString(SolaceSinkConstants.SOl_QUEUE) != null) { - solSender.setupDestinationQueue(); - } + solSender = new SolaceSinkSender(connectorConfig, solSessionHandler, this); } catch (Exception e) { throw new ConnectException("Failed to setup sender to PubSub+", e); } @@ -121,7 +100,7 @@ public synchronized void flush(Map currentOff log.debug("Flushing up to topic {}, partition {} and offset {}", tp.topic(), tp.partition(), om.offset()); } - if (useTxforQueue) { + if (solSessionHandler.getTxSession() != null) { try { solSender.commit(); } catch (JCSMPException e) { diff --git a/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfigTest.java b/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfigTest.java index b51b222..06b00e9 100644 --- a/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfigTest.java +++ b/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkConnectorConfigTest.java @@ -8,9 +8,14 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.stream.IntStream; +import static 
org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.emptyArray; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; @@ -42,13 +47,32 @@ public void shouldReturnConfiguredSolRecordProcessorIFGivenConfigurableClass() { SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig(configProps); // THEN - SolRecordProcessorIF processor = config.getConfiguredInstance(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolRecordProcessorIF.class);; + SolRecordProcessorIF processor = config.getConfiguredInstance(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolRecordProcessorIF.class); assertNotNull(processor); assertNotNull(((TestSolRecordProcessorIF)processor).configs); assertEquals("dummy", ((TestSolRecordProcessorIF)processor).configs.get("processor.config")); } + @Test + public void testSplitTopics() { + String[] topics = IntStream.range(0, 10) + .mapToObj(i -> RandomStringUtils.randomAlphanumeric(30)) + .toArray(String[]::new); + SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig( + Collections.singletonMap(SolaceSinkConstants.SOL_TOPICS, String.join(",", topics))); + assertNotNull(config.getTopics()); + assertArrayEquals(topics, config.getTopics()); + } + + @Test + public void testNullTopics() { + SolaceSinkConnectorConfig config = new SolaceSinkConnectorConfig( + Collections.singletonMap(SolaceSinkConstants.SOL_TOPICS, null)); + assertNotNull(config.getTopics()); + assertThat(config.getTopics(), emptyArray()); + } + public static class TestSolRecordProcessorIF implements SolRecordProcessorIF { Map configs; diff --git a/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkSenderTest.java b/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkSenderTest.java index ea71da9..d58d0ea 100644 --- a/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkSenderTest.java +++ b/src/test/java/com/solace/connector/kafka/connect/sink/SolaceSinkSenderTest.java @@ -24,27 +24,37 @@ import com.solacesystems.jcsmp.JCSMPFactory; import com.solacesystems.jcsmp.JCSMPSession; import com.solacesystems.jcsmp.SDTMap; +import com.solacesystems.jcsmp.transaction.RollbackException; +import com.solacesystems.jcsmp.transaction.TransactedSession; +import com.solacesystems.jcsmp.transaction.TransactionStatus; import org.apache.kafka.common.record.TimestampType; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.header.ConnectHeaders; import org.apache.kafka.connect.sink.SinkRecord; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; import java.util.HashMap; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; @ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class SolaceSinkSenderTest { @Mock private SolSessionHandler mkSessionHandler; @Mock private JCSMPSession mkJcsmpSession; + @Mock private TransactedSession mkTransactedSession; @Mock private SolaceSinkTask mkSolaceSinkTask; 
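// The transacted session is mocked so that commit and rollback handling can be verified without a live broker.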
@Test @@ -62,7 +72,6 @@ public void shouldAddKafkaRecordHeadersOnBytesXMLMessageWhenEnabled() throws JCS final SolaceSinkSender sender = new SolaceSinkSender( connectorConfig, mkSessionHandler, - false, mkSolaceSinkTask ); @@ -101,4 +110,52 @@ public void shouldAddKafkaRecordHeadersOnBytesXMLMessageWhenEnabled() throws JCS assertEquals("val2", properties.getString("h2")); assertEquals("val5", properties.getString("h3")); } -} \ No newline at end of file + + @ParameterizedTest(name = "[{index}] rollback={0}") + @ValueSource(booleans = {true, false}) + public void testCommit(boolean rollback) throws Exception { + Mockito.when(mkSessionHandler.getSession()).thenReturn(mkJcsmpSession); + Mockito.when(mkSessionHandler.getTxSession()).thenReturn(mkTransactedSession); + Mockito.when(mkTransactedSession.getStatus()).thenReturn(TransactionStatus.ACTIVE); + + if (rollback) { + Mockito.doThrow(new RollbackException("test-rollback")).when(mkTransactedSession).commit(); + } + + Map config = new HashMap<>(); + config.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolSimpleRecordProcessor.class.getName()); + config.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + config.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + + final SolaceSinkConnectorConfig connectorConfig = new SolaceSinkConnectorConfig(config); + final SolaceSinkSender sender = new SolaceSinkSender(connectorConfig, mkSessionHandler, mkSolaceSinkTask); + + sender.producerHandler.getTxMsgCount().incrementAndGet(); + if (rollback) { + assertThrows(RollbackException.class, sender::commit); + } else { + sender.commit(); + } + + Mockito.verify(mkTransactedSession, Mockito.times(1)).commit(); + assertEquals(0, sender.producerHandler.getTxMsgCount().get()); + } + + @Test + public void testCommitNoMessages() throws Exception { + Mockito.when(mkSessionHandler.getSession()).thenReturn(mkJcsmpSession); + Mockito.when(mkSessionHandler.getTxSession()).thenReturn(mkTransactedSession); + + Map config = new HashMap<>(); + config.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolSimpleRecordProcessor.class.getName()); + config.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + config.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + + final SolaceSinkConnectorConfig connectorConfig = new SolaceSinkConnectorConfig(config); + final SolaceSinkSender sender = new SolaceSinkSender(connectorConfig, mkSessionHandler, mkSolaceSinkTask); + sender.commit(); + + Mockito.verify(mkTransactedSession, Mockito.times(0)).commit(); + assertEquals(0, sender.producerHandler.getTxMsgCount().get()); + } +} From fe1bb57c079567aa3c06eb70b82912590d4428be Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Thu, 2 Dec 2021 16:24:16 -0500 Subject: [PATCH 03/12] add more transaction recovery tests --- build.gradle | 3 +- solace-integration-test-support | 2 +- .../connect/sink/it/SinkConnectorIT.java | 176 +++++++++++++++--- .../connect/sink/it/SolaceSinkTaskIT.java | 100 ++++++++++ .../sink/it/util/ThrowingFunction.java | 16 ++ .../BitnamiKafkaConnectContainer.java | 17 +- .../ConfluentKafkaConnectContainer.java | 10 + 7 files changed, 290 insertions(+), 34 deletions(-) create mode 100644 src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/ThrowingFunction.java diff --git a/build.gradle b/build.gradle index 95805bd..e3071cc 100644 --- a/build.gradle +++ b/build.gradle @@ -38,7 +38,8 @@ 
dependencies { integrationTestImplementation 'org.testcontainers:testcontainers:1.16.0' integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0' integrationTestImplementation 'org.testcontainers:kafka:1.16.0' - integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.6.0' + integrationTestImplementation 'org.testcontainers:toxiproxy:1.16.0' + integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.7.0' integrationTestImplementation 'org.slf4j:slf4j-api:1.7.32' integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6' diff --git a/solace-integration-test-support b/solace-integration-test-support index f42ff27..84ae6ee 160000 --- a/solace-integration-test-support +++ b/solace-integration-test-support @@ -1 +1 @@ -Subproject commit f42ff27fd8bdb2e350d8a6939e4edd52a1fecdf3 +Subproject commit 84ae6eee5e1594d75b88b3be8b722205cda3318e diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java index 8a81c35..4622f74 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java @@ -4,12 +4,15 @@ import com.google.gson.GsonBuilder; import com.google.gson.JsonObject; import com.solace.connector.kafka.connect.sink.SolaceSinkConstants; +import com.solace.connector.kafka.connect.sink.it.util.ThrowingFunction; import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider; import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider.KafkaArgumentSource; import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider.KafkaContext; import com.solace.connector.kafka.connect.sink.it.util.extensions.NetworkPubSubPlusExtension; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; +import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension.JCSMPProxy; +import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension.ToxiproxyContext; import com.solace.test.integration.semp.v2.SempV2Api; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueue; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueueSubscription; @@ -23,10 +26,16 @@ import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.SDTException; import com.solacesystems.jcsmp.SDTMap; +import eu.rekawek.toxiproxy.model.ToxicDirection; +import eu.rekawek.toxiproxy.model.toxic.Latency; import org.apache.commons.lang3.RandomStringUtils; +import org.apache.kafka.clients.admin.ConsumerGroupListing; +import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsResult; +import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.clients.producer.RecordMetadata; +import org.apache.kafka.common.KafkaFuture; +import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; @@ -46,6 +55,7 @@ import java.time.Duration; import java.util.ArrayList; 
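The assertMessageReceived() rework below first blocks until Kafka Connect has committed the produced record's offset. Its stream pipeline boils down to this AdminClient lookup, shown here as a sketch (adminClient and groupId stand in for the values the test pulls from its KafkaContext; the usual org.apache.kafka imports are assumed):

    // Sketch only: true once the consumer group's committed offset has reached the record.
    static boolean isCommitted(AdminClient adminClient, String groupId, RecordMetadata metadata)
            throws Exception {
        Map<TopicPartition, OffsetAndMetadata> offsets = adminClient
                .listConsumerGroupOffsets(groupId)
                .partitionsToOffsetAndMetadata()
                .get();
        OffsetAndMetadata committed = offsets.get(
                new TopicPartition(metadata.topic(), metadata.partition()));
        return committed != null && committed.offset() >= metadata.offset();
    }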
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -54,6 +64,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;

 import static org.apache.commons.lang3.RandomStringUtils.randomAlphanumeric;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -88,9 +99,9 @@ enum AdditionalCheck { ATTACHMENTBYTEBUFFER, CORRELATIONID }
   static void setUp(JCSMPSession jcsmpSession) throws JCSMPException {
     // Start consumer
     // Ensure test queue exists on PubSub+
-    solaceTopicConsumer = new TestSolaceTopicConsumer(jcsmpSession);
+    solaceTopicConsumer = new TestSolaceTopicConsumer(jcsmpSession); //TODO make this dynamic for concurrency
     solaceTopicConsumer.start();
-    solaceQueueConsumer = new TestSolaceQueueConsumer(jcsmpSession);
+    solaceQueueConsumer = new TestSolaceQueueConsumer(jcsmpSession); //TODO make this dynamic for concurrency
     solaceQueueConsumer.provisionQueue(SOL_QUEUE);
     solaceQueueConsumer.start();
   }
@@ -115,16 +126,18 @@ static void cleanUp() {
   ////////////////////////////////////////////////////
   // Test types

-  void messageToKafkaTest(TestKafkaProducer producer, String expectedSolaceQueue, String[] expectedSolaceTopics, String kafkaKey, String kafkaValue,
-                          Map<AdditionalCheck, String> additionalChecks) {
+  void messageToKafkaTest(KafkaContext kafkaContext,
+                          String expectedSolaceQueue,
+                          String[] expectedSolaceTopics,
+                          String kafkaKey,
+                          String kafkaValue,
+                          Map<AdditionalCheck, String> additionalChecks) {
     try {
       clearReceivedMessages();
-      RecordMetadata metadata = sendMessagetoKafka(producer, kafkaKey, kafkaValue);
-      assertMessageReceived(expectedSolaceQueue, expectedSolaceTopics, metadata, additionalChecks);
-    } catch (InterruptedException e) {
-      e.printStackTrace();
-    } catch (SDTException e) {
+      RecordMetadata metadata = sendMessagetoKafka(kafkaContext.getProducer(), kafkaKey, kafkaValue);
+      assertMessageReceived(kafkaContext, expectedSolaceQueue, expectedSolaceTopics, metadata, additionalChecks);
+    } catch (InterruptedException | SDTException e) {
       e.printStackTrace();
     }
   }
@@ -143,9 +156,44 @@ RecordMetadata sendMessagetoKafka(TestKafkaProducer producer, String kafkaKey, String kafkaValue) {
     return metadata;
   }

-  List<BytesXMLMessage> assertMessageReceived(String expectedSolaceQueue, String[] expectedSolaceTopics,
-                                              RecordMetadata metadata, Map<AdditionalCheck, String> additionalChecks)
+  List<BytesXMLMessage> assertMessageReceived(KafkaContext kafkaContext,
+                                              String expectedSolaceQueue,
+                                              String[] expectedSolaceTopics,
+                                              RecordMetadata metadata,
+                                              Map<AdditionalCheck, String> additionalChecks)
       throws SDTException, InterruptedException {
+    assertTimeoutPreemptively(Duration.ofMinutes(5), () -> {
+      boolean isCommitted;
+      do {
+        Stream<String> groupIds = kafkaContext.getAdminClient().listConsumerGroups().all().get().stream()
+            .map(ConsumerGroupListing::groupId);
+
+        Stream<Map.Entry<TopicPartition, OffsetAndMetadata>> partitionsToOffsets = groupIds
+            .map(groupId -> kafkaContext.getAdminClient().listConsumerGroupOffsets(groupId))
+            .map(ListConsumerGroupOffsetsResult::partitionsToOffsetAndMetadata)
+            .map((ThrowingFunction<KafkaFuture<Map<TopicPartition, OffsetAndMetadata>>,
+                Map<TopicPartition, OffsetAndMetadata>>) KafkaFuture::get)
+            .map(Map::entrySet)
+            .flatMap(Collection::stream);
+
+        Long partitionOffset = partitionsToOffsets
+            .filter(e -> e.getKey().topic().equals(metadata.topic()))
+            .filter(e -> e.getKey().partition() == metadata.partition())
+            .map(Map.Entry::getValue)
+            .map(OffsetAndMetadata::offset)
+            .findAny()
+            .orElse(null);
+
+        isCommitted = partitionOffset != null && partitionOffset >= metadata.offset();
+
+        if (!isCommitted) {
+          logger.info("Waiting for record {} to be committed.
Partition offset: {}", metadata, partitionOffset); + Thread.sleep(TimeUnit.SECONDS.toMillis(1)); + } + } while (!isCommitted); + logger.info("Record {} was committed", metadata); + }); + List receivedMessages = new ArrayList<>(); // Wait for PubSub+ to report messages - populate queue and topics if provided @@ -223,7 +271,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), SOL_QUEUE, topics, + messageToKafkaTest(kafkaContext, SOL_QUEUE, topics, // kafka key and value "Key", "Hello TextMessageToTopicTest world!", // additional checks @@ -254,7 +302,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), SOL_QUEUE, topics, + messageToKafkaTest(kafkaContext, SOL_QUEUE, topics, // kafka key and value "Key", "Hello TextMessageToTopicTest world!", // additional checks @@ -285,7 +333,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), SOL_QUEUE, topics, + messageToKafkaTest(kafkaContext, SOL_QUEUE, topics, // kafka key and value "Destination", "Hello TextMessageToTopicTest world!", // additional checks with expected values @@ -317,7 +365,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), SOL_QUEUE, topics, + messageToKafkaTest(kafkaContext, SOL_QUEUE, topics, // kafka key and value "TestCorrelationId", "Hello TextMessageToTopicTest world!", // additional checks with expected values @@ -349,7 +397,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), SOL_QUEUE, topics, + messageToKafkaTest(kafkaContext, SOL_QUEUE, topics, // kafka key and value "TestCorrelationId", "Hello TextMessageToTopicTest world!", // additional checks with expected values @@ -380,7 +428,7 @@ void setUp() { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), + messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics null, new String[] {"ctrl/bus/1234/start"}, // kafka key and value @@ -394,7 +442,7 @@ void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), + messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics null, new String[] {"ctrl/bus/1234/stop"}, // kafka key and value @@ -408,7 +456,7 @@ void 
kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) { @ArgumentsSource(KafkaArgumentsProvider.class) void kafkaConsumerTextMessageToTopicTest3(KafkaContext kafkaContext) { kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); - messageToKafkaTest(kafkaContext.getProducer(), + messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics null, new String[] {"comms/bus/1234"}, // kafka key and value @@ -441,6 +489,68 @@ void testFailPubSubConnection(KafkaContext kafkaContext) { }, () -> "Timed out waiting for connector to fail: " + GSON.toJson(connectorStatus.get())); } + @CartesianProductTest(name = "[{index}] autoFlush={0}, kafka={1}") + @CartesianValueSource(booleans = { true, false }) + @KafkaArgumentSource + void testRebalancedKafkaConsumers(boolean autoFlush, + KafkaContext kafkaContext, + @JCSMPProxy ToxiproxyContext jcsmpProxyContext, + @ExecSvc(poolSize = 2, scheduled = true) ScheduledExecutorService executorService) + throws Exception { + connectorProps.setProperty(SolaceSinkConstants.SOL_HOST, String.format("tcp://%s:%s", + jcsmpProxyContext.getDockerNetworkAlias(), jcsmpProxyContext.getProxy().getOriginalProxyPort())); + connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, SOL_QUEUE); + connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, + SOL_ROOT_TOPIC + "/" + RandomStringUtils.randomAlphanumeric(100)); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + connectorProps.setProperty(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Integer.toString(-1)); + connectorProps.setProperty("errors.retry.timeout", Long.toString(-1)); + connectorProps.setProperty("consumer.override.max.poll.interval.ms", "1000"); + + kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); + + Latency lag = jcsmpProxyContext.getProxy().toxics() + .latency("lag", ToxicDirection.UPSTREAM, TimeUnit.DAYS.toMillis(1)); + + clearReceivedMessages(); + String recordValue = randomAlphanumeric(100); + Future recordMetadataFuture = executorService.schedule(() -> + sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), recordValue), + 5, TimeUnit.SECONDS); + + logger.info("Waiting for Kafka poll interval to expire..."); + WaitingConsumer logConsumer = new WaitingConsumer(); + kafkaContext.getConnection().getConnectContainer().followOutput(logConsumer); + logConsumer.waitUntil(frame -> frame.getUtf8String().contains("consumer poll timeout has expired"), + 1, TimeUnit.MINUTES); + logConsumer.waitUntil(frame -> frame.getUtf8String().contains("Connection attempt failed to host"), + 1, TimeUnit.MINUTES); + Thread.sleep(TimeUnit.SECONDS.toMillis(5)); + + assertEquals("RUNNING", + kafkaContext.getSolaceConnectorDeployment().getConnectorStatus() + .getAsJsonArray("tasks").get(0).getAsJsonObject().get("state").getAsString()); + + logger.info("Removing toxic {}", lag.getName()); + lag.remove(); + + logger.info("Checking consumption of message"); + assertMessageReceived(kafkaContext, SOL_QUEUE, + new String[]{(String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)}, + recordMetadataFuture.get(30, TimeUnit.SECONDS), + ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, recordValue)); + + logger.info("Sending a new message to verify that Solace 
producers & Kafka consumers have recovered"); + clearReceivedMessages(); + String newRecordValue = randomAlphanumeric(100); + messageToKafkaTest(kafkaContext, SOL_QUEUE, + new String[]{(String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)}, + randomAlphanumeric(100), newRecordValue, + ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, newRecordValue)); + } + @CartesianProductTest(name = "[{index}] autoFlush={0}, kafka={1}") @CartesianValueSource(booleans = { true, false }) @KafkaArgumentSource @@ -486,15 +596,27 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, } Thread.sleep(5000); - Assertions.assertEquals("RUNNING", - kafkaContext.getSolaceConnectorDeployment().getConnectorStatus() - .getAsJsonArray("tasks").get(0).getAsJsonObject().get("state").getAsString()); + assertEquals("RUNNING", kafkaContext.getSolaceConnectorDeployment().getConnectorStatus() + .getAsJsonArray("tasks").get(0).getAsJsonObject().get("state").getAsString()); + + executorService.schedule(() -> { + logger.info("Restoring max message size for queue {}", queue.getName()); + return sempV2Api.config().updateMsgVpnQueue(SOL_VPN, queue.getName(), + new ConfigMsgVpnQueue().maxMsgSize(10000000), null); + }, 5, TimeUnit.SECONDS); + + logger.info("Waiting for Solace transaction to be committed"); + logConsumer.waitUntil(frame -> frame.getUtf8String() + .contains("Committed Solace records for transaction"), 5, TimeUnit.MINUTES); + + logger.info("Sending another message to Kafka since Kafka Connect won't commit existing messages " + + "after a retry until a new message is received"); + sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), randomAlphanumeric(100)); - sempV2Api.config().updateMsgVpnQueue(SOL_VPN, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(10000000), null); List receivedMsgDestinations = new ArrayList<>(); - for (int i = 0; i < 2; i++) { - logger.info("Checking consumption of message {}", i); - receivedMsgDestinations.addAll(assertMessageReceived(queue.getName(), new String[0], + while (receivedMsgDestinations.size() < 2) { + logger.info("Checking consumption of messages"); + receivedMsgDestinations.addAll(assertMessageReceived(kafkaContext, queue.getName(), new String[0], recordMetadataFuture.get(30, TimeUnit.SECONDS), ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, recordValue)) .stream() diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java index e01cbd0..8202283 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java @@ -1,6 +1,7 @@ package com.solace.connector.kafka.connect.sink.it; import com.solace.connector.kafka.connect.sink.SolRecordProcessorIF; +import com.solace.connector.kafka.connect.sink.SolSessionEventCallbackHandler; import com.solace.connector.kafka.connect.sink.SolaceSinkConstants; import com.solace.connector.kafka.connect.sink.SolaceSinkSender; import com.solace.connector.kafka.connect.sink.SolaceSinkTask; @@ -10,6 +11,8 @@ import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension; import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension.LogCaptor; +import 
com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension.JCSMPProxy; +import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension.ToxiproxyContext; import com.solace.test.integration.semp.v2.SempV2Api; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnClientProfile; import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnClientUsername; @@ -17,11 +20,20 @@ import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueueSubscription; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.ClosedFacilityException; +import com.solacesystems.jcsmp.ConsumerFlowProperties; +import com.solacesystems.jcsmp.Destination; +import com.solacesystems.jcsmp.FlowReceiver; import com.solacesystems.jcsmp.JCSMPException; +import com.solacesystems.jcsmp.JCSMPFactory; import com.solacesystems.jcsmp.JCSMPProperties; +import com.solacesystems.jcsmp.JCSMPSession; import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.SDTException; +import com.solacesystems.jcsmp.SessionEvent; +import com.solacesystems.jcsmp.XMLMessage; import com.solacesystems.jcsmp.transaction.RollbackException; +import eu.rekawek.toxiproxy.model.ToxicDirection; +import eu.rekawek.toxiproxy.model.toxic.Latency; import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomUtils; import org.apache.kafka.clients.consumer.OffsetAndMetadata; @@ -44,8 +56,10 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.time.Duration; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -55,6 +69,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.instanceOf; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -355,6 +370,91 @@ public void testAutoFlushCommitRollback(SempV2Api sempV2Api, Queue queue) throws .getData().getMaxMsgSizeExceededDiscardedMsgCount()); } + @ParameterizedTest(name = "[{index}] autoFlush={0}") + @ValueSource(booleans = {false, true}) + public void testLongCommit(boolean autoFlush, + @JCSMPProxy JCSMPSession jcsmpSession, + SempV2Api sempV2Api, + Queue queue, + @JCSMPProxy ToxiproxyContext jcsmpProxyContext, + @ExecSvc ExecutorService executorService, + @LogCaptor(SolSessionEventCallbackHandler.class) BufferedReader logReader) + throws Exception { + connectorProperties.put(SolaceSinkConstants.SOL_HOST, (String) jcsmpSession.getProperty(JCSMPProperties.HOST)); + connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 
2 : 100));
+    connectorProperties.put(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Integer.toString(-1));
+
+    String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME);
+    sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription()
+        .subscriptionTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)), null);
+
+    solaceSinkTask.start(connectorProperties);
+
+    SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0,
+        Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100),
+        Schema.BYTES_SCHEMA, RandomUtils.nextBytes(10), 0);
+    Map<TopicPartition, OffsetAndMetadata> currentOffsets = Collections.singletonMap(
+        new TopicPartition(sinkRecord.topic(), sinkRecord.kafkaPartition()),
+        new OffsetAndMetadata(sinkRecord.kafkaOffset()));
+
+    logger.info("Cutting JCSMP upstream");
+    Latency lag = jcsmpProxyContext.getProxy().toxics()
+        .latency("lag", ToxicDirection.UPSTREAM, TimeUnit.HOURS.toMillis(1));
+
+    Future<?> future = executorService.submit(() -> {
+      String logLine;
+      do {
+        try {
+          logLine = logReader.readLine();
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      } while (!logLine.contains("Received Session Event " + SessionEvent.RECONNECTING));
+
+      try {
+        Thread.sleep(TimeUnit.SECONDS.toMillis(5));
+      } catch (InterruptedException ignored) {}
+
+      try {
+        logger.info("Restoring JCSMP upstream");
+        lag.remove();
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    });
+
+    assertTimeoutPreemptively(Duration.ofMinutes(1), () -> {
+      solaceSinkTask.put(Collections.singleton(sinkRecord));
+      solaceSinkTask.flush(currentOffsets);
+    });
+    future.get(30, TimeUnit.SECONDS);
+
+    List<Destination> receivedDestinations = new ArrayList<>();
+    ConsumerFlowProperties consumerFlowProperties = new ConsumerFlowProperties();
+    consumerFlowProperties.setEndpoint(queue);
+    consumerFlowProperties.setStartState(true);
+    FlowReceiver flow = jcsmpSession.createFlow(null, consumerFlowProperties);
+    try {
+      assertTimeoutPreemptively(Duration.ofSeconds(30), () -> {
+        while (receivedDestinations.size() < 2) {
+          logger.info("Receiving messages");
+          Optional.ofNullable(flow.receive())
+              .map(XMLMessage::getDestination)
+              .ifPresent(receivedDestinations::add);
+        }
+      });
+    } finally {
+      flow.close();
+    }
+
+    assertThat(receivedDestinations, hasItems(queue,
+        JCSMPFactory.onlyInstance().createTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS))));
+  }
+
   public static class BadRecordProcessor implements SolRecordProcessorIF {
     static final RuntimeException TEST_EXCEPTION = new RuntimeException("Some processing failure");
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/ThrowingFunction.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/ThrowingFunction.java
new file mode 100644
index 0000000..dc590e7
--- /dev/null
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/ThrowingFunction.java
@@ -0,0 +1,16 @@
+package com.solace.connector.kafka.connect.sink.it.util;
+
+import java.util.function.Function;
+
+public interface ThrowingFunction<T, R> extends Function<T, R> {
+  @Override
+  default R apply(T t) {
+    try {
+      return acceptThrows(t);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  R acceptThrows(T t) throws Exception;
+}
diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java
b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java
index 5729c40..ef37dea 100644
--- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java
+++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java
@@ -1,6 +1,8 @@
 package com.solace.connector.kafka.connect.sink.it.util.testcontainers;

 import com.github.dockerjava.api.command.InspectContainerResponse;
+import com.solace.connector.kafka.connect.sink.SolProducerHandler;
+import com.solace.connector.kafka.connect.sink.SolaceSinkSender;
 import com.solace.connector.kafka.connect.sink.SolaceSinkTask;
 import com.solace.connector.kafka.connect.sink.it.Tools;
 import org.testcontainers.containers.BindMode;
@@ -23,6 +25,8 @@ public class BitnamiKafkaConnectContainer extends GenericContainer<BitnamiKafkaConnectContainer> {
+    private static final String PROPS_CONNECT = "/opt/bitnami/kafka/config/connect-distributed.properties";
+    private static final String PROPS_LOG4J = "/opt/bitnami/kafka/config/connect-log4j.properties";
     private GenericContainer<?> zookeeperContainer;
@@ -77,14 +81,17 @@ protected void doStart() {
     protected void containerIsStarting(InspectContainerResponse containerInfo) {
         String command = "/bin/sh\n" +
                 "set -e\n" +
-                "echo 'plugin.path=/opt/bitnami/kafka/jars' >> /opt/bitnami/kafka/config/connect-distributed.properties\n" +
-                "echo 'rest.port=" + CONNECT_PORT + "' >> /opt/bitnami/kafka/config/connect-distributed.properties\n" +
-                "echo 'log4j.logger.org.apache.kafka.connect.runtime.WorkerSinkTask=DEBUG' >> /opt/bitnami/kafka/config/connect-log4j.properties\n" +
-                "echo 'log4j.logger." + SolaceSinkTask.class.getName() + "=TRACE' >> /opt/bitnami/kafka/config/connect-log4j.properties\n" +
+                "echo 'plugin.path=/opt/bitnami/kafka/jars' >> " + PROPS_CONNECT + "\n" +
+                "echo 'rest.port=" + CONNECT_PORT + "' >> " + PROPS_CONNECT + "\n" +
+                "echo 'connector.client.config.override.policy=All' >> " + PROPS_CONNECT + "\n" +
+                "echo 'log4j.logger.org.apache.kafka.connect.runtime.WorkerSinkTask=TRACE' >> " + PROPS_LOG4J + "\n" +
+                "echo 'log4j.logger." + SolProducerHandler.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" +
+                "echo 'log4j.logger." + SolaceSinkSender.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" +
+                "echo 'log4j.logger.
+ SolaceSinkTask.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" + "export KAFKA_CFG_ADVERTISED_LISTENERS=" + advertisedListeners(containerInfo) + "\n" + "/opt/bitnami/scripts/kafka/setup.sh\n" + "/opt/bitnami/scripts/kafka/run.sh &\n" + - "/opt/bitnami/kafka/bin/connect-distributed.sh /opt/bitnami/kafka/config/connect-distributed.properties\n"; + "/opt/bitnami/kafka/bin/connect-distributed.sh " + PROPS_CONNECT + "\n"; copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 0777), STARTER_SCRIPT); super.containerIsStarting(containerInfo); } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java index 04a8064..6f0a100 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java @@ -1,5 +1,8 @@ package com.solace.connector.kafka.connect.sink.it.util.testcontainers; +import com.solace.connector.kafka.connect.sink.SolProducerHandler; +import com.solace.connector.kafka.connect.sink.SolaceSinkSender; +import com.solace.connector.kafka.connect.sink.SolaceSinkTask; import com.solace.connector.kafka.connect.sink.it.Tools; import org.testcontainers.containers.BindMode; import org.testcontainers.containers.GenericContainer; @@ -50,7 +53,14 @@ public ConfluentKafkaConnectContainer(DockerImageName dockerImageName, withEnv("CONNECT_INTERNAL_KEY_CONVERTER", "org.apache.kafka.connect.json.JsonConverter"); withEnv("CONNECT_INTERNAL_VALUE_CONVERTER", "org.apache.kafka.connect.json.JsonConverter"); withEnv("CONNECT_REST_ADVERTISED_HOST_NAME", "localhost"); + withEnv("CONNECT_CONNECTOR_CLIENT_CONFIG_OVERRIDE_POLICY", "All"); withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO"); + withEnv("CONNECT_LOG4J_LOGGERS", String.join(",", + String.join("=", "org.apache.kafka.connect.runtime.WorkerSinkTask", "TRACE"), + String.join("=", SolProducerHandler.class.getName(), "TRACE"), + String.join("=", SolaceSinkSender.class.getName(), "TRACE"), + String.join("=", SolaceSinkTask.class.getName(), "TRACE") + )); withEnv("CONNECT_PLUGIN_PATH", "/usr/share/java,/etc/kafka-connect/jars"); withClasspathResourceMapping(Tools.getUnzippedConnectorDirName() + "/lib", "/etc/kafka-connect/jars", BindMode.READ_ONLY); From 75bcdd29594d92cb7429e5d53ec0928667d6d383 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Fri, 3 Dec 2021 11:06:05 -0500 Subject: [PATCH 04/12] fix testLongCommit --- .../connector/kafka/connect/sink/it/SolaceSinkTaskIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java index 8202283..9b24af2 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java @@ -422,12 +422,13 @@ public void testLongCommit(boolean autoFlush, try { logger.info("Restoring JCSMP upstream"); lag.remove(); + logger.info("JCSMP upstream restored"); } catch (IOException e) { throw new RuntimeException(e); } }); - 
assertTimeoutPreemptively(Duration.ofMinutes(1), () -> { + assertTimeoutPreemptively(Duration.ofMinutes(5), () -> { solaceSinkTask.put(Collections.singleton(sinkRecord)); solaceSinkTask.flush(currentOffsets); }); From d598b481f83ca8ab6bffe672e0596634dfb8a562 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Mon, 6 Dec 2021 15:58:30 -0500 Subject: [PATCH 05/12] add transacted & queue dynamic-destination tests --- build.gradle | 2 +- .../connect/sink/SolaceProducerHandlerIT.java | 32 +-- .../connect/sink/it/SinkConnectorIT.java | 150 ++++++---- .../connect/sink/it/SolaceSinkTaskIT.java | 256 +++++++++++++++--- .../extensions/KafkaArgumentsProvider.java | 32 ++- .../BitnamiKafkaConnectContainer.java | 1 + .../ConfluentKafkaConnectContainer.java | 1 + src/integrationTest/resources/log4j2.xml | 3 + src/test/resources/log4j2.xml | 3 + 9 files changed, 356 insertions(+), 124 deletions(-) diff --git a/build.gradle b/build.gradle index e3071cc..7f345ee 100644 --- a/build.gradle +++ b/build.gradle @@ -33,7 +33,7 @@ testSets { dependencies { integrationTestImplementation 'org.junit.jupiter:junit-jupiter:5.8.1' - integrationTestImplementation 'org.junit-pioneer:junit-pioneer:1.4.2' + integrationTestImplementation 'org.junit-pioneer:junit-pioneer:1.5.0' integrationTestImplementation 'org.mockito:mockito-junit-jupiter:3.12.4' integrationTestImplementation 'org.testcontainers:testcontainers:1.16.0' integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0' diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java index fd0e966..9bf7c25 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/SolaceProducerHandlerIT.java @@ -19,8 +19,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.junitpioneer.jupiter.CartesianProductTest; -import org.junitpioneer.jupiter.CartesianValueSource; +import org.junitpioneer.jupiter.cartesian.CartesianTest; +import org.junitpioneer.jupiter.cartesian.CartesianTest.Values; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -67,13 +67,13 @@ void tearDown() { } } - @CartesianProductTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, sendToTopic={3}") - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - public void testStaticDestinations(boolean txnQueue, boolean txnTopic, boolean sendToQueue, boolean sendToTopic, - JCSMPSession session, Queue queue) throws Exception { + @CartesianTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, sendToTopic={3}") + public void testStaticDestinations(@Values(booleans = {true, false}) boolean txnQueue, + @Values(booleans = {true, false}) boolean txnTopic, + @Values(booleans = {true, false}) boolean sendToQueue, + @Values(booleans = {true, false}) boolean sendToTopic, + JCSMPSession session, + Queue queue) throws Exception { properties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); properties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); 
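        // (Sketch, not part of the patch.) Conceptually the handler under test reduces the four
        // @Values flags to a per-destination decision, e.g.:
        //   boolean transacted = sendingToQueue ? txnQueue : txnTopic;
        // routing the send through the transacted session's producer when transacted is true,
        // otherwise through the plain session producer.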
properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(txnQueue)); @@ -114,13 +114,13 @@ public void testStaticDestinations(boolean txnQueue, boolean txnTopic, boolean s } } - @CartesianProductTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, sendToTopic={3}") - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - @CartesianValueSource(booleans = {true, false}) - public void testLazyInit(boolean txnQueue, boolean txnTopic, boolean sendToQueue, boolean sendToTopic, - JCSMPSession session, Queue queue) throws Exception { + @CartesianTest(name = "[{index}] txnQueue={0}, txnTopic={1}, sendToQueue={2}, sendToTopic={3}") + public void testLazyInit(@Values(booleans = {true, false}) boolean txnQueue, + @Values(booleans = {true, false}) boolean txnTopic, + @Values(booleans = {true, false}) boolean sendToQueue, + @Values(booleans = {true, false}) boolean sendToTopic, + JCSMPSession session, + Queue queue) throws Exception { properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(txnQueue)); properties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(txnTopic)); diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java index 4622f74..c67f619 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SinkConnectorIT.java @@ -6,9 +6,10 @@ import com.solace.connector.kafka.connect.sink.SolaceSinkConstants; import com.solace.connector.kafka.connect.sink.it.util.ThrowingFunction; import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider; -import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider.KafkaArgumentSource; import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider.KafkaContext; +import com.solace.connector.kafka.connect.sink.it.util.extensions.KafkaArgumentsProvider.KafkaSource; import com.solace.connector.kafka.connect.sink.it.util.extensions.NetworkPubSubPlusExtension; +import com.solace.connector.kafka.connect.sink.recordprocessor.SolDynamicDestinationRecordProcessor; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; import com.solace.test.integration.junit.jupiter.extension.PubSubPlusExtension.JCSMPProxy; @@ -18,7 +19,6 @@ import com.solace.test.integration.semp.v2.config.model.ConfigMsgVpnQueueSubscription; import com.solacesystems.jcsmp.BytesXMLMessage; import com.solacesystems.jcsmp.Destination; -import com.solacesystems.jcsmp.EndpointProperties; import com.solacesystems.jcsmp.JCSMPException; import com.solacesystems.jcsmp.JCSMPFactory; import com.solacesystems.jcsmp.JCSMPProperties; @@ -46,8 +46,8 @@ import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ArgumentsSource; -import org.junitpioneer.jupiter.CartesianProductTest; -import org.junitpioneer.jupiter.CartesianValueSource; +import org.junitpioneer.jupiter.cartesian.CartesianTest; +import org.junitpioneer.jupiter.cartesian.CartesianTest.Values; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; import org.testcontainers.containers.output.WaitingConsumer; @@ -416,7 +416,7 @@ class SinkDynamicDestinationMessageProcessorMessageProcessorTests { @BeforeEach void setUp() { - connectorProps.setProperty("sol.record_processor_class", "com.solace.connector.kafka.connect.sink.recordprocessor.SolDynamicDestinationRecordProcessor"); + connectorProps.setProperty("sol.record_processor_class", SolDynamicDestinationRecordProcessor.class.getName()); connectorProps.setProperty("sol.dynamic_destination", "true"); connectorProps.setProperty("sol.topics", String.join(", ", topics)); connectorProps.setProperty("sol.queue", SOL_QUEUE); @@ -424,9 +424,12 @@ void setUp() { @DisplayName("TextMessage-DynamicDestinationMessageProcessor-start") - @ParameterizedTest - @ArgumentsSource(KafkaArgumentsProvider.class) - void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { + @CartesianTest(name = "[{index}] transacted={0}, autoFlush={1}, kafka={2}") + void kafkaConsumerTextMessageToTopicTest(@Values(booleans = { true, false }) boolean transacted, + @Values(booleans = { true, false }) boolean autoFlush, + @KafkaSource KafkaContext kafkaContext) { + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(transacted)); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 1 : 200)); kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics @@ -438,9 +441,12 @@ void kafkaConsumerTextMessageToTopicTest(KafkaContext kafkaContext) { } @DisplayName("TextMessage-DynamicDestinationMessageProcessor-stop") - @ParameterizedTest - @ArgumentsSource(KafkaArgumentsProvider.class) - void kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) { + @CartesianTest(name = "[{index}] transacted={0}, autoFlush={1}, kafka={2}") + void kafkaConsumerTextMessageToTopicTest2(@Values(booleans = { true, false }) boolean transacted, + @Values(booleans = { true, false }) boolean autoFlush, + @KafkaSource KafkaContext kafkaContext) { + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(transacted)); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 1 : 200)); kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics @@ -452,9 +458,12 @@ void kafkaConsumerTextMessageToTopicTest2(KafkaContext kafkaContext) { } @DisplayName("TextMessage-DynamicDestinationMessageProcessor-other") - @ParameterizedTest - @ArgumentsSource(KafkaArgumentsProvider.class) - void kafkaConsumerTextMessageToTopicTest3(KafkaContext kafkaContext) { + @CartesianTest(name = "[{index}] transacted={0}, autoFlush={1}, kafka={2}") + void kafkaConsumerTextMessageToTopicTest3(@Values(booleans = { true, false }) boolean transacted, + @Values(booleans = { true, false }) boolean autoFlush, + @KafkaSource KafkaContext kafkaContext) { + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(transacted)); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 
1 : 200)); kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); messageToKafkaTest(kafkaContext, // expected list of delivery queue and topics @@ -489,20 +498,29 @@ void testFailPubSubConnection(KafkaContext kafkaContext) { }, () -> "Timed out waiting for connector to fail: " + GSON.toJson(connectorStatus.get())); } - @CartesianProductTest(name = "[{index}] autoFlush={0}, kafka={1}") - @CartesianValueSource(booleans = { true, false }) - @KafkaArgumentSource - void testRebalancedKafkaConsumers(boolean autoFlush, - KafkaContext kafkaContext, + @CartesianTest(name = "[{index}] dynamicDestination={0}, autoFlush={1}, kafka={2}") + void testRebalancedKafkaConsumers(@Values(booleans = { false, true }) boolean dynamicDestination, + @Values(booleans = { false, true }) boolean autoFlush, + @KafkaSource KafkaContext kafkaContext, @JCSMPProxy ToxiproxyContext jcsmpProxyContext, @ExecSvc(poolSize = 2, scheduled = true) ScheduledExecutorService executorService) throws Exception { + String topicName; + + if (dynamicDestination) { + topicName = "comms/bus/" + RandomStringUtils.randomAlphanumeric(4); + connectorProps.setProperty(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, Boolean.toString(true)); + connectorProps.setProperty(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolDynamicDestinationRecordProcessor.class.getName()); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 1 : 100)); + } else { + topicName = SOL_ROOT_TOPIC + "/" + RandomStringUtils.randomAlphanumeric(100); + connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, SOL_QUEUE); + connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, topicName); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); + } + connectorProps.setProperty(SolaceSinkConstants.SOL_HOST, String.format("tcp://%s:%s", jcsmpProxyContext.getDockerNetworkAlias(), jcsmpProxyContext.getProxy().getOriginalProxyPort())); - connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, SOL_QUEUE); - connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, - SOL_ROOT_TOPIC + "/" + RandomStringUtils.randomAlphanumeric(100)); - connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); connectorProps.setProperty(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Integer.toString(-1)); @@ -517,7 +535,10 @@ void testRebalancedKafkaConsumers(boolean autoFlush, clearReceivedMessages(); String recordValue = randomAlphanumeric(100); Future recordMetadataFuture = executorService.schedule(() -> - sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), recordValue), + sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), + dynamicDestination ? + topicName.substring("comms/bus/".length()) + ":" + recordValue : + recordValue), 5, TimeUnit.SECONDS); logger.info("Waiting for Kafka poll interval to expire..."); @@ -537,50 +558,73 @@ void testRebalancedKafkaConsumers(boolean autoFlush, lag.remove(); logger.info("Checking consumption of message"); - assertMessageReceived(kafkaContext, SOL_QUEUE, - new String[]{(String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)}, + assertMessageReceived(kafkaContext, dynamicDestination ? 
null : SOL_QUEUE, + new String[]{topicName}, recordMetadataFuture.get(30, TimeUnit.SECONDS), ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, recordValue)); logger.info("Sending a new message to verify that Solace producers & Kafka consumers have recovered"); clearReceivedMessages(); String newRecordValue = randomAlphanumeric(100); - messageToKafkaTest(kafkaContext, SOL_QUEUE, - new String[]{(String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)}, - randomAlphanumeric(100), newRecordValue, + messageToKafkaTest(kafkaContext, dynamicDestination ? null : SOL_QUEUE, + new String[]{topicName}, + randomAlphanumeric(100), dynamicDestination ? + topicName.substring("comms/bus/".length()) + ":" + newRecordValue : + newRecordValue, ImmutableMap.of(AdditionalCheck.ATTACHMENTBYTEBUFFER, newRecordValue)); } - @CartesianProductTest(name = "[{index}] autoFlush={0}, kafka={1}") - @CartesianValueSource(booleans = { true, false }) - @KafkaArgumentSource - void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, - JCSMPSession jcsmpSession, SempV2Api sempV2Api, + @CartesianTest(name = "[{index}] dynamicDestination={0}, autoFlush={1}, kafka={2}") + void testCommitRollback(@Values(booleans = { false, true }) boolean dynamicDestination, + @Values(booleans = { false, true }) boolean autoFlush, + @KafkaSource KafkaContext kafkaContext, + JCSMPSession jcsmpSession, + SempV2Api sempV2Api, + Queue queue, @ExecSvc(poolSize = 2, scheduled = true) ScheduledExecutorService executorService) throws Exception { - Queue queue = JCSMPFactory.onlyInstance().createQueue(randomAlphanumeric(100)); - connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, queue.getName()); - connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); - connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); + String topicName; + if (dynamicDestination) { + topicName = "comms/bus/" + RandomStringUtils.randomAlphanumeric(4); + connectorProps.setProperty(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, Boolean.toString(true)); + connectorProps.setProperty(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolDynamicDestinationRecordProcessor.class.getName()); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 1 : 200)); + } else { + topicName = RandomStringUtils.randomAlphanumeric(100); + connectorProps.setProperty(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + connectorProps.setProperty(SolaceSinkConstants.SOL_TOPICS, topicName); + connectorProps.setProperty(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 
2 : 200)); + } + connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); connectorProps.setProperty(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); connectorProps.setProperty("errors.retry.timeout", Long.toString(-1)); + sempV2Api.config().updateMsgVpnQueue(SOL_VPN, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(1), + null); + sempV2Api.config().createMsgVpnQueueSubscription(SOL_VPN, queue.getName(), + new ConfigMsgVpnQueueSubscription().subscriptionTopic(topicName), null); + assertTimeoutPreemptively(Duration.ofSeconds(30), () -> { + while (sempV2Api.monitor().getMsgVpnQueue(SOL_VPN, queue.getName(), null).getData() + .getMaxMsgSize() != 1) { + logger.info("Waiting for queue {} to have max message size of 1", queue.getName()); + Thread.sleep(100); + } + }); + try (TestSolaceQueueConsumer solaceConsumer1 = new TestSolaceQueueConsumer(jcsmpSession)) { - EndpointProperties endpointProperties = new EndpointProperties(); - endpointProperties.setMaxMsgSize(1); - solaceConsumer1.provisionQueue(queue.getName(), endpointProperties); + solaceConsumer1.setQueueName(queue.getName()); solaceConsumer1.start(); - sempV2Api.config().createMsgVpnQueueSubscription(SOL_VPN, queue.getName(), new ConfigMsgVpnQueueSubscription() - .subscriptionTopic((String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)), null); - kafkaContext.getSolaceConnectorDeployment().startConnector(connectorProps); clearReceivedMessages(); String recordValue = randomAlphanumeric(100); Future recordMetadataFuture = executorService.schedule(() -> - sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), recordValue), + sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), + dynamicDestination ? + topicName.substring("comms/bus/".length()) + ":" + recordValue : + recordValue), 5, TimeUnit.SECONDS); WaitingConsumer logConsumer = new WaitingConsumer(); @@ -614,7 +658,7 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, sendMessagetoKafka(kafkaContext.getProducer(), randomAlphanumeric(100), randomAlphanumeric(100)); List receivedMsgDestinations = new ArrayList<>(); - while (receivedMsgDestinations.size() < 2) { + while (receivedMsgDestinations.size() < (dynamicDestination ? 
1 : 2)) { logger.info("Checking consumption of messages"); receivedMsgDestinations.addAll(assertMessageReceived(kafkaContext, queue.getName(), new String[0], recordMetadataFuture.get(30, TimeUnit.SECONDS), @@ -623,11 +667,15 @@ void testCommitRollback(boolean autoFlush, KafkaContext kafkaContext, .map(BytesXMLMessage::getDestination) .collect(Collectors.toList())); } - assertThat(receivedMsgDestinations, hasSize(2)); - assertThat(receivedMsgDestinations, hasItems(queue, JCSMPFactory.onlyInstance() - .createTopic((String) connectorProps.get(SolaceSinkConstants.SOL_TOPICS)))); - } finally { - jcsmpSession.deprovision(queue, JCSMPSession.FLAG_IGNORE_DOES_NOT_EXIST); + + if (dynamicDestination) { + assertThat(receivedMsgDestinations, hasSize(1)); + assertThat(receivedMsgDestinations, hasItems(JCSMPFactory.onlyInstance().createTopic(topicName))); + } else { + assertThat(receivedMsgDestinations, hasSize(2)); + assertThat(receivedMsgDestinations, hasItems(queue, + JCSMPFactory.onlyInstance().createTopic(topicName))); + } } } } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java index 9b24af2..8fc4286 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/SolaceSinkTaskIT.java @@ -7,6 +7,7 @@ import com.solace.connector.kafka.connect.sink.SolaceSinkTask; import com.solace.connector.kafka.connect.sink.it.util.extensions.NetworkPubSubPlusExtension; import com.solace.connector.kafka.connect.sink.recordprocessor.SolDynamicDestinationRecordProcessor; +import com.solace.connector.kafka.connect.sink.recordprocessor.SolSimpleRecordProcessor; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension; import com.solace.test.integration.junit.jupiter.extension.ExecutorServiceExtension.ExecSvc; import com.solace.test.integration.junit.jupiter.extension.LogCaptorExtension; @@ -30,6 +31,7 @@ import com.solacesystems.jcsmp.Queue; import com.solacesystems.jcsmp.SDTException; import com.solacesystems.jcsmp.SessionEvent; +import com.solacesystems.jcsmp.Topic; import com.solacesystems.jcsmp.XMLMessage; import com.solacesystems.jcsmp.transaction.RollbackException; import eu.rekawek.toxiproxy.model.ToxicDirection; @@ -49,6 +51,8 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; +import org.junitpioneer.jupiter.cartesian.CartesianTest; +import org.junitpioneer.jupiter.cartesian.CartesianTest.Values; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -72,6 +76,7 @@ import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.instanceOf; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTimeoutPreemptively; @@ -175,9 +180,15 @@ public void testFailTransactedSessionCreation(SempV2Api sempV2Api, Queue queue) assertThat(thrown.getCause().getMessage(), containsString("Router does not support transacted sessions")); } - @Test - public void testSendToTopicThrowsJCSMPException() { - connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + @ParameterizedTest + 
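  // Sketch of the failure path this parameterized test drives (the wrapping shown here is an
  // assumption inferred from the assertions below, not the task's literal code):
  //   try { producer.send(msg, destination); }
  //   catch (JCSMPException e) {  // ClosedFacilityException once the task is stopped
  //     throw new RetriableException("Received exception while sending message to " + kind, e);
  //   }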
@ValueSource(classes = {Queue.class, Topic.class}) + public void testSendThrowsJCSMPException(Class destinationType, Queue queue) { + if (destinationType.isAssignableFrom(Queue.class)) { + connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + } else { + connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + } + solaceSinkTask.start(connectorProperties); SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0, @@ -188,37 +199,28 @@ public void testSendToTopicThrowsJCSMPException() { ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.put( Collections.singleton(sinkRecord))); assertThat(thrown, instanceOf(RetriableException.class)); - assertThat(thrown.getMessage(), containsString("Received exception while sending message to topic")); + assertThat(thrown.getMessage(), containsString("Received exception while sending message to " + + (destinationType.isAssignableFrom(Queue.class) ? "queue" : "topic"))); assertThat(thrown.getCause(), instanceOf(ClosedFacilityException.class)); } - @Test - public void testSendToQueueThrowsJCSMPException(Queue queue) { - connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); + @ParameterizedTest(name = "[{index}] destinationType={0}") + @ValueSource(classes = {Queue.class, Topic.class}) + public void testDynamicSendThrowsJCSMPException(Class destinationType, Queue queue) { + connectorProperties.put(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, DynamicDestinationTypeRecordProcessor.class + .getName()); solaceSinkTask.start(connectorProperties); SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0, Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100), Schema.BYTES_SCHEMA, RandomUtils.nextBytes(10), 0); - solaceSinkTask.stop(); - ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.put( - Collections.singleton(sinkRecord))); - assertThat(thrown, instanceOf(RetriableException.class)); - assertThat(thrown.getMessage(), containsString("Received exception while sending message to queue")); - assertThat(thrown.getCause(), instanceOf(ClosedFacilityException.class)); - } - - @Test - public void testSendToDynamicTopicThrowsJCSMPException() { - connectorProperties.put(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, "true"); - connectorProperties.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, SolDynamicDestinationRecordProcessor.class.getName()); - solaceSinkTask.start(connectorProperties); - - SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0, - Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100), - Schema.BYTES_SCHEMA, String.format("%s %s", RandomStringUtils.randomAlphanumeric(4), - RandomStringUtils.randomAlphanumeric(100)).getBytes(StandardCharsets.UTF_8), 0); + String dynamicDestinationName = destinationType.isAssignableFrom(Queue.class) ? 
queue.getName() : + RandomStringUtils.randomAlphanumeric(100); + sinkRecord.headers() + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION, dynamicDestinationName) + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION_TYPE, destinationType.getName()); solaceSinkTask.stop(); ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.put( @@ -297,12 +299,16 @@ public void testRecordProcessorError(boolean ignoreRecordProcessorError, } } - @Test - public void testCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exception { + @ParameterizedTest(name = "[{index}] autoFlush={0}") + @ValueSource(booleans = {false, true}) + public void testCommitRollback(boolean autoFlush, SempV2Api sempV2Api, Queue queue) throws Exception { connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + if (autoFlush) { + connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(2)); + } String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME); sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription() @@ -322,12 +328,18 @@ public void testCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exceptio SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0, Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100), Schema.BYTES_SCHEMA, RandomUtils.nextBytes(10), 0); - Map currentOffsets = Collections.singletonMap( - new TopicPartition(sinkRecord.topic(), sinkRecord.kafkaPartition()), - new OffsetAndMetadata(sinkRecord.kafkaOffset())); - solaceSinkTask.put(Collections.singleton(sinkRecord)); - ConnectException thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.flush(currentOffsets)); + ConnectException thrown; + if (autoFlush) { + thrown = assertThrows(RetriableException.class, () -> solaceSinkTask.put(Collections.singleton(sinkRecord))); + } else { + Map currentOffsets = Collections.singletonMap( + new TopicPartition(sinkRecord.topic(), sinkRecord.kafkaPartition()), + new OffsetAndMetadata(sinkRecord.kafkaOffset())); + solaceSinkTask.put(Collections.singleton(sinkRecord)); + thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.flush(currentOffsets)); + } + assertThat(thrown.getMessage(), containsString("Error in committing transaction")); assertThat(thrown.getCause(), instanceOf(RollbackException.class)); assertThat(thrown.getCause().getMessage(), containsString("Document Is Too Large")); @@ -335,19 +347,31 @@ public void testCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exceptio .getData().getMaxMsgSizeExceededDiscardedMsgCount()); } - @Test - public void testAutoFlushCommitRollback(SempV2Api sempV2Api, Queue queue) throws Exception { - connectorProperties.put(SolaceSinkConstants.SOl_QUEUE, queue.getName()); - connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + @CartesianTest(name = "[{index}] destinationType={0}, autoFlush={1}") + public void testDynamicDestinationCommitRollback( + @Values(classes = {Queue.class, Topic.class}) Class destinationType, + @Values(booleans = {false, true}) boolean autoFlush, + 
SempV2Api sempV2Api, + Queue queue) throws Exception { connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); - connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(2)); + connectorProperties.put(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, DynamicDestinationTypeRecordProcessor.class + .getName()); + if (autoFlush) { + connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(1)); + } + + String topicName = RandomStringUtils.randomAlphanumeric(100); String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME); - sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription() - .subscriptionTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)), null); - sempV2Api.config().updateMsgVpnQueue(vpnName, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(1), null); + if (destinationType.isAssignableFrom(Topic.class)) { + sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), + new ConfigMsgVpnQueueSubscription().subscriptionTopic(topicName), null); + } + + sempV2Api.config().updateMsgVpnQueue(vpnName, queue.getName(), new ConfigMsgVpnQueue().maxMsgSize(1), null); assertTimeoutPreemptively(Duration.ofSeconds(20), () -> { while (sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null).getData() .getMaxMsgSize() != 1) { @@ -362,11 +386,26 @@ public void testAutoFlushCommitRollback(SempV2Api sempV2Api, Queue queue) throws Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100), Schema.BYTES_SCHEMA, RandomUtils.nextBytes(10), 0); - ConnectException thrown = assertThrows(RetriableException.class, () -> solaceSinkTask.put(Collections.singleton(sinkRecord))); + String dynamicDestinationName = destinationType.isAssignableFrom(Queue.class) ? 
queue.getName() : topicName; + sinkRecord.headers() + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION, dynamicDestinationName) + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION_TYPE, destinationType.getName()); + + ConnectException thrown; + if (autoFlush) { + thrown = assertThrows(RetriableException.class, () -> solaceSinkTask.put(Collections.singleton(sinkRecord))); + } else { + Map currentOffsets = Collections.singletonMap( + new TopicPartition(sinkRecord.topic(), sinkRecord.kafkaPartition()), + new OffsetAndMetadata(sinkRecord.kafkaOffset())); + solaceSinkTask.put(Collections.singleton(sinkRecord)); + thrown = assertThrows(ConnectException.class, () -> solaceSinkTask.flush(currentOffsets)); + } + assertThat(thrown.getMessage(), containsString("Error in committing transaction")); assertThat(thrown.getCause(), instanceOf(RollbackException.class)); assertThat(thrown.getCause().getMessage(), containsString("Document Is Too Large")); - assertEquals(2, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) + assertEquals(1, sempV2Api.monitor().getMsgVpnQueue(vpnName, queue.getName(), null) .getData().getMaxMsgSizeExceededDiscardedMsgCount()); } @@ -385,9 +424,12 @@ public void testLongCommit(boolean autoFlush, connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); - connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(autoFlush ? 2 : 100)); connectorProperties.put(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Integer.toString(-1)); + if (autoFlush) { + connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(2)); + } + String vpnName = connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME); sempV2Api.config().createMsgVpnQueueSubscription(vpnName, queue.getName(), new ConfigMsgVpnQueueSubscription() .subscriptionTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)), null); @@ -456,6 +498,107 @@ public void testLongCommit(boolean autoFlush, JCSMPFactory.onlyInstance().createTopic(connectorProperties.get(SolaceSinkConstants.SOL_TOPICS)))); } + @CartesianTest(name = "[{index}] destinationType={0}, autoFlush={1}") + public void testDynamicDestinationLongCommit( + @Values(classes = {Queue.class, Topic.class}) Class destinationType, + @Values(booleans = {false, true}) boolean autoFlush, + @JCSMPProxy JCSMPSession jcsmpSession, + SempV2Api sempV2Api, + Queue queue, + @JCSMPProxy ToxiproxyContext jcsmpProxyContext, + @ExecSvc ExecutorService executorService, + @LogCaptor(SolSessionEventCallbackHandler.class) BufferedReader logReader) throws Exception { + connectorProperties.put(SolaceSinkConstants.SOL_HOST, (String) jcsmpSession.getProperty(JCSMPProperties.HOST)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_QUEUE, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOl_USE_TRANSACTIONS_FOR_TOPICS, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOL_CHANNEL_PROPERTY_reconnectRetries, Integer.toString(-1)); + connectorProperties.put(SolaceSinkConstants.SOL_DYNAMIC_DESTINATION, Boolean.toString(true)); + connectorProperties.put(SolaceSinkConstants.SOL_RECORD_PROCESSOR, DynamicDestinationTypeRecordProcessor.class + .getName()); + + // Force 
transacted session to be created during connector-start. + connectorProperties.put(SolaceSinkConstants.SOL_TOPICS, RandomStringUtils.randomAlphanumeric(100)); + + if (autoFlush) { + connectorProperties.put(SolaceSinkConstants.SOL_AUTOFLUSH_SIZE, Integer.toString(1)); + } + + String topicName = RandomStringUtils.randomAlphanumeric(100); + if (destinationType.isAssignableFrom(Topic.class)) { + sempV2Api.config().createMsgVpnQueueSubscription(connectorProperties.get(SolaceSinkConstants.SOL_VPN_NAME), + queue.getName(), new ConfigMsgVpnQueueSubscription() + .subscriptionTopic(topicName), null); + } + + solaceSinkTask.start(connectorProperties); + + SinkRecord sinkRecord = new SinkRecord(RandomStringUtils.randomAlphanumeric(100), 0, + Schema.STRING_SCHEMA, RandomStringUtils.randomAlphanumeric(100), + Schema.BYTES_SCHEMA, RandomUtils.nextBytes(10), 0); + + String dynamicDestinationName = destinationType.isAssignableFrom(Queue.class) ? queue.getName() : topicName; + sinkRecord.headers() + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION, dynamicDestinationName) + .addString(DynamicDestinationTypeRecordProcessor.HEADER_DYNAMIC_DESTINATION_TYPE, destinationType.getName()); + + Map currentOffsets = Collections.singletonMap( + new TopicPartition(sinkRecord.topic(), sinkRecord.kafkaPartition()), + new OffsetAndMetadata(sinkRecord.kafkaOffset())); + + logger.info("Cutting JCSMP upstream"); + Latency lag = jcsmpProxyContext.getProxy().toxics() + .latency("lag", ToxicDirection.UPSTREAM, TimeUnit.HOURS.toMillis(1)); + + Future future = executorService.submit(() -> { + String logLine; + do { + try { + logLine = logReader.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } while (!logLine.contains("Received Session Event " + SessionEvent.RECONNECTING)); + + try { + Thread.sleep(TimeUnit.SECONDS.toMillis(5)); + } catch (InterruptedException ignored) {} + + try { + logger.info("Restoring JCSMP upstream"); + lag.remove(); + logger.info("JCSMP upstream restored"); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + + assertTimeoutPreemptively(Duration.ofMinutes(5), () -> { + solaceSinkTask.put(Collections.singleton(sinkRecord)); + solaceSinkTask.flush(currentOffsets); + }); + future.get(30, TimeUnit.SECONDS); + + ConsumerFlowProperties consumerFlowProperties = new ConsumerFlowProperties(); + consumerFlowProperties.setEndpoint(queue); + consumerFlowProperties.setStartState(true); + FlowReceiver flow = jcsmpSession.createFlow(null, consumerFlowProperties); + try { + assertTimeoutPreemptively(Duration.ofSeconds(30), () -> { + while (true) { + logger.info("Receiving message"); + BytesXMLMessage receivedMessage = flow.receive(); + if (receivedMessage != null) { + assertInstanceOf(destinationType, receivedMessage.getDestination()); + assertEquals(dynamicDestinationName, receivedMessage.getDestination().getName()); + break; + } + } + }); + } finally { + flow.close(); + } + } + public static class BadRecordProcessor implements SolRecordProcessorIF { static final RuntimeException TEST_EXCEPTION = new RuntimeException("Some processing failure"); @@ -477,4 +620,31 @@ public BytesXMLMessage processRecord(String skey, SinkRecord record) { return msg; } } + + public static class DynamicDestinationTypeRecordProcessor extends SolSimpleRecordProcessor { + public static final String HEADER_DYNAMIC_DESTINATION = "dynamicDestination"; + public static final String HEADER_DYNAMIC_DESTINATION_TYPE = "dynamicDestinationType"; + private static final Logger 
logger = LoggerFactory.getLogger(DynamicDestinationTypeRecordProcessor.class); + + @Override + public BytesXMLMessage processRecord(String skey, SinkRecord record) { + try { + String dynamicDestinationName = (String) record.headers().lastWithName(HEADER_DYNAMIC_DESTINATION) + .value(); + Class dynamicDestinationType = Class.forName((String) record.headers() + .lastWithName(HEADER_DYNAMIC_DESTINATION_TYPE).value()); + + Destination dynamicDestination = dynamicDestinationType.isAssignableFrom(Queue.class) ? + JCSMPFactory.onlyInstance().createQueue(dynamicDestinationName) : + JCSMPFactory.onlyInstance().createTopic(dynamicDestinationName); + logger.info("Parsed dynamic destination {} {}", dynamicDestinationType.getSimpleName(), dynamicDestination); + + BytesXMLMessage msg = super.processRecord(skey, record); + msg.getProperties().putDestination("dynamicDestination", dynamicDestination); + return msg; + } catch (SDTException | ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + } } diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/extensions/KafkaArgumentsProvider.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/extensions/KafkaArgumentsProvider.java index 99f6531..9d9ab83 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/extensions/KafkaArgumentsProvider.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/extensions/KafkaArgumentsProvider.java @@ -16,7 +16,7 @@ import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.ArgumentsProvider; import org.junit.jupiter.params.provider.ArgumentsSource; -import org.junitpioneer.jupiter.CartesianAnnotationConsumer; +import org.junitpioneer.jupiter.cartesian.CartesianArgumentsProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.GenericContainer; @@ -28,16 +28,29 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.lang.reflect.Parameter; +import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeoutException; import java.util.stream.Stream; -public class KafkaArgumentsProvider implements ArgumentsProvider, CartesianAnnotationConsumer { +public class KafkaArgumentsProvider implements ArgumentsProvider, + CartesianArgumentsProvider { private static final Logger LOG = LoggerFactory.getLogger(KafkaArgumentsProvider.class); @Override public Stream provideArguments(ExtensionContext context) { + return createKafkaContexts(context).map(Arguments::of); + } + + @Override + public Stream provideArguments(ExtensionContext context, Parameter parameter) { + Objects.requireNonNull(parameter.getAnnotation(KafkaSource.class)); + return createKafkaContexts(context); + } + + private Stream createKafkaContexts(ExtensionContext context) { KafkaConnection bitnamiCxn = context.getRoot() .getStore(KafkaNamespace.BITNAMI.getNamespace()) .getOrComputeIfAbsent(BitnamiResource.class, c -> { @@ -86,8 +99,8 @@ public Stream provideArguments(ExtensionContext context) { .getKafkaConnection(); return Stream.of( - Arguments.of(createKafkaContext(bitnamiCxn, KafkaNamespace.BITNAMI, context)), - Arguments.of(createKafkaContext(confluentCxn, KafkaNamespace.CONFLUENT, context)) + createKafkaContext(bitnamiCxn, KafkaNamespace.BITNAMI, context), + createKafkaContext(confluentCxn, 
KafkaNamespace.CONFLUENT, context) ); } @@ -125,17 +138,10 @@ private KafkaContext createKafkaContext(KafkaConnection connection, KafkaNamespa return new KafkaContext(namespace, connection, adminClient, connectorDeployment, producer); } - @Override - public void accept(KafkaArgumentSource kafkaArgumentSource) { - - } - - @Target(ElementType.METHOD) + @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) @ArgumentsSource(KafkaArgumentsProvider.class) - public @interface KafkaArgumentSource { - - } + public @interface KafkaSource {} public static class AutoDeleteSolaceConnectorDeploymentAfterEach implements AfterEachCallback { diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java index ef37dea..22433a7 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/BitnamiKafkaConnectContainer.java @@ -85,6 +85,7 @@ protected void containerIsStarting(InspectContainerResponse containerInfo) { "echo 'rest.port=" + CONNECT_PORT + "' >> " + PROPS_CONNECT + "\n" + "echo 'connector.client.config.override.policy=All' >> " + PROPS_CONNECT + "\n" + "echo 'log4j.logger.org.apache.kafka.connect.runtime.WorkerSinkTask=TRACE' >> " + PROPS_LOG4J + "\n" + + "echo 'log4j.logger.com.solace.connector.kafka.connect.sink=DEBUG' >> " + PROPS_LOG4J + "\n" + "echo 'log4j.logger." + SolProducerHandler.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" + "echo 'log4j.logger." + SolaceSinkSender.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" + "echo 'log4j.logger." 
+ SolaceSinkTask.class.getName() + "=TRACE' >> " + PROPS_LOG4J + "\n" + diff --git a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java index 6f0a100..46b3f48 100644 --- a/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java +++ b/src/integrationTest/java/com/solace/connector/kafka/connect/sink/it/util/testcontainers/ConfluentKafkaConnectContainer.java @@ -57,6 +57,7 @@ public ConfluentKafkaConnectContainer(DockerImageName dockerImageName, withEnv("CONNECT_LOG4J_ROOT_LOGLEVEL", "INFO"); withEnv("CONNECT_LOG4J_LOGGERS", String.join(",", String.join("=", "org.apache.kafka.connect.runtime.WorkerSinkTask", "TRACE"), + String.join("=", "com.solace.connector.kafka.connect.sink", "DEBUG"), String.join("=", SolProducerHandler.class.getName(), "TRACE"), String.join("=", SolaceSinkSender.class.getName(), "TRACE"), String.join("=", SolaceSinkTask.class.getName(), "TRACE") diff --git a/src/integrationTest/resources/log4j2.xml b/src/integrationTest/resources/log4j2.xml index 01eb664..0ff37d5 100644 --- a/src/integrationTest/resources/log4j2.xml +++ b/src/integrationTest/resources/log4j2.xml @@ -6,6 +6,9 @@ + + + diff --git a/src/test/resources/log4j2.xml b/src/test/resources/log4j2.xml index 01eb664..0ff37d5 100644 --- a/src/test/resources/log4j2.xml +++ b/src/test/resources/log4j2.xml @@ -6,6 +6,9 @@ + + + From 5992891b0c527c9c41fe4e3b418a876487260ea4 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Mon, 6 Dec 2021 18:29:02 -0500 Subject: [PATCH 06/12] add integrationTest retries --- build.gradle | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/build.gradle b/build.gradle index 7f345ee..1da9b66 100644 --- a/build.gradle +++ b/build.gradle @@ -6,6 +6,7 @@ plugins { id 'jacoco' id 'pmd' id 'com.github.spotbugs' version '4.7.6' + id 'org.gradle.test-retry' version '1.3.1' id 'org.unbroken-dome.test-sets' version '2.2.1' } @@ -97,6 +98,9 @@ project.integrationTest { useJUnitPlatform() outputs.upToDateWhen { false } dependsOn prepDistForIntegrationTesting + retry { + maxRetries = 3 + } afterSuite { desc, result -> if (!desc.parent) println("${result.resultType} " + From 84671f1ffee273d4af7c9474a116570c748069b6 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Thu, 9 Dec 2021 12:04:10 -0500 Subject: [PATCH 07/12] add publishing tasks --- build.gradle | 83 +++++++++++++++++++ gradle.properties | 3 +- .../kafka/connect/sink/SolSessionHandler.java | 1 - .../kafka/connect/sink/SolaceSinkSender.java | 1 - 4 files changed, 85 insertions(+), 3 deletions(-) diff --git a/build.gradle b/build.gradle index 1da9b66..8676719 100644 --- a/build.gradle +++ b/build.gradle @@ -1,11 +1,15 @@ import com.github.spotbugs.snom.SpotBugsTask +import io.github.gradlenexus.publishplugin.InitializeNexusStagingRepository plugins { id 'java' id 'distribution' id 'jacoco' + id 'maven-publish' id 'pmd' + id 'signing' id 'com.github.spotbugs' version '4.7.6' + id 'io.github.gradle-nexus.publish-plugin' version '1.1.0' id 'org.gradle.test-retry' version '1.3.1' id 'org.unbroken-dome.test-sets' version '2.2.1' } @@ -13,6 +17,7 @@ plugins { ext { kafkaVersion = '2.8.1' solaceJavaAPIVersion = '10.12.1' + isSnapshot = project.version.endsWith('-SNAPSHOT') } 
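+    // 'isSnapshot' is used further below to skip GPG signing for snapshot builds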
repositories { @@ -186,6 +191,11 @@ project.compileJava { dependsOn generateJava } +java { + withJavadocJar() + withSourcesJar() +} + distributions { main { contents { @@ -202,3 +212,76 @@ distributions { } } } + +publishing { + publications { + maven(MavenPublication) { + from components.java + pom { + name = "Solace PubSub+ Connector for Kafka: Sink" + description = "The Solace/Kafka adapter consumes Kafka topic records and streams them to the PubSub+ Event Mesh as topic and/or queue data events." + url = "https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink" + packaging = "jar" + licenses { + license { + name = "Apache License, Version 2.0" + url = "https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink/blob/master/LICENSE" + distribution = "repo" + } + } + organization { + name = "Solace" + url = "https://www.solace.com" + } + developers { + developer { + name = "Support for Solace" + email = "support@solace.com" + organization = "Solace" + organizationUrl = "http://solace.community" + } + } + scm { + connection = "scm:git:git://github.com/SolaceProducts/pubsubplus-connector-kafka-sink.git" + developerConnection = "scm:git:git@github.com:SolaceProducts/pubsubplus-connector-kafka-sink.git" + url = "https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink.git" + } + } + } + } +} + +nexusPublishing { + repositories { + internal { + nexusUrl = uri('http://apps-jenkins:9090/nexus/') + snapshotRepositoryUrl = uri('http://apps-jenkins:9090/nexus/content/repositories/snapshots') + // gets credentials from project.properties["internalUsername"] project.properties["internalPassword"] + } + sonatype { + nexusUrl = uri('https://oss.sonatype.org/service/local/') + snapshotRepositoryUrl = uri('https://oss.sonatype.org/content/repositories/snapshots') + // gets credentials from project.properties["sonatypeUsername"] project.properties["sonatypePassword"] + } + } +} + +signing { + required { + !isSnapshot + } + useGpgCmd() + sign publishing.publications.maven +} + +tasks.withType(Sign) { + onlyIf { + gradle.taskGraph.allTasks.any {task -> + task.name.startsWith("publish") && task.name.contains('Sonatype') + } + } +} + +tasks.withType(InitializeNexusStagingRepository).configureEach { + shouldRunAfter(tasks.withType(Sign)) +} \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 16cc23c..daf8797 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1 +1,2 @@ -version=2.1.0 \ No newline at end of file +group=com.solace.connector.kafka.connect +version=2.2.0 \ No newline at end of file diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java index 2f1c37a..01b10b2 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolSessionHandler.java @@ -167,7 +167,6 @@ public void configureSession() { /** * Create and connect JCSMPSession - * @return * @throws JCSMPException */ public void connectSession() throws JCSMPException { diff --git a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java index 2a854ae..ab9d9ee 100644 --- a/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java +++ b/src/main/java/com/solace/connector/kafka/connect/sink/SolaceSinkSender.java @@ -208,7 +208,6 @@ public synchronized void commit() throws 
JCSMPException { /** * Shutdown TXProducer and Topic Producer. - * @return */ public void shutdown() { producerHandler.close(); From 17b0bac9eb691e015f318709508f0159df32535a Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Thu, 9 Dec 2021 12:39:43 -0500 Subject: [PATCH 08/12] add publish dependency on test and integrationTest --- build.gradle | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index 8676719..9326226 100644 --- a/build.gradle +++ b/build.gradle @@ -103,6 +103,7 @@ project.integrationTest { useJUnitPlatform() outputs.upToDateWhen { false } dependsOn prepDistForIntegrationTesting + shouldRunAfter test retry { maxRetries = 3 } @@ -280,8 +281,10 @@ tasks.withType(Sign) { task.name.startsWith("publish") && task.name.contains('Sonatype') } } + shouldRunAfter test, integrationTest } tasks.withType(InitializeNexusStagingRepository).configureEach { - shouldRunAfter(tasks.withType(Sign)) + dependsOn test, integrationTest + shouldRunAfter tasks.withType(Sign) } \ No newline at end of file From 3805cb781d876a27de1517219ba319c712ded71c Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Thu, 9 Dec 2021 17:30:40 -0500 Subject: [PATCH 09/12] publish directly to internal nexus releases repo --- build.gradle | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/build.gradle b/build.gradle index 9326226..5b14953 100644 --- a/build.gradle +++ b/build.gradle @@ -250,15 +250,22 @@ publishing { } } } + repositories { + maven { + def releasesUrl = uri('http://apps-jenkins:9090/nexus/content/repositories/releases') + def snapshotRepositoryUrl = uri('http://apps-jenkins:9090/nexus/content/repositories/snapshots') + url = isSnapshot ? 
snapshotRepositoryUrl : releasesUrl + name = 'internal' + credentials { + username = project.properties[name + "Username"] + password = project.properties[name + "Password"] + } + } + } } nexusPublishing { repositories { - internal { - nexusUrl = uri('http://apps-jenkins:9090/nexus/') - snapshotRepositoryUrl = uri('http://apps-jenkins:9090/nexus/content/repositories/snapshots') - // gets credentials from project.properties["internalUsername"] project.properties["internalPassword"] - } sonatype { nexusUrl = uri('https://oss.sonatype.org/service/local/') snapshotRepositoryUrl = uri('https://oss.sonatype.org/content/repositories/snapshots') @@ -287,4 +294,8 @@ tasks.withType(Sign) { tasks.withType(InitializeNexusStagingRepository).configureEach { dependsOn test, integrationTest shouldRunAfter tasks.withType(Sign) -} \ No newline at end of file +} + +tasks.withType(PublishToMavenRepository).configureEach { + dependsOn test, integrationTest +} From 7ebb5334bcd0da5e1e42ea53eea291e20cb6f3b2 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Fri, 10 Dec 2021 16:51:56 -0500 Subject: [PATCH 10/12] add deployer-pom.xml --- .gitignore | 2 +- deployer-pom.xml | 396 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 397 insertions(+), 1 deletion(-) create mode 100644 deployer-pom.xml diff --git a/.gitignore b/.gitignore index 4d2299d..0e26c64 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ /bin/ # Jave related -target/** +/target/ *.jar *.war *.ear diff --git a/deployer-pom.xml b/deployer-pom.xml new file mode 100644 index 0000000..2d1b8e4 --- /dev/null +++ b/deployer-pom.xml @@ -0,0 +1,396 @@ + + 4.0.0 + com.solace.deploy + solace-deployer + Solace Deployer + 1.0.0 + A pom for publishing release version to maven central + pom + + + ${project.build.directory}/localRepository + + + + + solace-internal + + true + + http://apps-jenkins:9090/nexus/content/groups/stable/ + + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + 3.0.0 + + + enforce-property + + enforce + + + + + release.groupId + ^com.solace*(?:systems)?.*? + release.groupId must be a valid group ID that starts with "com.solace". + + + release.artifactId + + + release.version + ^(?:\d+\.){2}\d+$ + release.version must be a non-SNAPSHOT semantic version. + + + local.staging.dir + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.2.0 + + + regex-property + + regex-property + + + release.install.groupPath + ${release.groupId} + \. 
+ / + true + + + + + + org.apache.maven.plugins + maven-install-plugin + 2.5.2 + + true + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.2.0 + + + copy + deploy + + copy + + + ${project.build.directory}/tmpRepository + + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.md5 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha1 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha256 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha512 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + pom + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + pom.md5 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + pom.sha1 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + pom.sha256 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + pom.sha512 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar + javadoc + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.md5 + javadoc + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha1 + javadoc + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha256 + javadoc + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha512 + javadoc + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar + sources + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.md5 + sources + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha1 + 
sources + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha256 + sources + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + jar.sha512 + sources + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + module + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + module.md5 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + module.sha1 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + module.sha256 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + ${release.groupId} + ${release.artifactId} + ${release.version} + module.sha512 + true + + ${local.staging.dir}/${release.install.groupPath}/${release.artifactId}/${release.version}/ + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.7 + + true + + + + org.apache.maven.plugins + maven-source-plugin + 3.2.1 + + false + + + + + From 929f31d869c167f2282541afa329cd553aba5db5 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Wed, 15 Dec 2021 15:31:21 -0500 Subject: [PATCH 11/12] update README for externally creating custom record processors --- README.md | 59 ++++++++++++++++++++++++++++++++++++------------------- 1 file changed, 39 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index a4aadc3..ca279b0 100644 --- a/README.md +++ b/README.md @@ -337,16 +337,22 @@ Kerberos has some very specific requirements to operate correctly. Some addition JDK 8 or higher is required for this project. -First, clone this GitHub repo: -```shell -git clone https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink.git -cd pubsubplus-connector-kafka-sink -``` - -Then run the build script: -```shell -./gradlew clean build -``` +1. First, clone this GitHub repo: + ```shell + git clone https://github.com/SolaceProducts/pubsubplus-connector-kafka-sink.git + cd pubsubplus-connector-kafka-sink + ``` +2. Install the test support module: + ```shell + git submodule update --init --recursive + cd solace-integration-test-support + ./mvnw clean install -DskipTests + cd .. + ``` +3. Then run the build script: + ```shell + ./gradlew clean build + ``` This script creates artifacts in the `build` directory, including the deployable packaged PubSub+ Sink Connector archives under `build\distributions`. @@ -354,27 +360,40 @@ This script creates artifacts in the `build` directory, including the deployable An integration test suite is also included, which spins up a Docker-based deployment environment that includes a PubSub+ event broker, Zookeeper, Kafka broker, Kafka Connect. It deploys the connector to Kafka Connect and runs end-to-end tests. -1. 
Install the test support module:
-   ```shell
-   git submodule update --init --recursive
-   cd solace-integration-test-support
-   ./mvnw clean install -DskipTests
-   cd ..
-   ```
-2. Run the tests:
+1. Run the tests:
   ```shell
   ./gradlew clean test integrationTest
   ```

### Build a New Record Processor

-The processing of a Kafka record to create a PubSub+ message is handled by an interface defined in [`SolRecordProcessorIF.java`](/src/main/java/com/solace/connector/kafka/connect/sink/SolRecordProcessorIF.java). This is a simple interface that creates the Kafka source records from the PubSub+ messages. This project includes three examples of classes that implement this interface:
+The processing of a Kafka record to create a PubSub+ message is handled by [`SolRecordProcessorIF`](/src/main/java/com/solace/connector/kafka/connect/sink/SolRecordProcessorIF.java). This is a simple interface that builds the outbound PubSub+ message from each Kafka sink record.
+
+To get started, import the following dependency into your project:
+
+**Maven**
+```xml
+<dependency>
+  <groupId>com.solace.connector.kafka.connect</groupId>
+  <artifactId>pubsubplus-connector-kafka-sink</artifactId>
+  <version>2.2.0</version>
+</dependency>
+```
+
+**Gradle**
+```groovy
+compile "com.solace.connector.kafka.connect:pubsubplus-connector-kafka-sink:2.2.0"
+```
+
+Now you can implement your custom `SolRecordProcessorIF`.
+
+For reference, this project includes three examples which you can use as starting points for implementing your own custom record processors:

* [SolSimpleRecordProcessor](/src/main/java/com/solace/connector/kafka/connect/sink/recordprocessor/SolSimpleRecordProcessor.java)
* [SolSimpleKeyedRecordProcessor](/src/main/java/com/solace/connector/kafka/connect/sink/recordprocessor/SolSimpleKeyedRecordProcessor.java)
* [SolDynamicDestinationRecordProcessor](/src/main/java/com/solace/connector/kafka/connect/sink/recordprocessor/SolDynamicDestinationRecordProcessor.java)

-You can use these examples as starting points for implementing your own custom record processors.
+Once you've built the jar file for your custom record processor project, place it into the same directory as this connector, and update the connector's `sol.record_processor_class` config to point to the class of your new record processor.
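For illustration, a minimal custom record processor might look like the sketch below. It assumes only the `processRecord(String, SinkRecord)` contract shown in the bundled examples; the package, class, and user-property names are hypothetical.

```java
package com.example.kafka.sink; // hypothetical package

import com.solace.connector.kafka.connect.sink.SolRecordProcessorIF;
import com.solacesystems.jcsmp.BytesXMLMessage;
import com.solacesystems.jcsmp.JCSMPFactory;
import com.solacesystems.jcsmp.SDTException;
import com.solacesystems.jcsmp.SDTMap;
import org.apache.kafka.connect.sink.SinkRecord;

import java.nio.charset.StandardCharsets;

/**
 * Example processor: uses the record's value as the message payload and
 * attaches the Kafka topic/partition/offset as message user properties.
 */
public class KafkaMetadataRecordProcessor implements SolRecordProcessorIF {
  @Override
  public BytesXMLMessage processRecord(String skey, SinkRecord record) {
    BytesXMLMessage msg = JCSMPFactory.onlyInstance().createMessage(BytesXMLMessage.class);
    try {
      // Carry the Kafka coordinates so downstream PubSub+ consumers can trace the record
      SDTMap userProps = JCSMPFactory.onlyInstance().createMap();
      userProps.putString("k_topic", record.topic());
      userProps.putInteger("k_partition", record.kafkaPartition());
      userProps.putLong("k_offset", record.kafkaOffset());
      msg.setProperties(userProps);
    } catch (SDTException e) {
      throw new RuntimeException(e);
    }
    // Use the record's value, if any, as the message payload
    Object value = record.value();
    msg.writeAttachment(value == null ? new byte[0]
        : value.toString().getBytes(StandardCharsets.UTF_8));
    return msg;
  }
}
```

With this class on the connector's classpath, setting `sol.record_processor_class=com.example.kafka.sink.KafkaMetadataRecordProcessor` would select it.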
More information on Kafka sink connector development can be found here: - [Apache Kafka Connect](https://kafka.apache.org/documentation/) From eb32120a831b5acc5b84af028d154a0ee15b8eb3 Mon Sep 17 00:00:00 2001 From: Jeffrey Douangpaseuth <11084623+Nephery@users.noreply.github.com> Date: Wed, 15 Dec 2021 23:47:16 -0500 Subject: [PATCH 12/12] fix CVE-2021-45046 in tests --- build.gradle | 6 +++--- solace-integration-test-support | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index 5b14953..0564ee8 100644 --- a/build.gradle +++ b/build.gradle @@ -45,9 +45,9 @@ dependencies { integrationTestImplementation 'org.testcontainers:junit-jupiter:1.16.0' integrationTestImplementation 'org.testcontainers:kafka:1.16.0' integrationTestImplementation 'org.testcontainers:toxiproxy:1.16.0' - integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.7.0' + integrationTestImplementation 'com.solace.test.integration:pubsubplus-junit-jupiter:0.7.1' integrationTestImplementation 'org.slf4j:slf4j-api:1.7.32' - integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' + integrationTestImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.16.0' integrationTestImplementation 'org.apache.commons:commons-configuration2:2.6' integrationTestImplementation 'commons-beanutils:commons-beanutils:1.9.4' integrationTestImplementation 'com.google.code.gson:gson:2.3.1' @@ -57,7 +57,7 @@ dependencies { testImplementation 'org.junit.jupiter:junit-jupiter:5.8.1' testImplementation 'org.mockito:mockito-junit-jupiter:3.12.4' testImplementation 'org.hamcrest:hamcrest-all:1.3' - testImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.14.1' + testImplementation 'org.apache.logging.log4j:log4j-slf4j-impl:2.16.0' compile "org.apache.kafka:connect-api:$kafkaVersion" compile "com.solacesystems:sol-jcsmp:$solaceJavaAPIVersion" } diff --git a/solace-integration-test-support b/solace-integration-test-support index 84ae6ee..2d9cc15 160000 --- a/solace-integration-test-support +++ b/solace-integration-test-support @@ -1 +1 @@ -Subproject commit 84ae6eee5e1594d75b88b3be8b722205cda3318e +Subproject commit 2d9cc15cada6624218ff37e682027daba1627098