Commit

feat: int test and build (#1)

Anush008 authored Jun 25, 2024
1 parent 2e8d5e3 commit 8e1d392
Showing 6 changed files with 403 additions and 66 deletions.
34 changes: 34 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,34 @@
name: Test

on:
  pull_request:
    types:
      - opened
      - edited
      - synchronize
      - reopened

permissions:
  contents: write
  checks: write

jobs:
  build:
    name: Build
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'

      - name: Build And Test
        uses: gradle/gradle-build-action@v2
        with:
          gradle-version: 8.5
          arguments: build
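
Note that the build argument in the final step runs Gradle's full check lifecycle, so once build.gradle (below) hooks the new integrationTest task into check, every pull request build exercises the integration tests as well; the ubuntu-latest runner already provides the Docker daemon that Testcontainers needs.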
175 changes: 113 additions & 62 deletions build.gradle
@@ -2,98 +2,149 @@
 import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;

 plugins {
     id 'java'
     id 'java-library'
     id 'maven-publish'

+    id "com.diffplug.spotless" version "6.22.0"
     id 'com.github.johnrengelman.shadow' version '8.1.1'
 }

 group = 'io.qdrant'
 version = '1.0.0'
-description = 'qdrant-kafka'
+description = 'Kafka Sink Connector for Qdrant.'
 java.sourceCompatibility = JavaVersion.VERSION_1_8
 java.targetCompatibility = JavaVersion.VERSION_1_8

 repositories {
     mavenLocal()
     mavenCentral()
     maven {
         url = uri('https://packages.confluent.io/maven/')
     }
 }

+sourceSets {
+    intTest {
+        compileClasspath += sourceSets.main.output
+        runtimeClasspath += sourceSets.main.output
+    }
+}
+
+configurations {
+    intTestImplementation.extendsFrom implementation
+    intTestImplementation.extendsFrom testImplementation
+    intTestRuntimeOnly.extendsFrom runtimeOnly
+    intTestRuntimeOnly.extendsFrom testRuntimeOnly
+}
+
+def kafkaVersion = '3.5.0'
+
 dependencies {
-    implementation 'org.apache.kafka:connect-api:3.7.0'
+    implementation "org.apache.kafka:connect-api:$kafkaVersion"
     implementation 'io.qdrant:client:1.9.1'
     implementation 'io.grpc:grpc-protobuf:1.59.0'
     implementation 'com.google.guava:guava:33.2.1-jre'
     implementation 'com.fasterxml.jackson.core:jackson-databind:2.17.1'
     implementation 'com.google.protobuf:protobuf-java-util:3.25.3'
     implementation 'org.slf4j:slf4j-api:2.0.13'

     testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.2'
     testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.10.2'

+    intTestImplementation 'org.testcontainers:junit-jupiter:1.19.6'
+    intTestImplementation 'org.testcontainers:qdrant:1.19.6'
+    intTestImplementation "org.apache.kafka:kafka_2.13:$kafkaVersion"
+    intTestImplementation "org.apache.kafka:kafka_2.13:$kafkaVersion:test"
+    intTestImplementation "org.apache.kafka:kafka-clients:$kafkaVersion"
+    intTestImplementation "org.apache.kafka:kafka-clients:$kafkaVersion:test"
+    intTestImplementation "org.apache.kafka:connect-runtime:$kafkaVersion"
+    intTestImplementation "org.apache.kafka:connect-runtime:$kafkaVersion:test"
 }

 java {
     withSourcesJar()
     withJavadocJar()
 }

 test {
     useJUnitPlatform()
 }

+spotless {
+    java {
+        importOrder()
+        removeUnusedImports()
+        cleanthat()
+        googleJavaFormat()
+        formatAnnotations()
+    }
+}
+
 shadowJar {
     minimize()
     mergeServiceFiles()
-    archiveClassifier.set('confluent')
+    archiveClassifier.set('')
 }

 ext.releaseDate = DateTimeFormatter.ISO_LOCAL_DATE.format(LocalDateTime.now())
 def archiveFilename = 'qdrant-kafka'

 task prepareConfluentArchive(type: Copy) {
     group = 'Confluent'
     dependsOn 'shadowJar'

     def baseDir = "$archiveFilename-${project.version}"
     from('archive/manifest.json') {
         expand project.properties
         destinationDir = file "$buildDir/confluentArchive/$baseDir"
     }

     from('archive/assets') {
         into 'assets'
     }

     from('archive/etc') {
         include 'qdrant-kafka.properties'
         into 'etc'
     }

     from("$buildDir/libs") {
-        include "${project.name}-${project.version}-confluent.jar"
+        include "${project.name}-${project.version}.jar"
         into 'lib'
     }

     from('.') {
         include 'README.md', 'LICENSE'
         into 'doc'
     }
 }

 task createConfluentArchive(type: Zip) {
     group = 'Confluent'
     dependsOn prepareConfluentArchive
     from "$buildDir/confluentArchive"
     archiveBaseName.set('')
     archiveAppendix.set(archiveFilename)
     archiveVersion.set(project.version.toString())
     destinationDirectory.set(file("$buildDir/confluent"))
 }

+tasks.register('integrationTest', Test) {
+    description = 'Runs integration tests.'
+    group = 'verification'
+
+    testClassesDirs = sourceSets.intTest.output.classesDirs
+    classpath = sourceSets.intTest.runtimeClasspath
+    shouldRunAfter test
+
+    useJUnitPlatform()
+
+    testLogging {
+        events "passed"
+    }
+}
+
+check.dependsOn integrationTest
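
With check wired to the new task, a plain Gradle build now runs both suites. The split keeps the fast unit tests separate from the container-backed ones: gradle test runs only the standard test source set, gradle integrationTest runs the intTest source set against a Testcontainers-managed Qdrant instance and an embedded Kafka Connect cluster, and gradle build runs everything via check. (Invocation is a sketch; if the repository ships a Gradle wrapper, ./gradlew replaces gradle, and the integration suite needs a running Docker daemon.)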
143 changes: 143 additions & 0 deletions src/intTest/java/io/qdrant/kafka/BaseKafkaConnectTest.java
@@ -0,0 +1,143 @@
/* (C)2024 */
package io.qdrant.kafka;

import static org.apache.kafka.connect.json.JsonConverterConfig.SCHEMAS_ENABLE_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.CONNECTOR_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.KEY_CONVERTER_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.TASKS_MAX_CONFIG;
import static org.apache.kafka.connect.runtime.ConnectorConfig.VALUE_CONVERTER_CLASS_CONFIG;
import static org.apache.kafka.connect.runtime.SinkConnectorConfig.TOPICS_CONFIG;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.DeleteTopicsResult;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.runtime.AbstractStatus;
import org.apache.kafka.connect.runtime.rest.entities.ConnectorStateInfo;
import org.apache.kafka.connect.util.clusters.EmbeddedConnectCluster;
import org.apache.kafka.test.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class BaseKafkaConnectTest extends BaseQdrantTest {

  static final Logger LOGGER = LoggerFactory.getLogger(BaseKafkaConnectTest.class);

  EmbeddedConnectCluster connect;

  final String topicName;

  final String connectorName;

  protected BaseKafkaConnectTest(final String topicName, final String connectorName) {
    this.topicName = topicName;
    this.connectorName = connectorName;
  }

  @BeforeEach
  void startConnect() {
    connect = new EmbeddedConnectCluster.Builder().name("qdrant-it-connect-cluster").build();
    connect.start();
    connect.kafka().createTopic(topicName);
  }

  @AfterEach
  void stopConnect() {
    try (final Admin admin = connect.kafka().createAdminClient()) {
      final DeleteTopicsResult result = admin.deleteTopics(Arrays.asList(topicName));
      result.all().get();
    } catch (final ExecutionException | InterruptedException e) {
      throw new RuntimeException(e);
    }
    connect.stop();
  }

  long waitForConnectorToStart(final String name, final int numTasks) throws InterruptedException {
    TestUtils.waitForCondition(
        () -> assertConnectorAndTasksRunning(name, numTasks).orElse(false),
        TimeUnit.MINUTES.toMillis(60),
        "Connector tasks did not start in time.");
    return System.currentTimeMillis();
  }

  Optional<Boolean> assertConnectorAndTasksRunning(final String connectorName, final int numTasks) {
    try {
      final ConnectorStateInfo info = connect.connectorStatus(connectorName);
      final boolean result =
          info != null
              && info.tasks().size() >= numTasks
              && info.connector().state().equals(AbstractStatus.State.RUNNING.toString())
              && info.tasks().stream()
                  .allMatch(s -> s.state().equals(AbstractStatus.State.RUNNING.toString()));
      return Optional.of(result);
    } catch (final Exception e) {
      LOGGER.error("Could not check connector state info.");
      return Optional.empty();
    }
  }

  Map<String, String> connectorProperties() {
    final Map<String, String> props = new HashMap<>(getDefaultProperties());
    props.put(CONNECTOR_CLASS_CONFIG, QdrantSinkConnector.class.getName());
    props.put(TOPICS_CONFIG, topicName);
    props.put(TASKS_MAX_CONFIG, Integer.toString(1));
    props.put(KEY_CONVERTER_CLASS_CONFIG, JsonConverter.class.getName());
    props.put(VALUE_CONVERTER_CLASS_CONFIG, JsonConverter.class.getName());
    props.put("value.converter." + SCHEMAS_ENABLE_CONFIG, "false");
    props.put("key.converter." + SCHEMAS_ENABLE_CONFIG, "false");
    return props;
  }

  void writeUnnamedPoint(String collectionName, int id) {
    connect
        .kafka()
        .produce(
            topicName,
            String.format(
                "{\n"
                    + "  \"collection_name\": \"%s\",\n"
                    + "  \"id\": %d,\n"
                    + "  \"vector\": [\n"
                    + "    0.1,\n"
                    + "    0.2,\n"
                    + "    0.3,\n"
                    + "    0.4,\n"
                    + "    0.5,\n"
                    + "    0.6,\n"
                    + "    0.7,\n"
                    + "    0.8\n"
                    + "  ],\n"
                    + "  \"payload\": {\n"
                    + "    \"name\": \"kafka\",\n"
                    + "    \"description\": \"Kafka is a distributed streaming platform for all\",\n"
                    + "    \"url\": \"https://kafka.apache.com/\"\n"
                    + "  }\n"
                    + "}",
                collectionName, id));
  }
}
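
The helpers above are meant to be driven from concrete test classes. A minimal sketch of what one could look like follows (the class, topic, and collection names are hypothetical, and the final Qdrant-side assertion is only outlined, since BaseQdrantTest and its helpers are not part of this diff):

/* (C)2024 */
package io.qdrant.kafka;

import org.junit.jupiter.api.Test;

class QdrantSinkConnectorIT extends BaseKafkaConnectTest {

  private static final String COLLECTION = "test-collection"; // hypothetical name

  QdrantSinkConnectorIT() {
    super("qdrant-sink-topic", "qdrant-sink-connector"); // hypothetical names
  }

  @Test
  void writesPointsToQdrant() throws Exception {
    // Register the sink connector on the embedded Connect cluster and
    // block until its single task reports RUNNING.
    connect.configureConnector(connectorName, connectorProperties());
    waitForConnectorToStart(connectorName, 1);

    // Produce a handful of point messages to the connector's topic.
    for (int id = 0; id < 10; id++) {
      writeUnnamedPoint(COLLECTION, id);
    }

    // Then poll Qdrant (via helpers from BaseQdrantTest, not shown in
    // this diff) until the collection contains the produced points.
  }
}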