
Commit

Merge branch 'master' into MODINVSTOR-1243
SerhiiNosko authored Aug 28, 2024
2 parents 6ae8a4d + af5b69d commit 53e369a
Showing 6 changed files with 53 additions and 43 deletions.
@@ -1,5 +1,29 @@
 package org.folio.inventory.dataimport.consumers;

+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
+import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
+import static org.folio.ActionProfile.Action.CREATE;
+import static org.folio.DataImportEventTypes.DI_COMPLETED;
+import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
+import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_ENV;
+import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_HOST;
+import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_MAX_REQUEST_SIZE;
+import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_PORT;
+import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_REPLICATION_FACTOR;
+import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace;
+import static org.folio.rest.jaxrs.model.EntityType.INSTANCE;
+import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
+import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE;
+import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE;
+import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.when;
+
 import com.github.tomakehurst.wiremock.client.WireMock;
 import com.github.tomakehurst.wiremock.common.Slf4jNotifier;
 import com.github.tomakehurst.wiremock.core.WireMockConfiguration;
@@ -13,6 +37,13 @@
 import io.vertx.ext.unit.Async;
 import io.vertx.ext.unit.TestContext;
 import io.vertx.ext.unit.junit.VertxUnitRunner;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.TimeUnit;
 import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
 import net.mguenther.kafka.junit.KeyValue;
 import net.mguenther.kafka.junit.ObserveKeyValues;
@@ -37,38 +68,6 @@
 import org.mockito.Mock;
 import org.mockito.MockitoAnnotations;

-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.TimeUnit;
-
-import static com.github.tomakehurst.wiremock.client.WireMock.get;
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
-import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
-import static org.folio.ActionProfile.Action.CREATE;
-import static org.folio.DataImportEventTypes.DI_COMPLETED;
-import static org.folio.DataImportEventTypes.DI_INCOMING_MARC_BIB_RECORD_PARSED;
-import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_ENV;
-import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_HOST;
-import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_MAX_REQUEST_SIZE;
-import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_PORT;
-import static org.folio.inventory.dataimport.util.KafkaConfigConstants.KAFKA_REPLICATION_FACTOR;
-import static org.folio.kafka.KafkaTopicNameHelper.getDefaultNameSpace;
-import static org.folio.rest.jaxrs.model.EntityType.INSTANCE;
-import static org.folio.rest.jaxrs.model.EntityType.MARC_BIBLIOGRAPHIC;
-import static org.folio.rest.jaxrs.model.ProfileType.ACTION_PROFILE;
-import static org.folio.rest.jaxrs.model.ProfileType.JOB_PROFILE;
-import static org.folio.rest.jaxrs.model.ProfileType.MAPPING_PROFILE;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.when;
-
 @RunWith(VertxUnitRunner.class)
 public class DataImportConsumerVerticleTest {

@@ -127,6 +126,7 @@ public class DataImportConsumerVerticleTest {
   @BeforeClass
   public static void setUpClass(TestContext context) {
     Async async = context.async();
+    vertx = Vertx.vertx();
     cluster = provisionWith(defaultClusterConfig());
     cluster.start();
     String[] hostAndPort = cluster.getBrokerList().split(":");
@@ -136,7 +136,6 @@ public static void setUpClass(TestContext context) {
       .kafkaPort(hostAndPort[1])
       .build();

-    vertx = Vertx.vertx();
     EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, 1);

     DeploymentOptions options = new DeploymentOptions()
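The two hunks above move the Vertx.vertx() call ahead of the Kafka configuration, so the instance already exists when it is handed to EventManager.registerKafkaEventPublisher. A minimal sketch of the resulting ordering; the class and method names here are invented for illustration, and only the calls visible in the hunks are reused:

import io.vertx.core.Vertx;
import org.folio.kafka.KafkaConfig;
import org.folio.processing.events.EventManager;

// Illustrative sketch only: shows the setup ordering after this merge.
class SetupOrderSketch {

  static Vertx vertx;

  static void setUp(KafkaConfig kafkaConfig) {
    // Create the Vert.x instance first...
    vertx = Vertx.vertx();
    // ...so the publisher registration below receives a live instance.
    EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, 1);
  }
}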
@@ -202,8 +201,12 @@ public void shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSuc
   public static void tearDownClass(TestContext context) {
     Async async = context.async();
     vertx.close(ar -> {
-      cluster.stop();
-      async.complete();
+      if (ar.succeeded()) {
+        cluster.stop();
+        async.complete();
+      } else {
+        context.fail(ar.cause());
+      }
     });
   }

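The same hardened teardown appears again in the handler test further down: cluster.stop() now runs only after vertx.close reports success, and a failed close is surfaced through context.fail instead of being swallowed. A self-contained sketch of the pattern, with the test class name and the trivial test case invented for illustration and only calls shown elsewhere in this diff reused:

import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;

import io.vertx.core.Vertx;
import io.vertx.ext.unit.Async;
import io.vertx.ext.unit.TestContext;
import io.vertx.ext.unit.junit.VertxUnitRunner;
import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;

// Illustrative sketch only: mirrors the teardown shape introduced by this merge.
@RunWith(VertxUnitRunner.class)
public class KafkaVerticleTeardownSketchTest {

  private static Vertx vertx;
  private static EmbeddedKafkaCluster cluster;

  @BeforeClass
  public static void setUpClass() {
    vertx = Vertx.vertx();
    cluster = provisionWith(defaultClusterConfig());
    cluster.start();
  }

  @Test
  public void clusterIsRunning(TestContext context) {
    context.assertNotNull(cluster.getBrokerList());
  }

  @AfterClass
  public static void tearDownClass(TestContext context) {
    Async async = context.async();
    vertx.close(ar -> {
      if (ar.succeeded()) {
        // Stop the embedded Kafka cluster only once Vert.x has shut down cleanly.
        cluster.stop();
        async.complete();
      } else {
        // Fail the test context instead of silently completing on a broken close.
        context.fail(ar.cause());
      }
    });
  }
}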
@@ -24,7 +24,6 @@
 @RunWith(VertxUnitRunner.class)
 public class MarcHridSetConsumerVerticleTest {

-  private static final String TENANT_ID = "diku";
   private static final String KAFKA_ENV_NAME = "test-env";
   private static Vertx vertx = Vertx.vertx();

@@ -1,4 +1,4 @@
-package org.folio.inventory.dataimport.consumers;
+package org.folio.inventory.dataimport.handlers;

 import com.github.tomakehurst.wiremock.client.WireMock;
 import com.github.tomakehurst.wiremock.common.Slf4jNotifier;
@@ -24,6 +24,7 @@
 import org.folio.inventory.consortium.cache.ConsortiumDataCache;
 import org.folio.inventory.dataimport.cache.MappingMetadataCache;
 import org.folio.inventory.dataimport.cache.ProfileSnapshotCache;
+import org.folio.inventory.dataimport.consumers.DataImportKafkaHandler;
 import org.folio.inventory.storage.Storage;
 import org.folio.kafka.KafkaConfig;
 import org.folio.processing.events.EventManager;
@@ -135,11 +136,15 @@ public static void setUpClass() {
   }

   @AfterClass
-  public static void afterClass(TestContext context) {
+  public static void tearDownClass(TestContext context) {
     Async async = context.async();
     vertx.close(ar -> {
-      cluster.stop();
-      async.complete();
+      if (ar.succeeded()) {
+        cluster.stop();
+        async.complete();
+      } else {
+        context.fail(ar.cause());
+      }
     });
   }

@@ -1,4 +1,4 @@
-package org.folio.inventory.dataimport.consumers;
+package org.folio.inventory.dataimport.handlers;

 import io.vertx.core.Future;
 import io.vertx.core.json.Json;
@@ -13,6 +13,7 @@
 import org.folio.inventory.common.domain.Failure;
 import org.folio.inventory.common.domain.Success;
 import org.folio.inventory.dataimport.cache.MappingMetadataCache;
+import org.folio.inventory.dataimport.consumers.MarcBibInstanceHridSetKafkaHandler;
 import org.folio.inventory.dataimport.handlers.actions.InstanceUpdateDelegate;
 import org.folio.inventory.domain.instances.Instance;
 import org.folio.inventory.domain.instances.InstanceCollection;
@@ -1,4 +1,4 @@
-package org.folio.inventory.dataimport.consumers;
+package org.folio.inventory.dataimport.handlers;

 import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
 import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;
@@ -37,6 +37,7 @@
 import org.folio.inventory.common.Context;
 import org.folio.inventory.common.domain.Success;
 import org.folio.inventory.dataimport.cache.MappingMetadataCache;
+import org.folio.inventory.dataimport.consumers.MarcBibUpdateKafkaHandler;
 import org.folio.inventory.dataimport.handlers.actions.InstanceUpdateDelegate;
 import org.folio.inventory.domain.instances.Instance;
 import org.folio.inventory.domain.instances.InstanceCollection;
@@ -1,4 +1,4 @@
-package org.folio.inventory.dataimport.consumers;
+package org.folio.inventory.dataimport.handlers;

 import static com.github.tomakehurst.wiremock.client.WireMock.get;
 import static org.folio.inventory.dataimport.consumers.MarcHoldingsRecordHridSetKafkaHandler.JOB_EXECUTION_ID_KEY;
@@ -36,6 +36,7 @@
 import org.folio.HoldingsType;
 import org.folio.inventory.common.domain.Failure;
 import org.folio.inventory.dataimport.cache.MappingMetadataCache;
+import org.folio.inventory.dataimport.consumers.MarcHoldingsRecordHridSetKafkaHandler;
 import org.folio.inventory.domain.HoldingsRecordsSourceCollection;
 import org.folio.inventory.services.HoldingsCollectionService;
 import org.folio.processing.mapping.defaultmapper.processor.parameters.MappingParameters;
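The last four files all follow the same mechanical pattern: each test class moves from the org.folio.inventory.dataimport.consumers package to org.folio.inventory.dataimport.handlers, so the handler it exercises must now be imported explicitly from its original package. A skeleton of that shape; the test class name and field are hypothetical, only the handler class and packages come from the diff:

// Illustrative skeleton only: the real test bodies are unchanged by the package move.
package org.folio.inventory.dataimport.handlers;

// The handler under test still lives in the consumers package, hence the explicit import.
import org.folio.inventory.dataimport.consumers.DataImportKafkaHandler;

public class DataImportKafkaHandlerMoveSketchTest {

  private DataImportKafkaHandler dataImportKafkaHandler;

  // ... fixtures and test cases exactly as before the move ...
}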
