diff --git a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTable.java b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTable.java
new file mode 100644
index 000000000000..78ffb94785ec
--- /dev/null
+++ b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTable.java
@@ -0,0 +1,226 @@
+package com.dotmarketing.startup.runonce;
+
+import com.dotcms.content.elasticsearch.business.ESContentletAPIImpl;
+import com.dotcms.contenttype.model.field.Field;
+import com.dotcms.notifications.bean.NotificationType;
+import com.dotcms.util.JsonUtil;
+import com.dotmarketing.business.APILocator;
+import com.dotmarketing.common.db.DotConnect;
+import com.dotmarketing.common.db.DotDatabaseMetaData;
+import com.dotmarketing.common.db.Params;
+import com.dotmarketing.db.DbConnectionFactory;
+import com.dotmarketing.exception.DotDataException;
+import com.dotmarketing.exception.DotRuntimeException;
+import com.dotmarketing.startup.StartupTask;
+import com.dotmarketing.util.Logger;
+import com.dotmarketing.util.StringUtils;
+import com.dotmarketing.util.UtilMethods;
+import com.liferay.util.StringPool;
+import io.vavr.control.Try;
+import org.jetbrains.annotations.NotNull;
+import org.postgresql.util.PGobject;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.Array;
+import java.sql.SQLException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import static com.dotcms.util.CollectionsUtils.list;
+
+/**
+ * Upgrade Task that creates and populates the unique_fields table used for Unique Field Validation.
+ *
+ * This introduces a new table, unique_fields, which will be used to validate fields that must be unique.
+ *
+ * Table structure:
+ *
+ * CREATE TABLE unique_fields (
+ *     unique_key_val VARCHAR(64) PRIMARY KEY,
+ *     supporting_values JSONB
+ * );
+ *
+ * - unique_key_val: stores a hash created from a combination of the following:
+ *     - Content type ID
+ *     - Field variable name
+ *     - Field value
+ *     - Language ID
+ *     - Host ID (only if the uniquePerSite option is enabled)
+ *
+ * - supporting_values: contains a JSON object with the following format:
+ *
+ * {
+ *     "contentTypeID": "",
+ *     "fieldVariableName": "",
+ *     "fieldValue": "",
+ *     "languageId": "",
+ *     "hostId": "",
+ *     "uniquePerSite": true|false,
+ *     "contentletsId": [...]
+ * }
+ *
+ * The contentletsId array holds the IDs of contentlets with the same field value that existed before the
+ * database was upgraded. After the upgrade, no more contentlets with duplicate values will be allowed.
+ *
+ * Additional details:
+ *
+ * - The Host ID is included in the hash calculation only if the uniquePerSite field variable is enabled.
+ * - The unique_key_val field ensures that only truly unique values can be inserted moving forward.
+ * - This upgrade task also populates the unique_fields table with the existing unique field values from the
+ *   current database.
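+ *
+ * Illustrative example (hypothetical values; this assumes StringUtils.hashText produces a SHA-256 hex digest,
+ * as the uniquePerSite test added in this change suggests): for a content type with ID "abc123", a unique field
+ * "sku" whose value is "ABC-001" in language 1, and uniquePerSite disabled, the stored row would look roughly like:
+ *
+ * unique_key_val    = sha256("abc123" + "sku" + "1" + "ABC-001")
+ * supporting_values = {"contentTypeID": "abc123", "fieldVariableName": "sku", "fieldValue": "ABC-001",
+ *                      "languageId": 1, "hostId": "...", "uniquePerSite": false, "contentletsId": ["..."]}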
+ */
+public class Task241007CreateUniqueFieldsTable implements StartupTask {
+
+    private static final String CREATE_TABLE_QUERY = "CREATE TABLE IF NOT EXISTS unique_fields (" +
+            "unique_key_val VARCHAR(64) PRIMARY KEY," +
+            "supporting_values JSONB" +
+            " )";
+
+    private static final String RETRIEVE_UNIQUE_FIELD_VALUES_QUERY = "SELECT structure.inode AS content_type_id," +
+            " field.velocity_var_name AS field_var_name," +
+            " contentlet.language_id AS language_id," +
+            " identifier.host_inode AS host_id," +
+            " jsonb_extract_path_text(contentlet_as_json -> 'fields', field.velocity_var_name)::jsonb ->> 'value' AS field_value," +
+            " ARRAY_AGG(contentlet.identifier) AS contentlet_identifier" +
+            " FROM contentlet" +
+            " INNER JOIN structure ON structure.inode = contentlet.structure_inode" +
+            " INNER JOIN field ON structure.inode = field.structure_inode" +
+            " INNER JOIN identifier ON contentlet.identifier = identifier.id" +
+            " WHERE jsonb_extract_path_text(contentlet_as_json->'fields', field.velocity_var_name) IS NOT NULL AND " +
+            " field.unique_ = true " +
+            " GROUP BY structure.inode," +
+            " field.velocity_var_name," +
+            " contentlet.language_id," +
+            " identifier.host_inode," +
+            " jsonb_extract_path_text(contentlet_as_json -> 'fields', field.velocity_var_name)::jsonb ->> 'value'";
+    private static final String INSERT_UNIQUE_FIELDS_QUERY = "INSERT INTO unique_fields(unique_key_val, supporting_values) VALUES(?, ?)";
+
+    @Override
+    public boolean forceRun() {
+        try {
+            final DotDatabaseMetaData databaseMetaData = new DotDatabaseMetaData();
+            return !databaseMetaData.tableExists(DbConnectionFactory.getConnection(), "unique_fields");
+        } catch (SQLException e) {
+            Logger.error(this, e.getMessage(), e);
+            return false;
+        }
+    }
+
+    @Override
+    public void executeUpgrade() throws DotDataException, DotRuntimeException {
+
+        if (forceRun()) {
+            createUniqueFieldTable();
+
+            try {
+                populate();
+            } catch (SQLException e) {
+                throw new DotDataException(e);
+            }
+        }
+    }
+
+    /**
+     * Populates the unique_fields table with the existing unique field values.
+     *
+     * @throws DotDataException
+     * @throws SQLException
+     */
+    private void populate() throws DotDataException, SQLException {
+        final List<Map<String, Object>> uniqueFieldsValues = retrieveUniqueFieldsValues();
+
+        final List<Params> params = new ArrayList<>();
+
+        for (final Map<String, Object> uniqueFieldsValue : uniqueFieldsValues) {
+
+            final String hash = calculateHash(uniqueFieldsValue);
+            final List<String> contentlets = Arrays.stream(((String[]) ((Array) uniqueFieldsValue.get("contentlet_identifier"))
+                    .getArray())).collect(Collectors.toList());
+
+            final Map<String, Object> supportingValues = Map.of(
+                    "contentTypeID", uniqueFieldsValue.get("content_type_id"),
+                    "fieldVariableName", uniqueFieldsValue.get("field_var_name"),
+                    "fieldValue", uniqueFieldsValue.get("field_value"),
+                    "languageId", Long.parseLong(uniqueFieldsValue.get("language_id").toString()),
+                    "hostId", uniqueFieldsValue.get("host_id"),
+                    "uniquePerSite", false,
+                    "contentletsId", contentlets
+            );
+
+            final Params insertParams = new Params.Builder().add(hash, getJSONObject(supportingValues)).build();
+            params.add(insertParams);
+        }
+
+        try {
+            insertUniqueFieldsRegister(params);
+        } catch (DotDataException e) {
+            throw new DotRuntimeException(e);
+        }
+    }
+
+    @NotNull
+    private static PGobject getJSONObject(final Map<String, Object> supportingValues) {
+        final PGobject supportingValuesParam = new PGobject();
+        supportingValuesParam.setType("json");
+        Try.run(() -> supportingValuesParam.setValue(JsonUtil.getJsonAsString(supportingValues))).getOrElseThrow(
+                () ->
+                        new IllegalArgumentException("Invalid JSON"));
+        return supportingValuesParam;
+    }
+
+    /**
+     * Inserts a new record into the unique_fields table.
+     *
+     * @param listOfParams
+     * @throws DotDataException
+     */
+    private void insertUniqueFieldsRegister(final Collection<Params> listOfParams) throws DotDataException {
+
+        new DotConnect().executeBatch(INSERT_UNIQUE_FIELDS_QUERY, listOfParams);
+    }
+
+    /**
+     * Calculates the hash used as the value of the 'unique_key_val' column in the unique_fields table.
+     *
+     * @param uniqueFieldsValue
+     * @return
+     * @throws DotDataException
+     */
+    private static String calculateHash(final Map<String, Object> uniqueFieldsValue) throws DotDataException {
+        final String contentTypeId = uniqueFieldsValue.get("content_type_id").toString();
+        final String fieldVariableName = uniqueFieldsValue.get("field_var_name").toString();
+
+        final Field uniqueField = APILocator.getContentTypeFieldAPI().byContentTypeIdAndVar(contentTypeId, fieldVariableName);
+
+        final boolean uniqueForSite = uniqueField.fieldVariableValue(ESContentletAPIImpl.UNIQUE_PER_SITE_FIELD_VARIABLE_NAME)
+                .map(Boolean::valueOf).orElse(false);
+
+        final String valueToHash_1 = contentTypeId + fieldVariableName +
+                uniqueFieldsValue.get("language_id").toString() +
+                uniqueFieldsValue.get("field_value").toString() +
+                (uniqueForSite ? uniqueFieldsValue.get("host_id").toString() : StringPool.BLANK);
+
+        return StringUtils.hashText(valueToHash_1);
+    }
+
+    /**
+     * Creates the unique_fields table.
+     *
+     * @throws DotDataException
+     */
+    private static void createUniqueFieldTable() throws DotDataException {
+        new DotConnect().setSQL(CREATE_TABLE_QUERY).loadObjectResults();
+    }
+
+    /**
+     * Retrieves the existing unique field values; this data is later used to populate the unique_fields table.
+     *
+     * @return
+     * @throws DotDataException
+     */
+    private static List<Map<String, Object>> retrieveUniqueFieldsValues() throws DotDataException {
+        return new DotConnect().setSQL(RETRIEVE_UNIQUE_FIELD_VALUES_QUERY).loadObjectResults();
+    }
+}
diff --git a/dotcms-integration/src/test/java/com/dotcms/MainSuite2b.java b/dotcms-integration/src/test/java/com/dotcms/MainSuite2b.java
index c32833b30070..8af95fceeb2c 100644
--- a/dotcms-integration/src/test/java/com/dotcms/MainSuite2b.java
+++ b/dotcms-integration/src/test/java/com/dotcms/MainSuite2b.java
@@ -138,6 +138,7 @@
 @RunWith(MainBaseSuite.class)
 @SuiteClasses({
+        Task241007CreateUniqueFieldsTableTest.class,
         Task220825CreateVariantFieldTest.class,
         Task221007AddVariantIntoPrimaryKeyTest.class,
         com.dotcms.rest.api.v1.template.TemplateResourceTest.class,
diff --git a/dotcms-integration/src/test/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTableTest.java b/dotcms-integration/src/test/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTableTest.java
new file mode 100644
index 000000000000..5d08e8aed65b
--- /dev/null
+++ b/dotcms-integration/src/test/java/com/dotmarketing/startup/runonce/Task241007CreateUniqueFieldsTableTest.java
@@ -0,0 +1,350 @@
+package com.dotmarketing.startup.runonce;
+
+import com.dotcms.contenttype.model.field.Field;
+import com.dotcms.contenttype.model.field.ImmutableTextField;
+import com.dotcms.contenttype.model.field.TextField;
+import com.dotcms.contenttype.model.type.ContentType;
+import com.dotcms.datagen.ContentTypeDataGen;
+import com.dotcms.datagen.ContentletDataGen;
+import com.dotcms.datagen.FieldDataGen;
+import com.dotcms.datagen.FieldVariableDataGen;
+import com.dotcms.util.IntegrationTestInitService;
+import com.dotcms.util.JsonUtil;
+import com.dotmarketing.business.APILocator;
+import com.dotmarketing.common.db.DotConnect;
+import com.dotmarketing.exception.DotDataException;
+import com.dotmarketing.exception.DotSecurityException;
+import com.dotmarketing.portlets.contentlet.model.Contentlet;
+import com.dotmarketing.util.StringUtils;
+import graphql.AssertException;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import static com.dotcms.content.elasticsearch.business.ESContentletAPIImpl.UNIQUE_PER_SITE_FIELD_VARIABLE_NAME;
+import static org.junit.Assert.*;
+
+/**
+ * Test of {@link Task241007CreateUniqueFieldsTable}
+ */
+public class Task241007CreateUniqueFieldsTableTest {
+
+    @BeforeClass
+    public static void prepare() throws Exception {
+        IntegrationTestInitService.getInstance().init();
+    }
+
+    @Before
+    public void cleaningUp() throws DotDataException {
+        new DotConnect().setSQL("DROP TABLE IF EXISTS unique_fields CASCADE").loadObjectResults();
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#forceRun()}
+     * When: the table does not exist
+     * Should: return true
+     */
+    @Test
+    public void runForce() {
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the table does not exist and the method is run
+     * Should: create the table, and the forceRun method must return false
+     */
+    @Test
+    public void createTable() throws DotDataException {
+
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the method is run twice
+     * Should: not throw any Exception
+     */
+    @Test
+    public void runTwice() throws DotDataException {
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the method is run and Contentlets with a unique field already exist
+     * Should: populate the table with those values
+     */
+    @Test
+    public void populate() throws DotDataException, NoSuchAlgorithmException, IOException {
+        final Field titleField = new FieldDataGen().type(TextField.class).name("title").next();
+        final Field uniqueField = new FieldDataGen().type(TextField.class).name("unique").unique(true).next();
+
+        final ContentType contentType = new ContentTypeDataGen().field(titleField).field(uniqueField).nextPersisted();
+
+        final Contentlet contentlet_1 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_1_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), "Unique_1_" + System.currentTimeMillis())
+                .nextPersisted();
+
+        final Contentlet contentlet_2 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_2_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), "Unique_2_" + System.currentTimeMillis())
+                .nextPersisted();
+
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+
+        final List<Map<String, Object>> results = new DotConnect().setSQL("SELECT * from unique_fields").loadObjectResults();
+
+        assertFalse(results.isEmpty());
+
+        final String valueToHash_1 = getHash(contentType, uniqueField, contentlet_1);
+        final String valueToHash_2 = getHash(contentType, uniqueField, contentlet_2);
+
+        final Map<String, Object> result_1 = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_1))
+                .limit(1)
+                .findFirst()
+                .orElseThrow(() -> new AssertException("contentlet_1 expected"));
+
+        final Map<String, Object> result_2 = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_2))
+                .limit(1)
+                .findFirst()
+                .orElseThrow(() -> new AssertException("contentlet_2 expected"));
+
+        checkSupportingValues(result_1, contentType, uniqueField, contentlet_1);
+        checkSupportingValues(result_2, contentType, uniqueField, contentlet_2);
+    }
+
+    private static void checkSupportingValues(Map<String, Object> result_1, ContentType contentType, Field uniqueField,
+                                              Contentlet... contentlets) throws IOException {
+        final Map<String, Object> supportingValues_1 = JsonUtil.getJsonFromString(result_1.get("supporting_values").toString());
+        assertEquals(contentType.id(), supportingValues_1.get("contentTypeID"));
+        assertEquals(uniqueField.variable(), supportingValues_1.get("fieldVariableName"));
+        assertEquals(contentlets[0].get(uniqueField.variable()), supportingValues_1.get("fieldValue"));
+        assertEquals(contentlets[0].getLanguageId(), Long.parseLong(supportingValues_1.get("languageId").toString()));
+        assertEquals(contentlets[0].getHost(), supportingValues_1.get("hostId"));
+        assertEquals(false, supportingValues_1.get("uniquePerSite"));
+        assertEquals(contentlets.length, ((List<String>) supportingValues_1.get("contentletsId")).size());
+        assertEquals(Arrays.stream(contentlets).map(Contentlet::getIdentifier).sorted().collect(Collectors.toList()),
+                ((List<String>) supportingValues_1.get("contentletsId")).stream().sorted().collect(Collectors.toList()));
+    }
+
+    private static String getHash(ContentType contentType, Field uniqueField, Contentlet contentlet_1) throws NoSuchAlgorithmException {
+        final String valueToHash_1 = contentType.id() + uniqueField.variable() + contentlet_1.getLanguageId() +
+                contentlet_1.get(uniqueField.variable());
+        return StringUtils.hashText(valueToHash_1);
+    }
+
+    private static String getHashIncludeSiteId(ContentType contentType, Field uniqueField, Contentlet contentlet)
+            throws NoSuchAlgorithmException {
+        final String valueToHash_1 = contentType.id() + uniqueField.variable() + contentlet.getLanguageId() +
+                contentlet.get(uniqueField.variable()) + contentlet.getHost();
+        final MessageDigest digest = MessageDigest.getInstance("SHA-256");
+        final byte[] hashBytes = digest.digest(valueToHash_1.getBytes());
+
+        final StringBuilder hexString = new StringBuilder();
+        for (byte b : hashBytes) {
+            hexString.append(String.format("%02x", b));
+        }
+
+        return hexString.toString();
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the method is run and Contentlets with duplicated values for a unique field already exist
+     * Should: populate the table with those values and store all the duplicated Contentlets in the contentletsId attribute
+     */
+    @Test
+    public void populateWhenExistsDuplicatedValues() throws DotDataException, NoSuchAlgorithmException, IOException, DotSecurityException {
+        final Field titleField = new FieldDataGen().type(TextField.class).name("title").next();
+        final Field uniqueField = new FieldDataGen().type(TextField.class).name("unique").next();
+
+        final ContentType contentType = new ContentTypeDataGen().field(titleField).field(uniqueField).nextPersisted();
+        final String uniqueValue = "Unique_" + System.currentTimeMillis();
+
+        final Contentlet contentlet_1 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_1_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), uniqueValue)
+                .nextPersisted();
+
+        final Contentlet contentlet_2 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_2_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), uniqueValue)
+                .nextPersisted();
+
+        final ImmutableTextField uniqueFieldUpdated = ImmutableTextField.builder()
+                .from(uniqueField)
+                .unique(true)
+                .contentTypeId(contentType.id())
+                .build();
+
+        APILocator.getContentTypeFieldAPI().save(uniqueFieldUpdated, APILocator.systemUser());
+
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+
+        final List<Map<String, Object>> results = new DotConnect().setSQL("SELECT * from unique_fields").loadObjectResults();
+
+        assertFalse(results.isEmpty());
+
+        final String valueToHash_1 = getHash(contentType, uniqueField, contentlet_1);
+
+        final List<Map<String, Object>> uniqueValuesResult = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_1))
+                .collect(Collectors.toList());
+
+        assertEquals(1, uniqueValuesResult.size());
+
+        checkSupportingValues(uniqueValuesResult.get(0), contentType, uniqueField, contentlet_1, contentlet_2);
+    }
+
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the method is run and Contentlets with duplicated values for a non-unique field already exist
+     * Should: not insert any row for that field
+     */
+    @Test
+    public void populateWhenExistsDuplicatedValuesButNotUniqueField() throws DotDataException, NoSuchAlgorithmException, IOException, DotSecurityException {
+        final Field titleField = new FieldDataGen().type(TextField.class).name("title").next();
+        final Field uniqueField = new FieldDataGen().type(TextField.class).name("unique").next();
+
+        final ContentType contentType = new ContentTypeDataGen().field(titleField).field(uniqueField).nextPersisted();
+        final String uniqueValue = "Unique_" + System.currentTimeMillis();
+
+        final Contentlet contentlet_1 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_1_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), uniqueValue)
+                .nextPersisted();
+
+        final Contentlet contentlet_2 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_2_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), uniqueValue)
+                .nextPersisted();
+
+        final ImmutableTextField uniqueFieldUpdated = ImmutableTextField.builder()
+                .from(uniqueField)
+                .contentTypeId(contentType.id())
+                .build();
+
+        APILocator.getContentTypeFieldAPI().save(uniqueFieldUpdated, APILocator.systemUser());
+
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+
+        final List<Map<String, Object>> results = new DotConnect().setSQL("SELECT * from unique_fields").loadObjectResults();
+
+        final String valueToHash_1 = getHash(contentType, uniqueField, contentlet_1);
+
+        final List<Map<String, Object>> uniqueValuesResult = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_1))
+                .collect(Collectors.toList());
+
+        assertTrue(uniqueValuesResult.isEmpty());
+    }
+
+    /**
+     * Method to test: {@link Task241007CreateUniqueFieldsTable#executeUpgrade()}
+     * When: the method is run and Contentlets with a unique field and uniquePerSite enabled already exist
+     * Should: populate the table with those values and use the siteId to calculate the hash
+     */
+    @Test
+    public void populateWithUniquePerSiteEnabled() throws DotDataException, NoSuchAlgorithmException, IOException {
+        final Field titleField = new FieldDataGen().type(TextField.class).name("title").next();
+        final Field uniqueField = new FieldDataGen().type(TextField.class).name("unique").unique(true).next();
+
+        final ContentType contentType = new ContentTypeDataGen().field(titleField).field(uniqueField).nextPersisted();
+
+        new FieldVariableDataGen()
+                .key(UNIQUE_PER_SITE_FIELD_VARIABLE_NAME)
+                .value("true")
+                .field(contentType.fields().stream()
+                        .filter(field -> field.variable().equals(uniqueField.variable()))
+                        .limit(1)
+                        .findFirst()
+                        .orElseThrow())
+                .nextPersisted();
+
+        final Contentlet contentlet_1 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_1_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), "Unique_1_" + System.currentTimeMillis())
+                .nextPersisted();
+
+        final Contentlet contentlet_2 = new ContentletDataGen(contentType)
+                .setProperty(titleField.variable(), "Title_2_" + System.currentTimeMillis())
+                .setProperty(uniqueField.variable(), "Unique_2_" + System.currentTimeMillis())
+                .nextPersisted();
+
+        final Task241007CreateUniqueFieldsTable task241007CreateUniqueFieldsTable = new Task241007CreateUniqueFieldsTable();
+
+        assertTrue(task241007CreateUniqueFieldsTable.forceRun());
+        task241007CreateUniqueFieldsTable.executeUpgrade();
+        assertFalse(task241007CreateUniqueFieldsTable.forceRun());
+
+        final List<Map<String, Object>> results = new DotConnect().setSQL("SELECT * from unique_fields").loadObjectResults();
+
+        assertFalse(results.isEmpty());
+
+        final String valueToHash_1 = getHashIncludeSiteId(contentType, uniqueField, contentlet_1);
+        final String valueToHash_2 = getHashIncludeSiteId(contentType, uniqueField, contentlet_2);
+
+        final Map<String, Object> result_1 = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_1))
+                .limit(1)
+                .findFirst()
+                .orElseThrow(() -> new AssertException("contentlet_1 expected"));
+
+        final Map<String, Object> result_2 = results.stream()
+                .filter(result -> result.get("unique_key_val").equals(valueToHash_2))
+                .limit(1)
+                .findFirst()
+                .orElseThrow(() -> new AssertException("contentlet_2 expected"));
+
+        checkSupportingValues(result_1, contentType, uniqueField, contentlet_1);
+        checkSupportingValues(result_2, contentType, uniqueField, contentlet_2);
+
+    }
+}