diff --git a/src/main/java/org/springframework/data/redis/core/RedisTemplate.java b/src/main/java/org/springframework/data/redis/core/RedisTemplate.java index 58ec2200c3..c52142334f 100644 --- a/src/main/java/org/springframework/data/redis/core/RedisTemplate.java +++ b/src/main/java/org/springframework/data/redis/core/RedisTemplate.java @@ -86,6 +86,7 @@ * @author Denis Zavedeev * @author ihaohong * @author Chen Li + * @author Ilya Viaznin * @author Vedran Pavic * @param the Redis key type against which the template works (usually a String) * @param the Redis value type against which the template works @@ -112,8 +113,7 @@ public class RedisTemplate extends RedisAccessor implements RedisOperation private final ValueOperations valueOps = new DefaultValueOperations<>(this); private final ListOperations listOps = new DefaultListOperations<>(this); private final SetOperations setOps = new DefaultSetOperations<>(this); - private final StreamOperations streamOps = new DefaultStreamOperations<>(this, - ObjectHashMapper.getSharedInstance()); + private final StreamOperations streamOps; private final ZSetOperations zSetOps = new DefaultZSetOperations<>(this); private final GeoOperations geoOps = new DefaultGeoOperations<>(this); private final HyperLogLogOperations hllOps = new DefaultHyperLogLogOperations<>(this); @@ -122,7 +122,18 @@ public class RedisTemplate extends RedisAccessor implements RedisOperation /** * Constructs a new RedisTemplate instance. 
*/ - public RedisTemplate() {} + public RedisTemplate() { + streamOps = new DefaultStreamOperations<>(this, ObjectHashMapper.getSharedInstance()); + } + + /** + * Constructs a new {@link RedisTemplate} instance with custom hash mapper + * + * @param hashMapper Custom {@link ObjectHashMapper} instance + */ + public RedisTemplate(ObjectHashMapper hashMapper) { + streamOps = new DefaultStreamOperations<>(this, hashMapper); + } @Override public void afterPropertiesSet() { diff --git a/src/main/java/org/springframework/data/redis/core/ReferenceRedisAdapter.java b/src/main/java/org/springframework/data/redis/core/ReferenceRedisAdapter.java new file mode 100644 index 0000000000..21ae2f88c2 --- /dev/null +++ b/src/main/java/org/springframework/data/redis/core/ReferenceRedisAdapter.java @@ -0,0 +1,80 @@ +package org.springframework.data.redis.core; + +import org.springframework.data.redis.core.convert.RedisConverter; +import org.springframework.data.redis.core.convert.RedisCustomConversions; +import org.springframework.data.redis.core.convert.ReferenceMappingRedisConverter; +import org.springframework.data.redis.core.mapping.RedisMappingContext; +import org.springframework.lang.Nullable; + +/** + * Wrapper for {@link RedisKeyValueAdapter} with correct cyclic reference resolving + * + * @author Ilya Viaznin + * @see RedisKeyValueAdapter + */ +public class ReferenceRedisAdapter extends RedisKeyValueAdapter { + + /** + * Evaluation caching + */ + private boolean isReferenceConverter; + + /** + * Creates new {@link ReferenceRedisAdapter} with default {@link RedisMappingContext} and default + * {@link RedisCustomConversions}. + * + * @param redisOps must not be {@literal null}. + */ + public ReferenceRedisAdapter(RedisOperations redisOps) { + this(redisOps, new RedisMappingContext()); + } + + /** + * Creates new {@link ReferenceRedisAdapter} with default {@link RedisCustomConversions}. + * + * @param redisOps must not be {@literal null}. 
+ * @param mappingContext must not be {@literal null}. + */ + public ReferenceRedisAdapter(RedisOperations redisOps, RedisMappingContext mappingContext) { + this(redisOps, mappingContext, new RedisCustomConversions()); + } + + /** + * Creates new {@link ReferenceRedisAdapter}. + * + * @param redisOps must not be {@literal null}. + * @param mappingContext must not be {@literal null}. + * @param customConversions can be {@literal null}. + * @since 2.0 + */ + public ReferenceRedisAdapter(RedisOperations redisOps, RedisMappingContext mappingContext, + @Nullable org.springframework.data.convert.CustomConversions customConversions) { + super(redisOps, mappingContext, customConversions); + } + + /** + * Creates new {@link ReferenceRedisAdapter} with specific {@link RedisConverter}. + * + * @param redisOps must not be {@literal null}. + * @param redisConverter must not be {@literal null}. + */ + public ReferenceRedisAdapter(RedisOperations redisOps, RedisConverter redisConverter) { + super(redisOps, redisConverter); + isReferenceConverter = redisConverter instanceof ReferenceMappingRedisConverter; + } + + @Override + public T get(Object id, String keyspace, Class type) { + T val; + if (isReferenceConverter) { + var converter = (ReferenceMappingRedisConverter) getConverter(); + converter.clearResolvedCtx(); + val = super.get(id, keyspace, type); + converter.clearResolvedCtx(); + } + else + val = super.get(id, keyspace, type); + + return val; + } +} diff --git a/src/main/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverter.java b/src/main/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverter.java new file mode 100644 index 0000000000..041fb47a44 --- /dev/null +++ b/src/main/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverter.java @@ -0,0 +1,1423 @@ +package org.springframework.data.redis.core.convert; + +import org.springframework.beans.factory.InitializingBean; +import 
org.springframework.core.CollectionFactory; +import org.springframework.core.convert.ConversionService; +import org.springframework.core.convert.ConverterNotFoundException; +import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.core.convert.support.GenericConversionService; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mapping.*; +import org.springframework.data.mapping.model.EntityInstantiator; +import org.springframework.data.mapping.model.EntityInstantiators; +import org.springframework.data.mapping.model.PersistentEntityParameterValueProvider; +import org.springframework.data.mapping.model.PropertyValueProvider; +import org.springframework.data.redis.core.PartialUpdate; +import org.springframework.data.redis.core.PartialUpdate.PropertyUpdate; +import org.springframework.data.redis.core.PartialUpdate.UpdateCommand; +import org.springframework.data.redis.core.index.Indexed; +import org.springframework.data.redis.core.mapping.RedisMappingContext; +import org.springframework.data.redis.core.mapping.RedisPersistentEntity; +import org.springframework.data.redis.core.mapping.RedisPersistentProperty; +import org.springframework.data.redis.util.ByteUtils; +import org.springframework.data.util.ProxyUtils; +import org.springframework.data.util.TypeInformation; +import org.springframework.lang.Nullable; +import org.springframework.util.*; +import org.springframework.util.comparator.NullSafeComparator; + +import java.lang.reflect.Array; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * {@link RedisConverter} implementation creating flat binary map structure out of a given domain type. Considers {@link Indexed} annotation for enabling helper structures for + * finder operations.

+ * NOTE {@link ReferenceMappingRedisConverter} is an {@link InitializingBean} and requires + * {@link ReferenceMappingRedisConverter#afterPropertiesSet()} to be called. + * + *
+ * 
+ * @RedisHash("persons")
+ * class Person {
+ *
+ *   @Id String id;
+ *   String firstname, lastname;
+ *
+ *   List<String> nicknames;
+ *   List<Person> coworkers;
+ *
+ *   Address address;
+ *   @Reference Country nationality;
+ * }
+ * 
+ * 
+ *

+ * The above is represented as: + * + *

+ * 
+ * _class=org.example.Person
+ * id=1
+ * firstname=rand
+ * lastname=al'thor
+ * coworkers.[0].firstname=mat
+ * coworkers.[0].nicknames.[0]=prince of the ravens
+ * coworkers.[1].firstname=perrin
+ * coworkers.[1].address.city=two rivers
+ * nationality=nationality:andora
+ * 
+ * 
+ * + * @author Christoph Strobl + * @author Greg Turnquist + * @author Mark Paluch + * @author Golam Mazid Sajib + * @author Ilya Viaznin + */ +public class ReferenceMappingRedisConverter implements RedisConverter, InitializingBean { + + private static final String INVALID_TYPE_ASSIGNMENT = "Value of type %s cannot be assigned to property %s of type %s"; + + private final RedisMappingContext mappingContext; + + private final GenericConversionService conversionService; + + private final EntityInstantiators entityInstantiators; + + private final RedisTypeMapper typeMapper; + + private final Comparator listKeyComparator = new NullSafeComparator<>(NaturalOrderingKeyComparator.INSTANCE, + true); + + private IndexResolver indexResolver; + + private @Nullable ReferenceResolver referenceResolver; + + private CustomConversions customConversions; + + /** + * Tmp store for resolved references + */ + private final Map resolvedCtx = new ConcurrentHashMap<>(); + + /** + * Creates new {@link ReferenceMappingRedisConverter}. + * + * @param context can be {@literal null}. + * + * @since 2.4 + */ + public ReferenceMappingRedisConverter(RedisMappingContext context) { + this(context, null, null, null); + } + + /** + * Creates new {@link ReferenceMappingRedisConverter} and defaults {@link RedisMappingContext} when {@literal null}. + * + * @param mappingContext can be {@literal null}. + * @param indexResolver can be {@literal null}. + * @param referenceResolver can be not be {@literal null}. + */ + public ReferenceMappingRedisConverter(@Nullable RedisMappingContext mappingContext, @Nullable IndexResolver indexResolver, + @Nullable ReferenceResolver referenceResolver) { + this(mappingContext, indexResolver, referenceResolver, null); + } + + /** + * Creates new {@link ReferenceMappingRedisConverter} and defaults {@link RedisMappingContext} when {@literal null}. + * + * @param mappingContext can be {@literal null}. + * @param indexResolver can be {@literal null}. 
+ * @param referenceResolver can be {@literal null}. + * @param typeMapper can be {@literal null}. + * + * @since 2.1 + */ + public ReferenceMappingRedisConverter(@Nullable RedisMappingContext mappingContext, @Nullable IndexResolver indexResolver, + @Nullable ReferenceResolver referenceResolver, @Nullable RedisTypeMapper typeMapper) { + + this.mappingContext = mappingContext != null ? mappingContext : new RedisMappingContext(); + + this.entityInstantiators = new EntityInstantiators(); + this.conversionService = new DefaultConversionService(); + this.customConversions = new RedisCustomConversions(); + this.typeMapper = typeMapper != null ? typeMapper + : new DefaultRedisTypeMapper(DefaultRedisTypeMapper.DEFAULT_TYPE_KEY, this.mappingContext); + + this.indexResolver = indexResolver != null ? indexResolver : new PathIndexResolver(this.mappingContext); + this.referenceResolver = referenceResolver; + } + + @Override + @SuppressWarnings("unchecked") + public R read(Class type, RedisData source) { + TypeInformation readType = typeMapper.readType(source.getBucket().getPath(), TypeInformation.of(type)); + + return readType.isCollectionLike() + ? (R) readCollectionOrArray("", ArrayList.class, Object.class, source.getBucket()) + : doReadInternal("", type, source); + + } + + @Nullable + private R readInternal(String path, Class type, RedisData source) { + return source.getBucket().isEmpty() ? 
null : doReadInternal(path, type, source); + } + + @SuppressWarnings("unchecked") + private R doReadInternal(String path, Class type, RedisData source) { + + TypeInformation readType = typeMapper.readType(source.getBucket().getPath(), TypeInformation.of(type)); + + if (customConversions.hasCustomReadTarget(Map.class, readType.getType())) { + + Map partial = new HashMap<>(); + + if (!path.isEmpty()) { + + for (Map.Entry entry : source.getBucket().extract(path + ".").entrySet()) { + partial.put(entry.getKey().substring(path.length() + 1), entry.getValue()); + } + + } + else { + partial.putAll(source.getBucket().asMap()); + } + R instance = (R) conversionService.convert(partial, readType.getType()); + + RedisPersistentEntity entity = mappingContext.getPersistentEntity(readType); + if (entity != null && entity.hasIdProperty()) { + + PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(instance); + + propertyAccessor.setProperty(entity.getRequiredIdProperty(), source.getId()); + instance = propertyAccessor.getBean(); + } + return instance; + } + + if (conversionService.canConvert(byte[].class, readType.getType())) { + return (R) conversionService.convert(source.getBucket().get(StringUtils.hasText(path) ? 
path : "_raw"), + readType.getType()); + } + + RedisPersistentEntity entity = mappingContext.getRequiredPersistentEntity(readType); + EntityInstantiator instantiator = entityInstantiators.getInstantiatorFor(entity); + + Object instance = instantiator.createInstance((RedisPersistentEntity) entity, + new PersistentEntityParameterValueProvider<>(entity, + new ConverterAwareParameterValueProvider(path, source, conversionService), this.conversionService)); + + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(instance); + + entity.doWithProperties((PropertyHandler) persistentProperty -> { + + InstanceCreatorMetadata creator = entity.getInstanceCreatorMetadata(); + + if (creator != null && creator.isCreatorParameter(persistentProperty)) { + return; + } + + Object targetValue = readProperty(path, source, persistentProperty); + + if (targetValue != null) { + accessor.setProperty(persistentProperty, targetValue); + } + }); + + readAssociation(path, source, entity, accessor); + + return (R) accessor.getBean(); + } + + @Nullable + protected Object readProperty(String path, RedisData source, RedisPersistentProperty persistentProperty) { + + String currentPath = !path.isEmpty() ? path + "." 
+ persistentProperty.getName() : persistentProperty.getName(); + TypeInformation typeInformation = typeMapper.readType(source.getBucket().getPropertyPath(currentPath), + persistentProperty.getTypeInformation()); + + if (typeInformation.isMap()) { + + Class mapValueType = null; + + if (typeInformation.getMapValueType() != null) { + mapValueType = typeInformation.getMapValueType().getType(); + } + + if (mapValueType == null && persistentProperty.isMap()) { + mapValueType = persistentProperty.getMapValueType(); + } + + if (mapValueType == null) { + throw new IllegalArgumentException("Unable to retrieve MapValueType"); + } + + if (conversionService.canConvert(byte[].class, mapValueType)) { + return readMapOfSimpleTypes(currentPath, typeInformation.getType(), + typeInformation.getRequiredComponentType().getType(), mapValueType, source); + } + + return readMapOfComplexTypes(currentPath, typeInformation.getType(), + typeInformation.getRequiredComponentType().getType(), mapValueType, source); + } + + if (typeInformation.isCollectionLike()) { + + if (!isByteArray(typeInformation)) { + + return readCollectionOrArray(currentPath, typeInformation.getType(), + typeInformation.getRequiredComponentType().getType(), source.getBucket()); + } + + if (!source.getBucket().hasValue(currentPath) && isByteArray(typeInformation)) { + + return readCollectionOrArray(currentPath, typeInformation.getType(), + typeInformation.getRequiredComponentType().getType(), source.getBucket()); + } + } + + if (mappingContext.getPersistentEntity(typeInformation) != null + && !conversionService.canConvert(byte[].class, typeInformation.getRequiredActualType().getType())) { + + Bucket bucket = source.getBucket().extract(currentPath + "."); + + RedisData newBucket = new RedisData(bucket); + + return readInternal(currentPath, typeInformation.getType(), newBucket); + } + + byte[] sourceBytes = source.getBucket().get(currentPath); + + if (typeInformation.getType().isPrimitive() && sourceBytes == null) { + return 
null; + } + + if (persistentProperty.isIdProperty() && ObjectUtils.isEmpty(path)) { + return sourceBytes != null ? fromBytes(sourceBytes, typeInformation.getType()) : source.getId(); + } + + if (sourceBytes == null) { + return null; + } + + if (customConversions.hasCustomReadTarget(byte[].class, persistentProperty.getType())) { + return fromBytes(sourceBytes, persistentProperty.getType()); + } + + Class typeToUse = getTypeHint(currentPath, source.getBucket(), persistentProperty.getType()); + return fromBytes(sourceBytes, typeToUse); + } + + private void readAssociation(String path, RedisData source, RedisPersistentEntity entity, + PersistentPropertyAccessor accessor) { + + entity.doWithAssociations((AssociationHandler) association -> { + + String currentPath = !path.isEmpty() ? path + "." + association.getInverse().getName() + : association.getInverse().getName(); + + if (association.getInverse().isCollectionLike()) { + + Bucket bucket = source.getBucket().extract(currentPath + ".["); + + Collection target = CollectionFactory.createCollection(association.getInverse().getType(), + association.getInverse().getComponentType(), bucket.size()); + + for (Map.Entry entry : bucket.entrySet()) { + + String referenceKey = fromBytes(entry.getValue(), String.class); + + if (!KeyspaceIdentifier.isValid(referenceKey)) { + continue; + } + + KeyspaceIdentifier identifier = KeyspaceIdentifier.of(referenceKey); + Map rawHash = referenceResolver.resolveReference(identifier.getId(), identifier.getKeyspace()); + + if (!CollectionUtils.isEmpty(rawHash) && isNotResolved(identifier)) { + var data = new RedisData(rawHash); + markAsResolved(identifier, data); + + target.add(read(association.getInverse().getActualType(), data)); + } + } + + accessor.setProperty(association.getInverse(), target); + + } + else { + + byte[] binKey = source.getBucket().get(currentPath); + if (binKey == null || binKey.length == 0) { + return; + } + + String referenceKey = fromBytes(binKey, String.class); + if 
(KeyspaceIdentifier.isValid(referenceKey)) { + + KeyspaceIdentifier identifier = KeyspaceIdentifier.of(referenceKey); + + Map rawHash = referenceResolver.resolveReference(identifier.getId(), + identifier.getKeyspace()); + + if (!CollectionUtils.isEmpty(rawHash) && isNotResolved(identifier)) { + var data = new RedisData(rawHash); + markAsResolved(identifier, data); + + accessor.setProperty(association.getInverse(), + read(association.getInverse().getActualType(), data)); + } + } + } + }); + } + + /** + * Mark path as resolved + * + * @param identifier Identifier + * @param data Redis data + */ + private void markAsResolved(KeyspaceIdentifier identifier, RedisData data) { + resolvedCtx.put(identifier.getKeyspace() + identifier.getId(), data); + } + + /** + * Check path is not resolved + * + * @param identifier Identifier + * + * @return Path is not resolved + */ + private boolean isNotResolved(KeyspaceIdentifier identifier) { + return !resolvedCtx.containsKey(identifier.getKeyspace() + identifier.getId()); + } + + /** + * Clear resolving path ctx + */ + public void clearResolvedCtx() { + resolvedCtx.clear(); + } + + @Override + @SuppressWarnings({"rawtypes"}) + public void write(Object source, RedisData sink) { + + if (source == null) { + return; + } + + if (source instanceof PartialUpdate) { + writePartialUpdate((PartialUpdate) source, sink); + return; + } + + RedisPersistentEntity entity = mappingContext.getPersistentEntity(source.getClass()); + + if (!customConversions.hasCustomWriteTarget(source.getClass())) { + typeMapper.writeType(ClassUtils.getUserClass(source), sink.getBucket().getPath()); + } + + if (entity == null) { + + typeMapper.writeType(ClassUtils.getUserClass(source), sink.getBucket().getPath()); + sink.getBucket().put("_raw", conversionService.convert(source, byte[].class)); + return; + } + + sink.setKeyspace(entity.getKeySpace()); + + if (entity.getTypeInformation().isCollectionLike()) { + writeCollection(entity.getKeySpace(), "", (List) source, 
entity.getTypeInformation().getRequiredComponentType(), + sink); + } + else { + writeInternal(entity.getKeySpace(), "", source, entity.getTypeInformation(), sink); + } + + Object identifier = entity.getIdentifierAccessor(source).getIdentifier(); + + if (identifier != null) { + sink.setId(getConversionService().convert(identifier, String.class)); + } + + Long ttl = entity.getTimeToLiveAccessor().getTimeToLive(source); + if (ttl != null && ttl > 0) { + sink.setTimeToLive(ttl); + } + + for (IndexedData indexedData : indexResolver.resolveIndexesFor(entity.getTypeInformation(), source)) { + sink.addIndexedData(indexedData); + } + } + + protected void writePartialUpdate(PartialUpdate update, RedisData sink) { + + RedisPersistentEntity entity = mappingContext.getRequiredPersistentEntity(update.getTarget()); + + write(update.getValue(), sink); + + for (String key : sink.getBucket().keySet()) { + if (typeMapper.isTypeKey(key)) { + sink.getBucket().remove(key); + break; + } + } + + if (update.isRefreshTtl() && !update.getPropertyUpdates().isEmpty()) { + + Long ttl = entity.getTimeToLiveAccessor().getTimeToLive(update); + if (ttl != null && ttl > 0) { + sink.setTimeToLive(ttl); + } + } + + for (PropertyUpdate pUpdate : update.getPropertyUpdates()) { + + String path = pUpdate.getPropertyPath(); + + if (UpdateCommand.SET.equals(pUpdate.getCmd())) { + writePartialPropertyUpdate(update, pUpdate, sink, entity, path); + } + } + } + + /** + * @param update + * @param pUpdate + * @param sink + * @param entity + * @param path + */ + private void writePartialPropertyUpdate(PartialUpdate update, PropertyUpdate pUpdate, RedisData sink, + RedisPersistentEntity entity, String path) { + + RedisPersistentProperty targetProperty = getTargetPropertyOrNullForPath(path, update.getTarget()); + + if (targetProperty == null) { + + targetProperty = getTargetPropertyOrNullForPath(path.replaceAll("\\.\\[.*\\]", ""), update.getTarget()); + + TypeInformation ti = targetProperty == null ? 
TypeInformation.OBJECT + : (targetProperty.isMap() ? (targetProperty.getTypeInformation().getMapValueType() != null + ? targetProperty.getTypeInformation().getRequiredMapValueType() + : TypeInformation.OBJECT) : targetProperty.getTypeInformation().getActualType()); + + writeInternal(entity.getKeySpace(), pUpdate.getPropertyPath(), pUpdate.getValue(), ti, sink); + return; + } + + if (targetProperty.isAssociation()) { + + if (targetProperty.isCollectionLike()) { + + RedisPersistentEntity ref = mappingContext.getPersistentEntity(targetProperty.getRequiredAssociation() + .getInverse().getTypeInformation().getRequiredComponentType() + .getRequiredActualType()); + + int i = 0; + for (Object o : (Collection) pUpdate.getValue()) { + + Object refId = ref.getPropertyAccessor(o).getProperty(ref.getRequiredIdProperty()); + if (refId != null) { + sink.getBucket().put(pUpdate.getPropertyPath() + ".[" + i + "]", toBytes(ref.getKeySpace() + ":" + refId)); + i++; + } + } + } + else { + + RedisPersistentEntity ref = mappingContext + .getRequiredPersistentEntity(targetProperty.getRequiredAssociation().getInverse().getTypeInformation()); + + Object refId = ref.getPropertyAccessor(pUpdate.getValue()).getProperty(ref.getRequiredIdProperty()); + if (refId != null) { + sink.getBucket().put(pUpdate.getPropertyPath(), toBytes(ref.getKeySpace() + ":" + refId)); + } + } + } + else if (targetProperty.isCollectionLike() && !isByteArray(targetProperty)) { + + Collection collection = pUpdate.getValue() instanceof Collection ? 
(Collection) pUpdate.getValue() + : Collections.singleton(pUpdate.getValue()); + writeCollection(entity.getKeySpace(), pUpdate.getPropertyPath(), collection, + targetProperty.getTypeInformation().getRequiredActualType(), sink); + } + else if (targetProperty.isMap()) { + + Map map = new HashMap<>(); + + if (pUpdate.getValue() instanceof Map) { + map.putAll((Map) pUpdate.getValue()); + } + else if (pUpdate.getValue() instanceof Map.Entry) { + map.put(((Map.Entry) pUpdate.getValue()).getKey(), ((Map.Entry) pUpdate.getValue()).getValue()); + } + else { + throw new MappingException( + String.format("Cannot set update value for map property '%s' to '%s'; Please use a Map or Map.Entry", + pUpdate.getPropertyPath(), pUpdate.getValue())); + } + + writeMap(entity.getKeySpace(), pUpdate.getPropertyPath(), targetProperty.getMapValueType(), map, sink); + } + else { + + writeInternal(entity.getKeySpace(), pUpdate.getPropertyPath(), pUpdate.getValue(), + targetProperty.getTypeInformation(), sink); + + Set data = indexResolver.resolveIndexesFor(entity.getKeySpace(), pUpdate.getPropertyPath(), + targetProperty.getTypeInformation(), pUpdate.getValue()); + + if (data.isEmpty()) { + + data = indexResolver.resolveIndexesFor(entity.getKeySpace(), pUpdate.getPropertyPath(), + targetProperty.getOwner().getTypeInformation(), pUpdate.getValue()); + + } + sink.addIndexedData(data); + } + } + + @Nullable + RedisPersistentProperty getTargetPropertyOrNullForPath(String path, Class type) { + + try { + + PersistentPropertyPath persistentPropertyPath = mappingContext + .getPersistentPropertyPath(path, type); + return persistentPropertyPath.getLeafProperty(); + } catch (Exception e) { + // that's just fine + } + + return null; + } + + /** + * @param keyspace + * @param path + * @param value + * @param typeHint + * @param sink + */ + private void writeInternal(@Nullable String keyspace, String path, @Nullable Object value, + TypeInformation typeHint, RedisData sink) { + + if (value == null) { + 
return; + } + + if (customConversions.hasCustomWriteTarget(value.getClass())) { + + Optional> targetType = customConversions.getCustomWriteTarget(value.getClass()); + + if (!StringUtils.hasText(path) && targetType.isPresent() + && ClassUtils.isAssignable(byte[].class, targetType.get())) { + sink.getBucket().put(StringUtils.hasText(path) ? path : "_raw", conversionService.convert(value, byte[].class)); + } + else { + + if (!ClassUtils.isAssignable(typeHint.getType(), value.getClass())) { + throw new MappingException( + String.format(INVALID_TYPE_ASSIGNMENT, value.getClass(), path, typeHint.getType())); + } + writeToBucket(path, value, sink, typeHint.getType()); + } + return; + } + + if (value instanceof byte[]) { + sink.getBucket().put(StringUtils.hasText(path) ? path : "_raw", (byte[]) value); + return; + } + + if (value.getClass() != typeHint.getType()) { + typeMapper.writeType(value.getClass(), sink.getBucket().getPropertyPath(path)); + } + + RedisPersistentEntity entity = mappingContext.getRequiredPersistentEntity(value.getClass()); + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(value); + + entity.doWithProperties((PropertyHandler) persistentProperty -> { + + String propertyStringPath = (!path.isEmpty() ? path + "." 
: "") + persistentProperty.getName(); + + Object propertyValue = accessor.getProperty(persistentProperty); + if (persistentProperty.isIdProperty()) { + + if (propertyValue != null) { + sink.getBucket().put(propertyStringPath, toBytes(propertyValue)); + } + return; + } + + if (persistentProperty.isMap()) { + + if (propertyValue != null) { + writeMap(keyspace, propertyStringPath, persistentProperty.getMapValueType(), (Map) propertyValue, sink); + } + } + else if (persistentProperty.isCollectionLike() && !isByteArray(persistentProperty)) { + + if (propertyValue == null) { + writeCollection(keyspace, propertyStringPath, null, + persistentProperty.getTypeInformation().getRequiredComponentType(), sink); + } + else { + + if (Iterable.class.isAssignableFrom(propertyValue.getClass())) { + + writeCollection(keyspace, propertyStringPath, (Iterable) propertyValue, + persistentProperty.getTypeInformation().getRequiredComponentType(), sink); + } + else if (propertyValue.getClass().isArray()) { + + writeCollection(keyspace, propertyStringPath, CollectionUtils.arrayToList(propertyValue), + persistentProperty.getTypeInformation().getRequiredComponentType(), sink); + } + else { + + throw new RuntimeException("Don't know how to handle " + propertyValue.getClass() + " type collection"); + } + } + + } + else if (propertyValue != null) { + + if (customConversions.isSimpleType(ProxyUtils.getUserClass(propertyValue.getClass()))) { + + writeToBucket(propertyStringPath, propertyValue, sink, persistentProperty.getType()); + } + else { + writeInternal(keyspace, propertyStringPath, propertyValue, + persistentProperty.getTypeInformation().getRequiredActualType(), sink); + } + } + }); + + writeAssociation(path, entity, value, sink); + } + + private void writeAssociation(String path, RedisPersistentEntity entity, @Nullable Object value, RedisData sink) { + + if (value == null) { + return; + } + + PersistentPropertyAccessor accessor = entity.getPropertyAccessor(value); + + 
entity.doWithAssociations((AssociationHandler) association -> { + + Object refObject = accessor.getProperty(association.getInverse()); + if (refObject == null) { + return; + } + + if (association.getInverse().isCollectionLike()) { + + RedisPersistentEntity ref = mappingContext.getRequiredPersistentEntity( + association.getInverse().getTypeInformation().getRequiredComponentType().getRequiredActualType()); + + String keyspace = ref.getKeySpace(); + String propertyStringPath = (!path.isEmpty() ? path + "." : "") + association.getInverse().getName(); + + int i = 0; + for (Object o : (Collection) refObject) { + + Object refId = ref.getPropertyAccessor(o).getProperty(ref.getRequiredIdProperty()); + if (refId != null) { + sink.getBucket().put(propertyStringPath + ".[" + i + "]", toBytes(keyspace + ":" + refId)); + i++; + } + } + + } + else { + + RedisPersistentEntity ref = mappingContext + .getRequiredPersistentEntity(association.getInverse().getTypeInformation()); + String keyspace = ref.getKeySpace(); + + if (keyspace != null) { + Object refId = ref.getPropertyAccessor(refObject).getProperty(ref.getRequiredIdProperty()); + + if (refId != null) { + String propertyStringPath = (!path.isEmpty() ? path + "." : "") + association.getInverse().getName(); + sink.getBucket().put(propertyStringPath, toBytes(keyspace + ":" + refId)); + } + } + } + }); + } + + /** + * @param keyspace + * @param path + * @param values + * @param typeHint + * @param sink + */ + private void writeCollection(@Nullable String keyspace, String path, @Nullable Iterable values, + TypeInformation typeHint, RedisData sink) { + + if (values == null) { + return; + } + + int i = 0; + for (Object value : values) { + + if (value == null) { + break; + } + + String currentPath = path + (path.equals("") ? 
"" : ".") + "[" + i + "]"; + + if (!ClassUtils.isAssignable(typeHint.getType(), value.getClass())) { + throw new MappingException( + String.format(INVALID_TYPE_ASSIGNMENT, value.getClass(), currentPath, typeHint.getType())); + } + + if (customConversions.hasCustomWriteTarget(value.getClass())) { + writeToBucket(currentPath, value, sink, typeHint.getType()); + } + else { + writeInternal(keyspace, currentPath, value, typeHint, sink); + } + i++; + } + } + + private void writeToBucket(String path, @Nullable Object value, RedisData sink, Class propertyType) { + + if (value == null || (value instanceof Optional && !((Optional) value).isPresent())) { + return; + } + + if (value instanceof byte[]) { + sink.getBucket().put(path, toBytes(value)); + return; + } + + if (customConversions.hasCustomWriteTarget(value.getClass())) { + + Optional> targetType = customConversions.getCustomWriteTarget(value.getClass()); + + if (!propertyType.isPrimitive() && !targetType.filter(it -> ClassUtils.isAssignable(Map.class, it)).isPresent() + && customConversions.isSimpleType(value.getClass()) && value.getClass() != propertyType) { + typeMapper.writeType(value.getClass(), sink.getBucket().getPropertyPath(path)); + } + + if (targetType.filter(it -> ClassUtils.isAssignable(Map.class, it)).isPresent()) { + + Map map = (Map) conversionService.convert(value, targetType.get()); + for (Map.Entry entry : map.entrySet()) { + sink.getBucket().put(path + (StringUtils.hasText(path) ? "." 
: "") + entry.getKey(), + toBytes(entry.getValue())); + } + } + else if (targetType.filter(it -> ClassUtils.isAssignable(byte[].class, it)).isPresent()) { + sink.getBucket().put(path, toBytes(value)); + } + else { + throw new IllegalArgumentException( + String.format("Cannot convert value '%s' of type %s to bytes", value, value.getClass())); + } + } + } + + @Nullable + private Object readCollectionOrArray(String path, Class collectionType, Class valueType, Bucket bucket) { + + List keys = new ArrayList<>(bucket.extractAllKeysFor(path)); + keys.sort(listKeyComparator); + + boolean isArray = collectionType.isArray(); + Class collectionTypeToUse = isArray ? ArrayList.class : collectionType; + Collection target = CollectionFactory.createCollection(collectionTypeToUse, valueType, keys.size()); + + for (String key : keys) { + + if (typeMapper.isTypeKey(key)) { + continue; + } + + Bucket elementData = bucket.extract(key); + + TypeInformation typeInformation = typeMapper.readType(elementData.getPropertyPath(key), + TypeInformation.of(valueType)); + + Class typeToUse = typeInformation.getType(); + if (conversionService.canConvert(byte[].class, typeToUse)) { + target.add(fromBytes(elementData.get(key), typeToUse)); + } + else { + target.add(readInternal(key, typeToUse, new RedisData(elementData))); + } + } + + return isArray ? toArray(target, collectionType, valueType) : (target.isEmpty() ? 
null : target); + } + + /** + * @param keyspace + * @param path + * @param mapValueType + * @param source + * @param sink + */ + private void writeMap(@Nullable String keyspace, String path, Class mapValueType, Map source, + RedisData sink) { + + if (CollectionUtils.isEmpty(source)) { + return; + } + + for (Map.Entry entry : source.entrySet()) { + + if (entry.getValue() == null || entry.getKey() == null) { + continue; + } + + String currentPath = path + ".[" + mapMapKey(entry.getKey()) + "]"; + + if (!ClassUtils.isAssignable(mapValueType, entry.getValue().getClass())) { + throw new MappingException( + String.format(INVALID_TYPE_ASSIGNMENT, entry.getValue().getClass(), currentPath, mapValueType)); + } + + if (customConversions.hasCustomWriteTarget(entry.getValue().getClass())) { + writeToBucket(currentPath, entry.getValue(), sink, mapValueType); + } + else { + writeInternal(keyspace, currentPath, entry.getValue(), TypeInformation.of(mapValueType), sink); + } + } + } + + private String mapMapKey(Object key) { + + if (conversionService.canConvert(key.getClass(), byte[].class)) { + return new String(conversionService.convert(key, byte[].class)); + } + + return conversionService.convert(key, String.class); + } + + /** + * @param path + * @param mapType + * @param keyType + * @param valueType + * @param source + * + * @return + */ + @Nullable + private Map readMapOfSimpleTypes(String path, Class mapType, Class keyType, Class valueType, + RedisData source) { + + Bucket partial = source.getBucket().extract(path + ".["); + + Map target = CollectionFactory.createMap(mapType, partial.size()); + + for (Map.Entry entry : partial.entrySet()) { + + if (typeMapper.isTypeKey(entry.getKey())) { + continue; + } + + Object key = extractMapKeyForPath(path, entry.getKey(), keyType); + Class typeToUse = getTypeHint(path + ".[" + key + "]", source.getBucket(), valueType); + target.put(key, fromBytes(entry.getValue(), typeToUse)); + } + + return target.isEmpty() ? 
null : target; + } + + /** + * @param path + * @param mapType + * @param keyType + * @param valueType + * @param source + * + * @return + */ + @Nullable + private Map readMapOfComplexTypes(String path, Class mapType, Class keyType, Class valueType, + RedisData source) { + + Set keys = source.getBucket().extractAllKeysFor(path); + + Map target = CollectionFactory.createMap(mapType, keys.size()); + + for (String key : keys) { + + Bucket partial = source.getBucket().extract(key); + + Object mapKey = extractMapKeyForPath(path, key, keyType); + + TypeInformation typeInformation = typeMapper.readType(source.getBucket().getPropertyPath(key), + TypeInformation.of(valueType)); + + Object o = readInternal(key, typeInformation.getType(), new RedisData(partial)); + target.put(mapKey, o); + } + + return target.isEmpty() ? null : target; + } + + @Nullable + private Object extractMapKeyForPath(String path, String key, Class targetType) { + + String regex = "^(" + Pattern.quote(path) + "\\.\\[)(.*?)(\\])"; + Pattern pattern = Pattern.compile(regex); + + Matcher matcher = pattern.matcher(key); + if (!matcher.find()) { + throw new IllegalArgumentException( + String.format("Cannot extract map value for key '%s' in path '%s'.", key, path)); + } + + Object mapKey = matcher.group(2); + + if (ClassUtils.isAssignable(targetType, mapKey.getClass())) { + return mapKey; + } + + return conversionService.convert(toBytes(mapKey), targetType); + } + + private Class getTypeHint(String path, Bucket bucket, Class fallback) { + + TypeInformation typeInformation = typeMapper.readType(bucket.getPropertyPath(path), + TypeInformation.of(fallback)); + return typeInformation.getType(); + } + + /** + * Convert given source to binary representation using the underlying {@link ConversionService}. 
+ * + * @param source + * + * @return + * + * @throws ConverterNotFoundException + */ + public byte[] toBytes(Object source) { + + if (source instanceof byte[]) { + return (byte[]) source; + } + + return conversionService.convert(source, byte[].class); + } + + /** + * Convert given binary representation to desired target type using the underlying {@link ConversionService}. + * + * @param source + * @param type new String(source.getBucket().extract("_class").get("_class")) + * + * @return + * + * @throws ConverterNotFoundException + */ + public T fromBytes(byte[] source, Class type) { + + if (type.isInstance(source)) { + return type.cast(source); + } + + return conversionService.convert(source, type); + } + + /** + * Converts a given {@link Collection} into an array considering primitive types. + * + * @param source {@link Collection} of values to be added to the array. + * @param arrayType {@link Class} of array. + * @param valueType to be used for conversion before setting the actual value. + * + * @return + */ + @Nullable + private Object toArray(Collection source, Class arrayType, Class valueType) { + + if (source.isEmpty()) { + return null; + } + + if (!ClassUtils.isPrimitiveArray(arrayType)) { + return source.toArray((Object[]) Array.newInstance(valueType, source.size())); + } + + Object targetArray = Array.newInstance(valueType, source.size()); + Iterator iterator = source.iterator(); + int i = 0; + while (iterator.hasNext()) { + Array.set(targetArray, i, conversionService.convert(iterator.next(), valueType)); + i++; + } + return i > 0 ? targetArray : null; + } + + public void setIndexResolver(IndexResolver indexResolver) { + this.indexResolver = indexResolver; + } + + public void setReferenceResolver(ReferenceResolver referenceResolver) { + this.referenceResolver = referenceResolver; + } + + /** + * Set {@link CustomConversions} to be applied. 
+ * + * @param customConversions + */ + public void setCustomConversions(@Nullable CustomConversions customConversions) { + this.customConversions = customConversions != null ? customConversions : new RedisCustomConversions(); + } + + @Override + public RedisMappingContext getMappingContext() { + return this.mappingContext; + } + + @Nullable + @Override + public IndexResolver getIndexResolver() { + return this.indexResolver; + } + + @Override + public ConversionService getConversionService() { + return this.conversionService; + } + + @Override + public void afterPropertiesSet() { + this.initializeConverters(); + } + + private void initializeConverters() { + customConversions.registerConvertersIn(conversionService); + } + + private static boolean isByteArray(RedisPersistentProperty property) { + return property.getType().equals(byte[].class); + } + + private static boolean isByteArray(TypeInformation type) { + return type.getType().equals(byte[].class); + } + + /** + * @author Christoph Strobl + * @author Mark Paluch + */ + private class ConverterAwareParameterValueProvider implements PropertyValueProvider { + + private final String path; + + private final RedisData source; + + private final ConversionService conversionService; + + ConverterAwareParameterValueProvider(String path, RedisData source, ConversionService conversionService) { + + this.path = path; + this.source = source; + this.conversionService = conversionService; + } + + @Override + @SuppressWarnings("unchecked") + public T getPropertyValue(RedisPersistentProperty property) { + + Object value = readProperty(path, source, property); + + if (value == null || ClassUtils.isAssignableValue(property.getType(), value)) { + return (T) value; + } + + return (T) conversionService.convert(value, property.getType()); + } + } + + private enum NaturalOrderingKeyComparator implements Comparator { + + INSTANCE; + + public int compare(String s1, String s2) { + + int s1offset = 0; + int s2offset = 0; + + while (s1offset 
< s1.length() && s2offset < s2.length()) { + + Part thisPart = extractPart(s1, s1offset); + Part thatPart = extractPart(s2, s2offset); + + int result = thisPart.compareTo(thatPart); + + if (result != 0) { + return result; + } + + s1offset += thisPart.length(); + s2offset += thatPart.length(); + } + + return 0; + } + + private Part extractPart(String source, int offset) { + + StringBuilder builder = new StringBuilder(); + + char c = source.charAt(offset); + builder.append(c); + + boolean isDigit = Character.isDigit(c); + for (int i = offset + 1; i < source.length(); i++) { + + c = source.charAt(i); + if ((isDigit && !Character.isDigit(c)) || (!isDigit && Character.isDigit(c))) { + break; + } + builder.append(c); + } + + return new Part(builder.toString(), isDigit); + } + + private static class Part implements Comparable { + + private final String rawValue; + + private final @Nullable Long longValue; + + Part(String value, boolean isDigit) { + + this.rawValue = value; + this.longValue = isDigit ? Long.valueOf(value) : null; + } + + boolean isNumeric() { + return longValue != null; + } + + int length() { + return rawValue.length(); + } + + @Override + public int compareTo(Part that) { + + if (this.isNumeric() && that.isNumeric()) { + return this.longValue.compareTo(that.longValue); + } + + return this.rawValue.compareTo(that.rawValue); + } + } + }

	/**
	 * Value object representing a Redis Hash/Object identifier composed from keyspace and object id in
	 * the form of {@literal keyspace:id}.
	 *
	 * @author Mark Paluch
	 * @author Stefan Berger
	 * @since 1.8.10
	 */
	public static class KeyspaceIdentifier {

		public static final String PHANTOM = "phantom";

		public static final String DELIMITER = ":";

		public static final String PHANTOM_SUFFIX = DELIMITER + PHANTOM;

		private final String keyspace;

		private final String id;

		private final boolean phantomKey;

		private KeyspaceIdentifier(String keyspace, String id, boolean phantomKey) {

			this.keyspace = keyspace;
			this.id = id;
			this.phantomKey = phantomKey;
		}

		/**
		 * Parse a {@code key} into {@link KeyspaceIdentifier}.
		 *
		 * @param key the key representation; must be of the form {@literal keyspace:id}.
		 * @return {@link KeyspaceIdentifier} for the given key.
		 * @throws IllegalArgumentException if {@code key} is not {@link #isValid(String) valid}.
		 */
		public static KeyspaceIdentifier of(String key) {

			Assert.isTrue(isValid(key), String.format("Invalid key %s", key));

			boolean phantomKey = key.endsWith(PHANTOM_SUFFIX);
			int keyspaceEndIndex = key.indexOf(DELIMITER);
			String keyspace = key.substring(0, keyspaceEndIndex);
			String id;

			// For phantom keys, strip the trailing ":phantom" marker from the id part.
			if (phantomKey) {
				id = key.substring(keyspaceEndIndex + 1, key.length() - PHANTOM_SUFFIX.length());
			} else {
				id = key.substring(keyspaceEndIndex + 1);
			}

			return new KeyspaceIdentifier(keyspace, id, phantomKey);
		}

		/**
		 * Check whether the {@code key} is valid, in particular whether the key contains a keyspace and
		 * an id part in the form of {@literal keyspace:id}.
		 *
		 * @param key the key.
		 * @return {@literal true} if the key is valid.
		 */
		public static boolean isValid(@Nullable String key) {

			if (key == null) {
				return false;
			}

			int keyspaceEndIndex = key.indexOf(DELIMITER);

			return keyspaceEndIndex > 0 && key.length() > keyspaceEndIndex;
		}

		public String getKeyspace() {
			return this.keyspace;
		}

		public String getId() {
			return this.id;
		}

		public boolean isPhantomKey() {
			return this.phantomKey;
		}
	}

	/**
	 * Value object representing a binary Redis Hash/Object identifier composed from keyspace and
	 * object id in the form of {@literal keyspace:id}.
	 *
	 * @author Mark Paluch
	 * @author Stefan Berger
	 * @since 1.8.10
	 */
	public static class BinaryKeyspaceIdentifier {

		public static final byte[] PHANTOM = KeyspaceIdentifier.PHANTOM.getBytes();

		public static final byte DELIMITER = ':';

		public static final byte[] PHANTOM_SUFFIX = ByteUtils.concat(new byte[] { DELIMITER }, PHANTOM);

		private final byte[] keyspace;

		private final byte[] id;

		private final boolean phantomKey;

		private BinaryKeyspaceIdentifier(byte[] keyspace, byte[] id, boolean phantomKey) {

			this.keyspace = keyspace;
			this.id = id;
			this.phantomKey = phantomKey;
		}

		/**
		 * Parse a binary {@code key} into {@link BinaryKeyspaceIdentifier}.
		 *
		 * @param key the binary key representation; must be of the form {@literal keyspace:id}.
		 * @return {@link BinaryKeyspaceIdentifier} for binary key.
		 * @throws IllegalArgumentException if {@code key} is not {@link #isValid(byte[]) valid}.
		 */
		public static BinaryKeyspaceIdentifier of(byte[] key) {

			Assert.isTrue(isValid(key), String.format("Invalid key %s", new String(key)));

			boolean phantomKey = ByteUtils.startsWith(key, PHANTOM_SUFFIX, key.length - PHANTOM_SUFFIX.length);

			int keyspaceEndIndex = ByteUtils.indexOf(key, DELIMITER);
			byte[] keyspace = extractKeyspace(key, keyspaceEndIndex);
			byte[] id = extractId(key, phantomKey, keyspaceEndIndex);

			return new BinaryKeyspaceIdentifier(keyspace, id, phantomKey);
		}

		/**
		 * Check whether the {@code key} is valid, in particular whether the key contains a keyspace and
		 * an id part in the form of {@literal keyspace:id}.
		 *
		 * @param key the key.
		 * @return {@literal true} if the key is valid.
		 */
		public static boolean isValid(byte[] key) {

			if (key.length == 0) {
				return false;
			}

			int keyspaceEndIndex = ByteUtils.indexOf(key, DELIMITER);

			return keyspaceEndIndex > 0 && key.length > keyspaceEndIndex;
		}

		// Copies the id bytes following the delimiter, dropping the ":phantom" suffix for phantom keys.
		private static byte[] extractId(byte[] key, boolean phantomKey, int keyspaceEndIndex) {

			int idSize;

			if (phantomKey) {
				idSize = (key.length - PHANTOM_SUFFIX.length) - (keyspaceEndIndex + 1);
			} else {
				idSize = key.length - (keyspaceEndIndex + 1);
			}

			byte[] id = new byte[idSize];
			System.arraycopy(key, keyspaceEndIndex + 1, id, 0, idSize);

			return id;
		}

		// Copies the keyspace bytes preceding the delimiter.
		private static byte[] extractKeyspace(byte[] key, int keyspaceEndIndex) {

			byte[] keyspace = new byte[keyspaceEndIndex];
			System.arraycopy(key, 0, keyspace, 0, keyspaceEndIndex);

			return keyspace;
		}

		public byte[] getKeyspace() {
			return this.keyspace;
		}

		public byte[] getId() {
			return this.id;
		}

		public boolean isPhantomKey() {
			return this.phantomKey;
		}
	}
}
diff --git a/src/test/java/org/springframework/data/redis/core/ReferenceRedisAdapterIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/ReferenceRedisAdapterIntegrationTests.java new file mode 100644 index
0000000000..8ef5540ba7 --- /dev/null +++ b/src/test/java/org/springframework/data/redis/core/ReferenceRedisAdapterIntegrationTests.java @@ -0,0 +1,132 @@
package org.springframework.data.redis.core;

import lombok.Data;
import lombok.experimental.Accessors;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.data.annotation.Id;
import org.springframework.data.annotation.Reference;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.lettuce.extension.LettuceConnectionFactoryExtension;
import org.springframework.data.redis.core.convert.*;
import org.springframework.data.redis.core.index.IndexConfiguration;
import org.springframework.data.redis.core.mapping.RedisMappingContext;
import org.springframework.data.redis.hash.ObjectHashMapper;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatNoException;

/**
 * Integration tests for {@link ReferenceRedisAdapter} verifying that entities holding cyclic
 * {@link Reference}s to each other can be written and read back without a
 * {@link StackOverflowError} and with correct values.
 *
 * @author Ilya Viaznin
 */
@ExtendWith(LettuceConnectionFactoryExtension.class)
public class ReferenceRedisAdapterIntegrationTests {

	private final RedisConnectionFactory connectionFactory;

	public ReferenceRedisAdapterIntegrationTests(RedisConnectionFactory connectionFactory) {
		this.connectionFactory = connectionFactory;
	}

	private ReferenceRedisAdapter adapter;

	private User user;

	private Employee employee;

	@BeforeEach
	void setUp() {

		var mappingContext = new RedisMappingContext(
				new MappingConfiguration(new IndexConfiguration(), new KeyspaceConfiguration()));
		var converter = new ReferenceMappingRedisConverter(mappingContext);
		var hashMapper = new ObjectHashMapper(converter);
		var template = new RedisTemplate<>(hashMapper);
		adapter = new ReferenceRedisAdapter(template, converter);

		converter.setIndexResolver(new PathIndexResolver(mappingContext));
		converter.setReferenceResolver(new ReferenceResolverImpl(template));
		template.setConnectionFactory(connectionFactory);

		adapter.afterPropertiesSet();
		converter.afterPropertiesSet();
		template.afterPropertiesSet();

		// Build a User <-> Employee reference cycle used by all tests.
		user = new User();
		employee = new Employee().setUser(user);
		user.setEmployee(employee);
	}

	@AfterEach
	void clean() {
		adapter.deleteAllOf(User.class.getName());
		adapter.deleteAllOf(Employee.class.getName());
	}

	@Test
	void getRecordWithCyclicReferenceNoStackOverflow() {

		user.setId(0L).setName("Sam");
		employee.setId(0L);

		adapter.put(user.getId(), user, User.class.getName());
		adapter.put(employee.getId(), employee, Employee.class.getName());

		assertThatNoException().isThrownBy(() -> adapter.get(user.getId(), User.class.getName(), User.class));
		assertThatNoException().isThrownBy(() -> adapter.get(employee.getId(), Employee.class.getName(), Employee.class));
	}

	@Test
	void getCyclicReferenceValuesIsCorrect() {

		user.setId(0L).setName("Elena");
		employee.setId(0L);

		adapter.put(user.getId(), user, User.class.getName());
		adapter.put(employee.getId(), employee, Employee.class.getName());

		var userFromRedis = adapter.get(user.getId(), User.class.getName(), User.class);
		var employeeFromRedis = adapter.get(employee.getId(), Employee.class.getName(), Employee.class);

		assertThat(userFromRedis).isNotNull();
		assertThat(employeeFromRedis).isNotNull();

		assertThat(userFromRedis.getId()).isEqualTo(user.getId());
		assertThat(userFromRedis.getName()).isEqualTo(user.getName());
		assertThat(userFromRedis.getEmployee()).isNotNull();
		assertThat(userFromRedis.getEmployee().getId()).isEqualTo(employee.getId());

		assertThat(employeeFromRedis.getId()).isEqualTo(employee.getId());
		assertThat(employeeFromRedis.getUser()).isNotNull();
		assertThat(employeeFromRedis.getUser().getId()).isEqualTo(user.getId());
		assertThat(employeeFromRedis.getUser().getName()).isEqualTo(user.getName());
	}

	@Data
	@Accessors(chain = true)
	@RedisHash
	static class User {

		@Id private Long id;

		private String name;

		@Reference private Employee employee;
	}

	@Data
	@Accessors(chain = true)
	@RedisHash
	static class Employee {

		@Id private Long id;

		@Reference private User user;
	}
}
diff --git a/src/test/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverterUnitTests.java b/src/test/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverterUnitTests.java new file mode 100644 index 0000000000..d85a14db0d --- /dev/null +++ b/src/test/java/org/springframework/data/redis/core/convert/ReferenceMappingRedisConverterUnitTests.java @@ -0,0 +1,2115 @@ +/* + * Copyright 2015-2022 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package org.springframework.data.redis.core.convert; + +import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.AllArgsConstructor; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledOnJre; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.ReadingConverter; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.redis.core.PartialUpdate; +import org.springframework.data.redis.core.convert.KeyspaceConfiguration.KeyspaceSettings; +import org.springframework.data.redis.core.mapping.RedisMappingContext; +import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer; +import org.springframework.data.redis.test.util.RedisTestData; +import org.springframework.util.StringUtils; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.time.*; +import java.util.*; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.when; +import static org.springframework.data.redis.core.convert.ConversionTestEntities.*; + +/** + * Unit tests for {@link MappingRedisConverter}. 
+ * + * @author Christoph Strobl + * @author Greg Turnquist + * @author Mark Paluch + * @author Golam Mazid Sajib + * @author Ilya Viaznin + */ +@ExtendWith(MockitoExtension.class) +class ReferenceMappingRedisConverterUnitTests { + + @Mock ReferenceResolver resolverMock; + + private ReferenceMappingRedisConverter converter; + + private Person rand; + + @BeforeEach + void setUp() { + + converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + converter.afterPropertiesSet(); + + rand = new Person(); + } + + @Test // DATAREDIS-425 + void writeAppendsTypeHintForRootCorrectly() { + assertThat(write(rand)).containingTypeHint("_class", Person.class); + } + + @Test // DATAREDIS-543 + void writeSkipsTypeHintIfConfigured() { + + converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + converter.afterPropertiesSet(); + + assertThat(write(rand)).containingTypeHint("_class", Person.class); + } + + @Test // DATAREDIS-425 + void writeAppendsKeyCorrectly() { + + rand.id = "1"; + + assertThat(write(rand).getId()).isEqualTo("1"); + } + + @Test // DATAREDIS-425 + void writeAppendsKeyCorrectlyWhenThereIsAnAdditionalIdFieldInNestedElement() { + + AddressWithId address = new AddressWithId(); + address.id = "tear"; + address.city = "Tear"; + + rand.id = "1"; + rand.address = address; + + RedisTestData data = write(rand); + + assertThat(data.getId()).isEqualTo("1"); + assertThat(data).containsEntry("address.id", "tear"); + } + + @Test // DATAREDIS-425 + void writeDoesNotAppendPropertiesWithNullValues() { + + rand.firstname = "rand"; + + assertThat(write(rand)).without("lastname"); + } + + @Test // DATAREDIS-425 + void writeDoesNotAppendPropertiesWithEmptyCollections() { + + rand.firstname = "rand"; + + assertThat(write(rand)).without("nicknames"); + } + + @Test // DATAREDIS-425 + void writeAppendsSimpleRootPropertyCorrectly() { + + rand.firstname = "nynaeve"; + + 
assertThat(write(rand)).containsEntry("firstname", "nynaeve"); + } + + @Test // DATAREDIS-425 + void writeAppendsListOfSimplePropertiesCorrectly() { + + rand.nicknames = Arrays.asList("dragon reborn", "lews therin"); + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("nicknames.[0]", "dragon reborn").containsEntry("nicknames.[1]", "lews therin"); + } + + @Test // DATAREDIS-425 + void writeAppendsComplexObjectCorrectly() { + + Address address = new Address(); + address.city = "two rivers"; + address.country = "andora"; + rand.address = address; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("address.city", "two rivers").containsEntry("address.country", "andora"); + } + + @Test // DATAREDIS-425 + void writeAppendsListOfComplexObjectsCorrectly() { + + Person mat = new Person(); + mat.firstname = "mat"; + mat.nicknames = Collections.singletonList("prince of the ravens"); + + Person perrin = new Person(); + perrin.firstname = "perrin"; + perrin.address = new Address(); + perrin.address.city = "two rivers"; + + rand.coworkers = Arrays.asList(mat, perrin); + rand.id = UUID.randomUUID().toString(); + rand.firstname = "rand"; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("coworkers.[0].firstname", "mat") // + .containsEntry("coworkers.[0].nicknames.[0]", "prince of the ravens") // + .containsEntry("coworkers.[1].firstname", "perrin") // + .containsEntry("coworkers.[1].address.city", "two rivers"); + } + + @Test // DATAREDIS-425 + void writeDoesNotAddTypeInformationCorrectlyForMatchingTypes() { + + Address address = new Address(); + address.city = "two rivers"; + + rand.address = address; + + RedisTestData target = write(rand); + + assertThat(target).without("address._class"); + } + + @Test // DATAREDIS-425, DATAREDIS-543 + void writeAddsTypeInformationCorrectlyForNonMatchingTypes() { + + AddressWithPostcode address = new AddressWithPostcode(); + address.city = "two rivers"; + 
address.postcode = "1234"; + + rand.address = address; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("address._class", "with-post-code"); + } + + @Test // DATAREDIS-425 + void readConsidersTypeInformationCorrectlyForNonMatchingTypes() { + + Map map = new HashMap<>(); + map.put("address._class", AddressWithPostcode.class.getName()); + map.put("address.postcode", "1234"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.address).isInstanceOf(AddressWithPostcode.class); + } + + @Test // DATAREDIS-544 + void readEntityViaConstructor() { + + Map map = new HashMap<>(); + map.put("id", "bart"); + map.put("firstname", "Bart"); + map.put("lastname", "Simpson"); + + map.put("father.id", "homer"); + map.put("father.firstname", "Homer"); + map.put("father.lastname", "Simpson"); + + RecursiveConstructorPerson target = converter.read(RecursiveConstructorPerson.class, + new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.id).isEqualTo("bart"); + assertThat(target.firstname).isEqualTo("Bart"); + assertThat(target.lastname).isEqualTo("Simpson"); + assertThat(target.father).isNotNull(); + assertThat(target.father.id).isEqualTo("homer"); + assertThat(target.father.firstname).isEqualTo("Homer"); + assertThat(target.father.lastname).isEqualTo("Simpson"); + assertThat(target.father.father).isNull(); + } + + @Test // DATAREDIS-425 + void writeAddsTypeInformationCorrectlyForNonMatchingTypesInCollections() { + + Person mat = new TaVeren(); + mat.firstname = "mat"; + + rand.coworkers = Collections.singletonList(mat); + + RedisTestData target = write(rand); + + assertThat(target).containingTypeHint("coworkers.[0]._class", TaVeren.class); + } + + @Test // DATAREDIS-425 + void readConvertsSimplePropertiesCorrectly() { + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("firstname", "rand"))); + + 
assertThat(converter.read(Person.class, rdo).firstname).isEqualTo("rand"); + } + + @Test // DATAREDIS-425 + void readConvertsListOfSimplePropertiesCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("nicknames.[0]", "dragon reborn"); + map.put("nicknames.[1]", "lews therin"); + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + assertThat(converter.read(Person.class, rdo).nicknames).containsExactly("dragon reborn", "lews therin"); + } + + @Test // DATAREDIS-425 + void readConvertsUnorderedListOfSimplePropertiesCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("nicknames.[9]", "car'a'carn"); + map.put("nicknames.[10]", "lews therin"); + map.put("nicknames.[1]", "dragon reborn"); + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + assertThat(converter.read(Person.class, rdo).nicknames).containsExactly("dragon reborn", "car'a'carn", + "lews therin"); + } + + @Test // DATAREDIS-768 + void readConvertsUnorderedListOfSimpleIntegerPropertiesCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("positions.[9]", "0"); + map.put("positions.[10]", "1"); + map.put("positions.[1]", "2"); + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + assertThat(converter.read(Person.class, rdo).positions).containsExactly(2, 0, 1); + } + + @Test // DATAREDIS-425 + void readComplexPropertyCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("address.city", "two rivers"); + map.put("address.country", "andor"); + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + Person target = converter.read(Person.class, rdo); + + assertThat(target.address).isNotNull(); + assertThat(target.address.city).isEqualTo("two rivers"); + assertThat(target.address.country).isEqualTo("andor"); + } + + @Test // DATAREDIS-425 + void readListComplexPropertyCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("coworkers.[0].firstname", "mat"); + map.put("coworkers.[0].nicknames.[0]", "prince of the 
ravens"); + map.put("coworkers.[0].nicknames.[1]", "gambler"); + map.put("coworkers.[1].firstname", "perrin"); + map.put("coworkers.[1].address.city", "two rivers"); + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + Person target = converter.read(Person.class, rdo); + + assertThat(target.coworkers).isNotNull(); + assertThat(target.coworkers.get(0).firstname).isEqualTo("mat"); + assertThat(target.coworkers.get(0).nicknames).isNotNull(); + assertThat(target.coworkers.get(0).nicknames.get(0)).isEqualTo("prince of the ravens"); + assertThat(target.coworkers.get(0).nicknames.get(1)).isEqualTo("gambler"); + + assertThat(target.coworkers.get(1).firstname).isEqualTo("perrin"); + assertThat(target.coworkers.get(1).address.city).isEqualTo("two rivers"); + } + + @Test // DATAREDIS-425 + void readUnorderedListOfComplexPropertyCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("coworkers.[10].firstname", "perrin"); + map.put("coworkers.[10].address.city", "two rivers"); + map.put("coworkers.[1].firstname", "mat"); + map.put("coworkers.[1].nicknames.[1]", "gambler"); + map.put("coworkers.[1].nicknames.[0]", "prince of the ravens"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + Person target = converter.read(Person.class, rdo); + + assertThat(target.coworkers).isNotNull(); + assertThat(target.coworkers.get(0).firstname).isEqualTo("mat"); + assertThat(target.coworkers.get(0).nicknames).isNotNull(); + assertThat(target.coworkers.get(0).nicknames.get(0)).isEqualTo("prince of the ravens"); + assertThat(target.coworkers.get(0).nicknames.get(1)).isEqualTo("gambler"); + + assertThat(target.coworkers.get(1).firstname).isEqualTo("perrin"); + assertThat(target.coworkers.get(1).address.city).isEqualTo("two rivers"); + } + + @Test // DATAREDIS-425 + void readListComplexPropertyCorrectlyAndConsidersTypeInformation() { + + Map map = new LinkedHashMap<>(); + map.put("coworkers.[0]._class", TaVeren.class.getName()); + 
map.put("coworkers.[0].firstname", "mat"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + Person target = converter.read(Person.class, rdo); + + assertThat(target.coworkers).isNotNull(); + assertThat(target.coworkers.get(0)).isInstanceOf(TaVeren.class); + assertThat(target.coworkers.get(0).firstname).isEqualTo("mat"); + } + + @Test // DATAREDIS-425 + void writeAppendsMapWithSimpleKeyCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("hair-color", "red"); + map.put("eye-color", "grey"); + + rand.physicalAttributes = map; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("physicalAttributes.[hair-color]", "red") // + .containsEntry("physicalAttributes.[eye-color]", "grey"); + } + + @Test // DATAREDIS-425 + void writeAppendsMapWithSimpleKeyOnNestedObjectCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("hair-color", "red"); + map.put("eye-color", "grey"); + + rand.coworkers = new ArrayList<>(); + rand.coworkers.add(new Person()); + rand.coworkers.get(0).physicalAttributes = map; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("coworkers.[0].physicalAttributes.[hair-color]", "red") // + .containsEntry("coworkers.[0].physicalAttributes.[eye-color]", "grey"); + } + + @Test // DATAREDIS-425 + void readSimpleMapValuesCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("physicalAttributes.[hair-color]", "red"); + map.put("physicalAttributes.[eye-color]", "grey"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + Person target = converter.read(Person.class, rdo); + + assertThat(target.physicalAttributes).isNotNull(); + assertThat(target.physicalAttributes.get("hair-color")).isEqualTo("red"); + assertThat(target.physicalAttributes.get("eye-color")).isEqualTo("grey"); + } + + @Test // DATAREDIS-768 + void readSimpleIntegerMapValuesCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("integerMapKeyMapping.[1]", "2"); + 
map.put("integerMapKeyMapping.[3]", "4"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + TypeWithMaps target = converter.read(TypeWithMaps.class, rdo); + + assertThat(target.integerMapKeyMapping).isNotNull(); + assertThat(target.integerMapKeyMapping.get(1)).isEqualTo(2); + assertThat(target.integerMapKeyMapping.get(3)).isEqualTo(4); + } + + @Test // DATAREDIS-768 + void readMapWithDecimalMapKeyCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("decimalMapKeyMapping.[1.7]", "2"); + map.put("decimalMapKeyMapping.[3.1]", "4"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + TypeWithMaps target = converter.read(TypeWithMaps.class, rdo); + + assertThat(target.decimalMapKeyMapping).isNotNull(); + assertThat(target.decimalMapKeyMapping.get(1.7D)).isEqualTo("2"); + assertThat(target.decimalMapKeyMapping.get(3.1D)).isEqualTo("4"); + } + + @Test // DATAREDIS-768 + void writeMapWithDecimalMapKeyCorrectly() { + + TypeWithMaps source = new TypeWithMaps(); + source.decimalMapKeyMapping = new LinkedHashMap<>(); + source.decimalMapKeyMapping.put(1.7D, "2"); + source.decimalMapKeyMapping.put(3.1D, "4"); + + RedisTestData target = write(source); + + assertThat(target).containsEntry("decimalMapKeyMapping.[1.7]", "2") // + .containsEntry("decimalMapKeyMapping.[3.1]", "4"); + } + + @Test // DATAREDIS-768 + void readMapWithDateMapKeyCorrectly() { + + Date judgmentDay = Date.from(Instant.parse("1979-08-29T12:00:00Z")); + + Map map = new LinkedHashMap<>(); + map.put("dateMapKeyMapping.[" + judgmentDay.getTime() + "]", "skynet"); + + RedisData rdo = new RedisData(Bucket.newBucketFromStringMap(map)); + + TypeWithMaps target = converter.read(TypeWithMaps.class, rdo); + + assertThat(target.dateMapKeyMapping).isNotNull(); + assertThat(target.dateMapKeyMapping.get(judgmentDay)).isEqualTo("skynet"); + } + + @Test // DATAREDIS-768 + void writeMapWithDateMapKeyCorrectly() { + + Date judgmentDay = 
Date.from(Instant.parse("1979-08-29T12:00:00Z")); + + TypeWithMaps source = new TypeWithMaps(); + source.dateMapKeyMapping = Collections.singletonMap(judgmentDay, "skynet"); + + assertThat(write(source)).containsEntry("dateMapKeyMapping.[" + judgmentDay.getTime() + "]", "skynet"); + } + + @Test // DATAREDIS-425 + void writeAppendsMapWithComplexObjectsCorrectly() { + + Map map = new LinkedHashMap<>(); + Person janduin = new Person(); + janduin.firstname = "janduin"; + map.put("father", janduin); + Person tam = new Person(); + tam.firstname = "tam"; + map.put("step-father", tam); + + rand.relatives = map; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("relatives.[father].firstname", "janduin") // + .containsEntry("relatives.[step-father].firstname", "tam"); + } + + @Test // DATAREDIS-425 + void readMapWithComplexObjectsCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("relatives.[father].firstname", "janduin"); + map.put("relatives.[step-father].firstname", "tam"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.relatives).isNotNull(); + assertThat(target.relatives.get("father")).isNotNull(); + assertThat(target.relatives.get("father").firstname).isEqualTo("janduin"); + assertThat(target.relatives.get("step-father")).isNotNull(); + assertThat(target.relatives.get("step-father").firstname).isEqualTo("tam"); + } + + @Test // DATAREDIS-768 + void readMapWithIntegerKeysAndComplexObjectsCorrectly() { + + Map map = new LinkedHashMap<>(); + map.put("favoredRelatives.[1].firstname", "janduin"); + map.put("favoredRelatives.[2].firstname", "tam"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.favoredRelatives).isNotNull(); + assertThat(target.favoredRelatives.get(1)).isNotNull(); + assertThat(target.favoredRelatives.get(1).firstname).isEqualTo("janduin"); + 
assertThat(target.favoredRelatives.get(2)).isNotNull(); + assertThat(target.favoredRelatives.get(2).firstname).isEqualTo("tam"); + } + + @Test // DATAREDIS-425 + void writeAppendsTypeInformationCorrectlyForMapWithComplexObjects() { + + Map map = new LinkedHashMap<>(); + Person lews = new TaVeren(); + lews.firstname = "lews"; + map.put("previous-incarnation", lews); + + rand.relatives = map; + + RedisTestData target = write(rand); + + assertThat(target).containingTypeHint("relatives.[previous-incarnation]._class", TaVeren.class); + } + + @Test // DATAREDIS-425 + void readConsidersTypeInformationCorrectlyForMapWithComplexObjects() { + + Map map = new LinkedHashMap<>(); + map.put("relatives.[previous-incarnation]._class", TaVeren.class.getName()); + map.put("relatives.[previous-incarnation].firstname", "lews"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.relatives.get("previous-incarnation")).isNotNull(); + assertThat(target.relatives.get("previous-incarnation")).isInstanceOf(TaVeren.class); + assertThat(target.relatives.get("previous-incarnation").firstname).isEqualTo("lews"); + } + + @Test // DATAREDIS-425 + void writesIntegerValuesCorrectly() { + + rand.age = 20; + + assertThat(write(rand)).containsEntry("age", "20"); + } + + @Test // DATAREDIS-425 + void writesLocalDateTimeValuesCorrectly() { + + rand.localDateTime = LocalDateTime.parse("2016-02-19T10:18:01"); + + assertThat(write(rand)).containsEntry("localDateTime", "2016-02-19T10:18:01"); + } + + @Test // DATAREDIS-425 + void readsLocalDateTimeValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("localDateTime", "2016-02-19T10:18:01")))); + + assertThat(target.localDateTime).isEqualTo(LocalDateTime.parse("2016-02-19T10:18:01")); + } + + @Test // DATAREDIS-425 + void writesLocalDateValuesCorrectly() { + + rand.localDate = 
LocalDate.parse("2016-02-19"); + + assertThat(write(rand)).containsEntry("localDate", "2016-02-19"); + } + + @Test // DATAREDIS-425 + void readsLocalDateValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("localDate", "2016-02-19")))); + + assertThat(target.localDate).isEqualTo(LocalDate.parse("2016-02-19")); + } + + @Test // DATAREDIS-425 + void writesLocalTimeValuesCorrectly() { + + rand.localTime = LocalTime.parse("11:12:13"); + + assertThat(write(rand)).containsEntry("localTime", "11:12:13"); + } + + @Test // DATAREDIS-425 + void readsLocalTimeValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("localTime", "11:12")))); + + assertThat(target.localTime).isEqualTo(LocalTime.parse("11:12:00")); + } + + @Test // DATAREDIS-425 + void writesZonedDateTimeValuesCorrectly() { + + rand.zonedDateTime = ZonedDateTime.parse("2007-12-03T10:15:30+01:00[Europe/Paris]"); + + assertThat(write(rand)).containsEntry("zonedDateTime", "2007-12-03T10:15:30+01:00[Europe/Paris]"); + } + + @Test // DATAREDIS-425 + void readsZonedDateTimeValuesCorrectly() { + + Person target = converter.read(Person.class, new RedisData(Bucket + .newBucketFromStringMap(Collections.singletonMap("zonedDateTime", "2007-12-03T10:15:30+01:00[Europe/Paris]")))); + + assertThat(target.zonedDateTime).isEqualTo(ZonedDateTime.parse("2007-12-03T10:15:30+01:00[Europe/Paris]")); + } + + @Test // DATAREDIS-425 + void writesInstantValuesCorrectly() { + + rand.instant = Instant.parse("2007-12-03T10:15:30.01Z"); + + assertThat(write(rand)).containsEntry("instant", "2007-12-03T10:15:30.010Z"); + } + + @Test // DATAREDIS-425 + void readsInstantValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("instant", "2007-12-03T10:15:30.01Z")))); + + 
assertThat(target.instant).isEqualTo(Instant.parse("2007-12-03T10:15:30.01Z")); + } + + @Test // DATAREDIS-425 + void writesZoneIdValuesCorrectly() { + + rand.zoneId = ZoneId.of("Europe/Paris"); + + assertThat(write(rand)).containsEntry("zoneId", "Europe/Paris"); + } + + @Test // DATAREDIS-425, GH-2307 + void readsZoneIdValuesCorrectly() { + + Map map = new HashMap<>(); + map.put("zoneId", "Europe/Paris"); + map.put("zoneId._class", "java.time.ZoneRegion"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.zoneId).isEqualTo(ZoneId.of("Europe/Paris")); + } + + @Test // DATAREDIS-425 + void writesDurationValuesCorrectly() { + + rand.duration = Duration.parse("P2DT3H4M"); + + assertThat(write(rand)).containsEntry("duration", "PT51H4M"); + } + + @Test // DATAREDIS-425 + void readsDurationValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("duration", "PT51H4M")))); + + assertThat(target.duration).isEqualTo(Duration.parse("P2DT3H4M")); + } + + @Test // DATAREDIS-425 + void writesPeriodValuesCorrectly() { + + rand.period = Period.parse("P1Y2M25D"); + + assertThat(write(rand)).containsEntry("period", "P1Y2M25D"); + } + + @Test // DATAREDIS-425 + void readsPeriodValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("period", "P1Y2M25D")))); + + assertThat(target.period).isEqualTo(Period.parse("P1Y2M25D")); + } + + @Test // DATAREDIS-425, DATAREDIS-593 + void writesEnumValuesCorrectly() { + + rand.gender = Gender.FEMALE; + + assertThat(write(rand)).containsEntry("gender", "FEMALE"); + } + + @Test // DATAREDIS-425, DATAREDIS-593 + void readsEnumValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("gender", "FEMALE")))); + + 
assertThat(target.gender).isEqualTo(Gender.FEMALE); + } + + @Test // DATAREDIS-425 + void writesBooleanValuesCorrectly() { + + rand.alive = Boolean.TRUE; + + assertThat(write(rand)).containsEntry("alive", "1"); + } + + @Test // DATAREDIS-425 + void readsBooleanValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("alive", "1")))); + + assertThat(target.alive).isEqualTo(Boolean.TRUE); + } + + @Test // DATAREDIS-425 + void readsStringBooleanValuesCorrectly() { + + Person target = converter.read(Person.class, + new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("alive", "true")))); + + assertThat(target.alive).isEqualTo(Boolean.TRUE); + } + + @Test // DATAREDIS-425 + void writesDateValuesCorrectly() { + + Calendar cal = Calendar.getInstance(); + cal.set(1978, Calendar.NOVEMBER, 25); + + rand.birthdate = cal.getTime(); + + assertThat(write(rand)).containsEntry("birthdate", rand.birthdate); + } + + @Test // DATAREDIS-425 + void readsDateValuesCorrectly() { + + Calendar cal = Calendar.getInstance(); + cal.set(1978, Calendar.NOVEMBER, 25); + + Date date = cal.getTime(); + + Person target = converter.read(Person.class, new RedisData( + Bucket.newBucketFromStringMap(Collections.singletonMap("birthdate", Long.valueOf(date.getTime()).toString())))); + + assertThat(target.birthdate).isEqualTo(date); + } + + @Test // DATAREDIS-425 + void writeSingleReferenceOnRootCorrectly() { + + Location location = new Location(); + location.id = "1"; + location.name = "tar valon"; + + rand.location = location; + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("location", "locations:1") // + .without("location.id") // + .without("location.name"); + } + + @Test // DATAREDIS-425 + void readLoadsReferenceDataOnRootCorrectly() { + + Location location = new Location(); + location.id = "1"; + location.name = "tar valon"; + + Map locationMap = new LinkedHashMap<>(); + 
locationMap.put("id", location.id); + locationMap.put("name", location.name); + + when(resolverMock.resolveReference(eq("1"), eq("locations"))) + .thenReturn(Bucket.newBucketFromStringMap(locationMap).rawMap()); + + Map map = new LinkedHashMap<>(); + map.put("location", "locations:1"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.location).isEqualTo(location); + } + + @Test // DATAREDIS-425 + void writeSingleReferenceOnNestedElementCorrectly() { + + Location location = new Location(); + location.id = "1"; + location.name = "tar valon"; + + Person egwene = new Person(); + egwene.location = location; + + rand.coworkers = Collections.singletonList(egwene); + + assertThat(write(rand)).containsEntry("coworkers.[0].location", "locations:1") // + .without("coworkers.[0].location.id") // + .without("coworkers.[0].location.name"); + } + + @Test // DATAREDIS-425 + void readLoadsReferenceDataOnNestedElementCorrectly() { + + Location location = new Location(); + location.id = "1"; + location.name = "tar valon"; + + Map locationMap = new LinkedHashMap<>(); + locationMap.put("id", location.id); + locationMap.put("name", location.name); + + when(resolverMock.resolveReference(eq("1"), eq("locations"))) + .thenReturn(Bucket.newBucketFromStringMap(locationMap).rawMap()); + + Map map = new LinkedHashMap<>(); + map.put("coworkers.[0].location", "locations:1"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.coworkers.get(0).location).isEqualTo(location); + } + + @Test // DATAREDIS-425 + void writeListOfReferencesOnRootCorrectly() { + + Location tarValon = new Location(); + tarValon.id = "1"; + tarValon.name = "tar valon"; + + Location falme = new Location(); + falme.id = "2"; + falme.name = "falme"; + + Location tear = new Location(); + tear.id = "3"; + tear.name = "city of tear"; + + rand.visited = Arrays.asList(tarValon, falme, 
tear); + + RedisTestData target = write(rand); + + assertThat(target).containsEntry("visited.[0]", "locations:1") // + .containsEntry("visited.[1]", "locations:2") // + .containsEntry("visited.[2]", "locations:3"); + } + + @Test // DATAREDIS-425 + void readLoadsListOfReferencesOnRootCorrectly() { + + Location tarValon = new Location(); + tarValon.id = "1"; + tarValon.name = "tar valon"; + + Location falme = new Location(); + falme.id = "2"; + falme.name = "falme"; + + Location tear = new Location(); + tear.id = "3"; + tear.name = "city of tear"; + + Map tarValonMap = new LinkedHashMap<>(); + tarValonMap.put("id", tarValon.id); + tarValonMap.put("name", tarValon.name); + + Map falmeMap = new LinkedHashMap<>(); + falmeMap.put("id", falme.id); + falmeMap.put("name", falme.name); + + Map tearMap = new LinkedHashMap<>(); + tearMap.put("id", tear.id); + tearMap.put("name", tear.name); + + Bucket.newBucketFromStringMap(tearMap).rawMap(); + + when(resolverMock.resolveReference(eq("1"), eq("locations"))) + .thenReturn(Bucket.newBucketFromStringMap(tarValonMap).rawMap()); + when(resolverMock.resolveReference(eq("2"), eq("locations"))) + .thenReturn(Bucket.newBucketFromStringMap(falmeMap).rawMap()); + when(resolverMock.resolveReference(eq("3"), eq("locations"))) + .thenReturn(Bucket.newBucketFromStringMap(tearMap).rawMap()); + + Map map = new LinkedHashMap<>(); + map.put("visited.[0]", "locations:1"); + map.put("visited.[1]", "locations:2"); + map.put("visited.[2]", "locations:3"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.visited.get(0)).isEqualTo(tarValon); + assertThat(target.visited.get(1)).isEqualTo(falme); + assertThat(target.visited.get(2)).isEqualTo(tear); + } + + @Test // DATAREDIS-425 + void writeSetsAnnotatedTimeToLiveCorrectly() { + + ExpiringPerson birgitte = new ExpiringPerson(); + birgitte.id = "birgitte"; + birgitte.name = "Birgitte Silverbow"; + + 
assertThat(write(birgitte).getRedisData().getTimeToLive()).isEqualTo(5L); + } + + @Test // DATAREDIS-425 + void writeDoesNotTTLWhenNotPresent() { + + Location tear = new Location(); + tear.id = "tear"; + tear.name = "Tear"; + + assertThat(write(tear).getRedisData().getTimeToLive()).isNull(); + } + + @Test // DATAREDIS-425 + void writeShouldConsiderKeyspaceConfiguration() { + + this.converter.getMappingContext().getMappingConfiguration().getKeyspaceConfiguration() + .addKeyspaceSettings(new KeyspaceSettings(Address.class, "o_O")); + + Address address = new Address(); + address.city = "Tear"; + + assertThat(write(address).getRedisData().getKeyspace()).isEqualTo("o_O"); + } + + @Test // DATAREDIS-425 + void writeShouldConsiderTimeToLiveConfiguration() { + + KeyspaceSettings assignment = new KeyspaceSettings(Address.class, "o_O"); + assignment.setTimeToLive(5L); + + this.converter.getMappingContext().getMappingConfiguration().getKeyspaceConfiguration() + .addKeyspaceSettings(assignment); + + Address address = new Address(); + address.city = "Tear"; + + assertThat(write(address).getRedisData().getTimeToLive()).isEqualTo(5L); + } + + @Test // DATAREDIS-425, DATAREDIS-634 + void writeShouldHonorCustomConversionOnRootType() { + + RedisCustomConversions customConversions = new RedisCustomConversions( + Collections.singletonList(new AddressToBytesConverter())); + + RedisMappingContext mappingContext = new RedisMappingContext(); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); + + this.converter = new ReferenceMappingRedisConverter(mappingContext, null, resolverMock); + this.converter.setCustomConversions(customConversions); + this.converter.afterPropertiesSet(); + + Address address = new Address(); + address.country = "Tel'aran'rhiod"; + address.city = "unknown"; + + assertThat(write(address)).containsEntry("_raw", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + } + + @Test // DATAREDIS-425, DATAREDIS-634 + void 
writeShouldHonorCustomConversionOnNestedType() { + + RedisCustomConversions customConversions = new RedisCustomConversions( + Collections.singletonList(new AddressToBytesConverter())); + + RedisMappingContext mappingContext = new RedisMappingContext(); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); + + this.converter = new ReferenceMappingRedisConverter(mappingContext, null, resolverMock); + this.converter.setCustomConversions(customConversions); + this.converter.afterPropertiesSet(); + + Address address = new Address(); + address.country = "Tel'aran'rhiod"; + address.city = "unknown"; + rand.address = address; + + assertThat(write(rand)).containsEntry("address", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + } + + @Test // DATAREDIS-425 + void writeShouldHonorIndexOnCustomConversionForNestedType() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new AddressToBytesConverter()))); + this.converter.afterPropertiesSet(); + + Address address = new Address(); + address.country = "andor"; + rand.address = address; + + assertThat(write(rand).getRedisData().getIndexedData()) + .contains(new SimpleIndexedPropertyValue(KEYSPACE_PERSON, "address.country", "andor")); + } + + @Test // DATAREDIS-425 + void writeShouldHonorIndexAnnotationsOnWhenCustomConversionOnNestedype() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new AddressToBytesConverter()))); + this.converter.afterPropertiesSet(); + + Address address = new Address(); + address.country = "Tel'aran'rhiod"; + address.city = "unknown"; + rand.address = address; + + assertThat(write(rand).getRedisData().getIndexedData().isEmpty()).isFalse(); + } + + @Test // DATAREDIS-425 + void 
readShouldHonorCustomConversionOnRootType() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new BytesToAddressConverter()))); + this.converter.afterPropertiesSet(); + + Map map = new LinkedHashMap<>(); + map.put("_raw", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + + Address target = converter.read(Address.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.city).isEqualTo("unknown"); + assertThat(target.country).isEqualTo("Tel'aran'rhiod"); + } + + @Test // DATAREDIS-425 + void readShouldHonorCustomConversionOnNestedType() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new BytesToAddressConverter()))); + this.converter.afterPropertiesSet(); + + Map map = new LinkedHashMap<>(); + map.put("address", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.address).isNotNull(); + assertThat(target.address.city).isEqualTo("unknown"); + assertThat(target.address.country).isEqualTo("Tel'aran'rhiod"); + } + + @Test // DATAREDIS-544 + void readShouldHonorCustomConversionOnNestedTypeViaConstructorCreation() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new BytesToAddressConverter()))); + this.converter.afterPropertiesSet(); + + Map map = new LinkedHashMap<>(); + map.put("address", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + + PersonWithConstructorAndAddress target = converter.read(PersonWithConstructorAndAddress.class, + new 
RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target.address).isNotNull(); + assertThat(target.address.city).isEqualTo("unknown"); + assertThat(target.address.country).isEqualTo("Tel'aran'rhiod"); + } + + @Test // DATAREDIS-425 + void writeShouldPickUpTimeToLiveFromPropertyIfPresent() { + + ExipringPersonWithExplicitProperty aviendha = new ExipringPersonWithExplicitProperty(); + aviendha.id = "aviendha"; + aviendha.ttl = 2L; + + assertThat(write(aviendha).getRedisData().getTimeToLive()).isEqualTo(120L); + } + + @Test // DATAREDIS-425 + void writeShouldUseDefaultTimeToLiveIfPropertyIsPresentButNull() { + + ExipringPersonWithExplicitProperty aviendha = new ExipringPersonWithExplicitProperty(); + aviendha.id = "aviendha"; + + assertThat(write(aviendha).getRedisData().getTimeToLive()).isEqualTo(5L); + } + + @Test // DATAREDIS-425 + void writeShouldConsiderMapConvertersForRootType() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new SpeciesToMapConverter()))); + this.converter.afterPropertiesSet(); + + Species myrddraal = new Species(); + myrddraal.name = "myrddraal"; + myrddraal.alsoKnownAs = Arrays.asList("halfmen", "fades", "neverborn"); + + assertThat(write(myrddraal)).containsEntry("species-name", "myrddraal").containsEntry("species-nicknames", + "halfmen,fades,neverborn"); + } + + @Test // DATAREDIS-425 + void writeShouldConsiderMapConvertersForNestedType() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new SpeciesToMapConverter()))); + this.converter.afterPropertiesSet(); + + rand.species = new Species(); + rand.species.name = "human"; + + assertThat(write(rand)).containsEntry("species.species-name", "human"); + } + + @Test // DATAREDIS-425 + void 
readShouldConsiderMapConvertersForRootType() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new MapToSpeciesConverter()))); + this.converter.afterPropertiesSet(); + Map map = new LinkedHashMap<>(); + map.put("species-name", "trolloc"); + + Species target = converter.read(Species.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target).isNotNull(); + assertThat(target.name).isEqualTo("trolloc"); + } + + @Test // DATAREDIS-425 + void readShouldConsiderMapConvertersForNestedType() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new MapToSpeciesConverter()))); + this.converter.afterPropertiesSet(); + + Map map = new LinkedHashMap<>(); + map.put("species.species-name", "trolloc"); + + Person target = converter.read(Person.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target).isNotNull(); + assertThat(target.species.name).isEqualTo("trolloc"); + } + + @Test // DATAREDIS-425 + void writeShouldConsiderMapConvertersInsideLists() { + + this.converter = new ReferenceMappingRedisConverter(new RedisMappingContext(), null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new SpeciesToMapConverter()))); + this.converter.afterPropertiesSet(); + + TheWheelOfTime twot = new TheWheelOfTime(); + twot.species = new ArrayList<>(); + + Species myrddraal = new Species(); + myrddraal.name = "myrddraal"; + myrddraal.alsoKnownAs = Arrays.asList("halfmen", "fades", "neverborn"); + twot.species.add(myrddraal); + + assertThat(write(twot)).containsEntry("species.[0].species-name", "myrddraal") + .containsEntry("species.[0].species-nicknames", "halfmen,fades,neverborn"); + } + + @Test // DATAREDIS-425 + 
void readShouldConsiderMapConvertersForValuesInList() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new MapToSpeciesConverter()))); + this.converter.afterPropertiesSet(); + + Map map = new LinkedHashMap<>(); + map.put("species.[0].species-name", "trolloc"); + + TheWheelOfTime target = converter.read(TheWheelOfTime.class, new RedisData(Bucket.newBucketFromStringMap(map))); + + assertThat(target).isNotNull(); + assertThat(target.species).isNotNull(); + assertThat(target.species.get(0)).isNotNull(); + assertThat(target.species.get(0).name).isEqualTo("trolloc"); + } + + @Test // DATAREDIS-492 + void writeHandlesArraysOfSimpleTypeProperly() { + + WithArrays source = new WithArrays(); + source.arrayOfSimpleTypes = new String[] { "rand", "mat", "perrin" }; + + assertThat(write(source)).containsEntry("arrayOfSimpleTypes.[0]", "rand") + .containsEntry("arrayOfSimpleTypes.[1]", "mat").containsEntry("arrayOfSimpleTypes.[2]", "perrin"); + } + + @Test // DATAREDIS-492 + void readHandlesArraysOfSimpleTypeProperly() { + + Map source = new LinkedHashMap<>(); + source.put("arrayOfSimpleTypes.[0]", "rand"); + source.put("arrayOfSimpleTypes.[1]", "mat"); + source.put("arrayOfSimpleTypes.[2]", "perrin"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.arrayOfSimpleTypes).isEqualTo(new String[] { "rand", "mat", "perrin" }); + } + + @Test // GH-1981 + void readHandlesByteArrays() { + + Map source = new LinkedHashMap<>(); + source.put("avatar", "foo-bar-baz"); + source.put("otherAvatar", "foo-bar-baz"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.avatar).isEqualTo("foo-bar-baz".getBytes()); + } + + @Test // GH-1981 + void writeHandlesByteArrays() { + + WithArrays withArrays = new WithArrays(); + withArrays.avatar = "foo-bar-baz".getBytes(); + + 
assertThat(write(withArrays)).containsEntry("avatar", "foo-bar-baz"); + } + + @Test // GH-1981 + void readHandlesByteArraysUsingCollectionRepresentation() { + + Map source = new LinkedHashMap<>(); + source.put("avatar.[0]", "102"); + source.put("avatar.[1]", "111"); + source.put("avatar.[2]", "111"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.avatar).isEqualTo("foo".getBytes()); + } + + @Test // DATAREDIS-492 + void writeHandlesArraysOfComplexTypeProperly() { + + WithArrays source = new WithArrays(); + + Species trolloc = new Species(); + trolloc.name = "trolloc"; + + Species myrddraal = new Species(); + myrddraal.name = "myrddraal"; + myrddraal.alsoKnownAs = Arrays.asList("halfmen", "fades", "neverborn"); + + source.arrayOfCompexTypes = new Species[] { trolloc, myrddraal }; + + assertThat(write(source)).containsEntry("arrayOfCompexTypes.[0].name", "trolloc") // + .containsEntry("arrayOfCompexTypes.[1].name", "myrddraal") // + .containsEntry("arrayOfCompexTypes.[1].alsoKnownAs.[0]", "halfmen") // + .containsEntry("arrayOfCompexTypes.[1].alsoKnownAs.[1]", "fades") // + .containsEntry("arrayOfCompexTypes.[1].alsoKnownAs.[2]", "neverborn"); + } + + @Test // DATAREDIS-492 + void readHandlesArraysOfComplexTypeProperly() { + + Map source = new LinkedHashMap<>(); + source.put("arrayOfCompexTypes.[0].name", "trolloc"); + source.put("arrayOfCompexTypes.[1].name", "myrddraal"); + source.put("arrayOfCompexTypes.[1].alsoKnownAs.[0]", "halfmen"); + source.put("arrayOfCompexTypes.[1].alsoKnownAs.[1]", "fades"); + source.put("arrayOfCompexTypes.[1].alsoKnownAs.[2]", "neverborn"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.arrayOfCompexTypes[0]).isNotNull(); + assertThat(target.arrayOfCompexTypes[0].name).isEqualTo("trolloc"); + assertThat(target.arrayOfCompexTypes[1]).isNotNull(); + assertThat(target.arrayOfCompexTypes[1].name).isEqualTo("myrddraal"); + 
assertThat(target.arrayOfCompexTypes[1].alsoKnownAs).containsExactly("halfmen", "fades", "neverborn"); + } + + @Test // DATAREDIS-489 + void writeHandlesArraysOfObjectTypeProperly() { + + Species trolloc = new Species(); + trolloc.name = "trolloc"; + + WithArrays source = new WithArrays(); + source.arrayOfObject = new Object[] { "rand", trolloc, 100L }; + + assertThat(write(source)).containsEntry("arrayOfObject.[0]", "rand") // + .containsEntry("arrayOfObject.[0]._class", "java.lang.String") + .containsEntry("arrayOfObject.[1]._class", Species.class.getName()) // + .containsEntry("arrayOfObject.[1].name", "trolloc") // + .containsEntry("arrayOfObject.[2]._class", "java.lang.Long") // + .containsEntry("arrayOfObject.[2]", "100"); + } + + @Test // DATAREDIS-489 + void readHandlesArraysOfObjectTypeProperly() { + + Map source = new LinkedHashMap<>(); + source.put("arrayOfObject.[0]", "rand"); + source.put("arrayOfObject.[0]._class", "java.lang.String"); + source.put("arrayOfObject.[1]._class", Species.class.getName()); + source.put("arrayOfObject.[1].name", "trolloc"); + source.put("arrayOfObject.[2]._class", "java.lang.Long"); + source.put("arrayOfObject.[2]", "100"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.arrayOfObject[0]).isNotNull(); + assertThat(target.arrayOfObject[0]).isInstanceOf(String.class); + assertThat(target.arrayOfObject[1]).isNotNull(); + assertThat(target.arrayOfObject[1]).isInstanceOf(Species.class); + assertThat(target.arrayOfObject[2]).isNotNull(); + assertThat(target.arrayOfObject[2]).isInstanceOf(Long.class); + } + + @Test // DATAREDIS-489 + void writeShouldAppendTyeHintToObjectPropertyValueTypesCorrectly() { + + TypeWithObjectValueTypes sample = new TypeWithObjectValueTypes(); + sample.object = "bar"; + + RedisTestData bucket = write(sample); + + assertThat(bucket).containsEntry("object", "bar").containsEntry("object._class", "java.lang.String"); + } + + @Test // DATAREDIS-489 + void 
shouldWriteReadObjectPropertyValueTypeCorrectly() { + + TypeWithObjectValueTypes di = new TypeWithObjectValueTypes(); + di.object = "foo"; + + RedisTestData rd = write(di); + + TypeWithObjectValueTypes result = converter.read(TypeWithObjectValueTypes.class, rd.getRedisData()); + assertThat(result.object).isInstanceOf(String.class); + } + + @Test // DATAREDIS-489 + void writeShouldAppendTyeHintToObjectMapValueTypesCorrectly() { + + TypeWithObjectValueTypes sample = new TypeWithObjectValueTypes(); + sample.map.put("string", "bar"); + sample.map.put("long", 1L); + sample.map.put("date", new Date()); + + RedisTestData bucket = write(sample); + + assertThat(bucket).containsEntry("map.[string]", "bar").containsEntry("map.[string]._class", "java.lang.String"); + assertThat(bucket).containsEntry("map.[long]", "1").containsEntry("map.[long]._class", "java.lang.Long"); + assertThat(bucket).containsEntry("map.[date]._class", "java.util.Date"); + } + + @Test // DATAREDIS-489 + void shouldWriteReadObjectMapValueTypeCorrectly() { + + TypeWithObjectValueTypes sample = new TypeWithObjectValueTypes(); + sample.map.put("string", "bar"); + sample.map.put("long", 1L); + sample.map.put("date", new Date()); + + RedisTestData rd = write(sample); + + TypeWithObjectValueTypes result = converter.read(TypeWithObjectValueTypes.class, rd.getRedisData()); + assertThat(result.map.get("string")).isInstanceOf(String.class); + assertThat(result.map.get("long")).isInstanceOf(Long.class); + assertThat(result.map.get("date")).isInstanceOf(Date.class); + } + + @Test // DATAREDIS-489 + void writeShouldAppendTyeHintToObjectListValueTypesCorrectly() { + + TypeWithObjectValueTypes sample = new TypeWithObjectValueTypes(); + sample.list.add("string"); + sample.list.add(1L); + sample.list.add(new Date()); + + RedisTestData bucket = write(sample); + + assertThat(bucket).containsEntry("list.[0]", "string").containsEntry("list.[0]._class", "java.lang.String"); + assertThat(bucket).containsEntry("list.[1]", 
"1").containsEntry("list.[1]._class", "java.lang.Long"); + assertThat(bucket).containsEntry("list.[2]._class", "java.util.Date"); + } + + @Test // DATAREDIS-489 + void shouldWriteReadObjectListValueTypeCorrectly() { + + TypeWithObjectValueTypes sample = new TypeWithObjectValueTypes(); + sample.list.add("string"); + sample.list.add(1L); + sample.list.add(new Date()); + + RedisTestData rd = write(sample); + + TypeWithObjectValueTypes result = converter.read(TypeWithObjectValueTypes.class, rd.getRedisData()); + assertThat(result.list.get(0)).isInstanceOf(String.class); + assertThat(result.list.get(1)).isInstanceOf(Long.class); + assertThat(result.list.get(2)).isInstanceOf(Date.class); + } + + @Test // DATAREDIS-909 + void shouldWriteReadObjectWithConstructorConversion() { + + Device sample = new Device(Instant.now(), Collections.singleton("foo")); + + RedisTestData rd = write(sample); + + Device result = converter.read(Device.class, rd.getRedisData()); + assertThat(result.now).isEqualTo(sample.now); + assertThat(result.profiles).isEqualTo(sample.profiles); + } + + @Test // DATAREDIS-509 + void writeHandlesArraysOfPrimitivesProperly() { + + Map source = new LinkedHashMap<>(); + source.put("arrayOfPrimitives.[0]", "1"); + source.put("arrayOfPrimitives.[1]", "2"); + source.put("arrayOfPrimitives.[2]", "3"); + + WithArrays target = read(WithArrays.class, source); + + assertThat(target.arrayOfPrimitives[0]).isEqualTo(1); + assertThat(target.arrayOfPrimitives[1]).isEqualTo(2); + assertThat(target.arrayOfPrimitives[2]).isEqualTo(3); + } + + @Test // DATAREDIS-509 + void readHandlesArraysOfPrimitivesProperly() { + + WithArrays source = new WithArrays(); + source.arrayOfPrimitives = new int[] { 1, 2, 3 }; + assertThat(write(source)).containsEntry("arrayOfPrimitives.[0]", "1").containsEntry("arrayOfPrimitives.[1]", "2") + .containsEntry("arrayOfPrimitives.[2]", "3"); + } + + @Test // DATAREDIS-471 + void writeShouldNotAppendClassTypeHint() { + + Person value = new Person(); + 
value.firstname = "rand"; + value.age = 24; + + PartialUpdate update = new PartialUpdate<>("123", value); + + assertThat(write(update).getBucket().get("_class")).isNull(); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdateSimpleValueCorrectly() { + + Person value = new Person(); + value.firstname = "rand"; + value.age = 24; + + PartialUpdate update = new PartialUpdate<>("123", value); + + assertThat(write(update)).containsEntry("firstname", "rand").containsEntry("age", "24"); + } + + @Test // GH-1981 + void writeShouldWritePartialUpdateFromEntityByteArrayValueCorrectly() { + + WithArrays value = new WithArrays(); + value.avatar = "foo-bar-baz".getBytes(); + + PartialUpdate update = new PartialUpdate<>("123", value); + + assertThat(write(update)).containsEntry("avatar", "foo-bar-baz"); + } + + @Test // GH-1981 + void writeShouldWritePartialUpdateFromSetByteArrayValueCorrectly() { + + PartialUpdate update = PartialUpdate.newPartialUpdate(42, WithArrays.class).set("avatar", + "foo-bar-baz".getBytes()); + + assertThat(write(update)).containsEntry("avatar", "foo-bar-baz"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleValueCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("firstname", "rand").set("age", 24); + + assertThat(write(update)).containsEntry("firstname", "rand").containsEntry("age", "24"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdateNestedPathWithSimpleValueCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("address.city", "two rivers"); + + assertThat(write(update)).containsEntry("address.city", "two rivers"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexValueCorrectly() { + + Address address = new Address(); + address.city = "two rivers"; + address.country = "andor"; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("address", address); + + 
assertThat(write(update)).containsEntry("address.city", "two rivers").containsEntry("address.country", "andor"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleListValueCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("nicknames", + Arrays.asList("dragon", "lews")); + + assertThat(write(update)).containsEntry("nicknames.[0]", "dragon").containsEntry("nicknames.[1]", "lews"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexListValueCorrectly() { + + Person mat = new Person(); + mat.firstname = "mat"; + mat.age = 24; + + Person perrin = new Person(); + perrin.firstname = "perrin"; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("coworkers", + Arrays.asList(mat, perrin)); + + assertThat(write(update)).containsEntry("coworkers.[0].firstname", "mat").containsEntry("coworkers.[0].age", "24") + .containsEntry("coworkers.[1].firstname", "perrin"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleListValueWhenNotPassedInAsCollectionCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("nicknames", "dragon"); + + assertThat(write(update)).containsEntry("nicknames.[0]", "dragon"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexListValueWhenNotPassedInAsCollectionCorrectly() { + + Person mat = new Person(); + mat.firstname = "mat"; + mat.age = 24; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("coworkers", mat); + + assertThat(write(update)).containsEntry("coworkers.[0].firstname", "mat").containsEntry("coworkers.[0].age", "24"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleListValueWhenNotPassedInAsCollectionWithPositionalParameterCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("nicknames.[5]", "dragon"); + + 
assertThat(write(update)).containsEntry("nicknames.[5]", "dragon"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexListValueWhenNotPassedInAsCollectionWithPositionalParameterCorrectly() { + + Person mat = new Person(); + mat.firstname = "mat"; + mat.age = 24; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("coworkers.[5]", mat); + + assertThat(write(update)).containsEntry("coworkers.[5].firstname", "mat").containsEntry("coworkers.[5].age", "24"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleMapValueCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("physicalAttributes", + Collections.singletonMap("eye-color", "grey")); + + assertThat(write(update)).containsEntry("physicalAttributes.[eye-color]", "grey"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexMapValueCorrectly() { + + Person tam = new Person(); + tam.firstname = "tam"; + tam.alive = false; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("relatives", + Collections.singletonMap("father", tam)); + + assertThat(write(update)).containsEntry("relatives.[father].firstname", "tam") + .containsEntry("relatives.[father].alive", "0"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleMapValueWhenNotPassedInAsCollectionCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("physicalAttributes", + Collections.singletonMap("eye-color", "grey").entrySet().iterator().next()); + + assertThat(write(update)).containsEntry("physicalAttributes.[eye-color]", "grey"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithComplexMapValueWhenNotPassedInAsCollectionCorrectly() { + + Person tam = new Person(); + tam.firstname = "tam"; + tam.alive = false; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("relatives", + 
Collections.singletonMap("father", tam).entrySet().iterator().next()); + + assertThat(write(update)).containsEntry("relatives.[father].firstname", "tam") + .containsEntry("relatives.[father].alive", "0"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleMapValueWhenNotPassedInAsCollectionWithPositionalParameterCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("physicalAttributes.[eye-color]", + "grey"); + + assertThat(write(update)).containsEntry("physicalAttributes.[eye-color]", "grey"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithSimpleMapValueOnNestedElementCorrectly() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("relatives.[father].firstname", "tam"); + + assertThat(write(update)).containsEntry("relatives.[father].firstname", "tam"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionOnPartialUpdatePathWithSimpleMapValueWhenItsASingleValueWithoutPath() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("physicalAttributes", "grey"); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithRegisteredCustomConversionCorrectly() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new AddressToBytesConverter()))); + this.converter.afterPropertiesSet(); + + Address address = new Address(); + address.country = "Tel'aran'rhiod"; + address.city = "unknown"; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("address", address); + + assertThat(write(update)).containsEntry("address", "{\"city\":\"unknown\",\"country\":\"Tel'aran'rhiod\"}"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithReferenceCorrectly() { + + Location tar = new 
Location(); + tar.id = "1"; + tar.name = "tar valon"; + + Location tear = new Location(); + tear.id = "2"; + tear.name = "city of tear"; + + PartialUpdate update = new PartialUpdate<>("123", Person.class).set("visited", Arrays.asList(tar, tear)); + + assertThat(write(update)).containsEntry("visited.[0]", "locations:1").containsEntry("visited.[1]", "locations:2") // + .without("visited.id") // + .without("visited.name"); + } + + @Test // DATAREDIS-471 + void writeShouldWritePartialUpdatePathWithListOfReferencesCorrectly() { + + Location location = new Location(); + location.id = "1"; + location.name = "tar valon"; + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("location", location); + + assertThat(write(update)).containsEntry("location", "locations:1") // + .without("location.id") // + .without("location.name"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionForUpdateValueNotAssignableToDomainTypeProperty() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("age", "twenty-four"); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)) + .withMessageContaining("java.lang.String cannot be assigned"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionForUpdateCollectionValueNotAssignableToDomainTypeProperty() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("coworkers.[0]", "buh buh the bear"); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)) + .withMessageContaining("java.lang.String cannot be assigned").withMessageContaining(Person.class.getName()) + .withMessageContaining("coworkers.[0]"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionForUpdateValueInCollectionNotAssignableToDomainTypeProperty() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("coworkers", Collections.singletonList("foo")); + + 
assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)) + .withMessageContaining("java.lang.String cannot be assigned").withMessageContaining(Person.class.getName()) + .withMessageContaining("coworkers"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionForUpdateMapValueNotAssignableToDomainTypeProperty() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("relatives.[father]", "buh buh the bear"); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)) + .withMessageContaining("java.lang.String cannot be assigned").withMessageContaining(Person.class.getName()) + .withMessageContaining("relatives.[father]"); + } + + @Test // DATAREDIS-471 + void writeShouldThrowExceptionForUpdateValueInMapNotAssignableToDomainTypeProperty() { + + PartialUpdate update = new PartialUpdate<>("123", Person.class) // + .set("relatives", Collections.singletonMap("father", "buh buh the bear")); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> write(update)) + .withMessageContaining("java.lang.String cannot be assigned").withMessageContaining(Person.class.getName()) + .withMessageContaining("relatives.[father]"); + } + + @Test // DATAREDIS-875 + void shouldNotWriteTypeHintForPrimitveTypes() { + + Size source = new Size(); + source.height = 1; + + assertThat(write(source).getBucket().get("height._class")).isNull(); + } + + @Test // DATAREDIS-875 + void shouldReadPrimitveTypes() { + + Map source = new LinkedHashMap<>(); + source.put("height", "1000"); + + assertThat(read(Size.class, source).height).isEqualTo(1000); + } + + @Test // DATAREDIS-925 + void readUUID() { + + UUID uuid = UUID.randomUUID(); + Map source = new LinkedHashMap<>(); + source.put("uuid", uuid.toString()); + + assertThat(read(JustSomeDifferentPropertyTypes.class, source).uuid).isEqualTo(uuid); + } + + @Test // DATAREDIS-925 + void writeUUID() { + + JustSomeDifferentPropertyTypes source = new 
JustSomeDifferentPropertyTypes(); + source.uuid = UUID.randomUUID(); + + assertThat(write(source)).containsEntry("uuid", source.uuid.toString()); + } + + @Test // DATAREDIS-955 + void readInnerListShouldNotInfluenceOuterWithSameName() { + + Map source = new LinkedHashMap<>(); + source.put("inners.[0].values.[0]", "i-1"); + source.put("inners.[0].values.[1]", "i-2"); + source.put("values.[0]", "o-1"); + source.put("values.[1]", "o-2"); + + Outer outer = read(Outer.class, source); + + assertThat(outer.values).isEqualTo(Arrays.asList("o-1", "o-2")); + assertThat(outer.inners.get(0).values).isEqualTo(Arrays.asList("i-1", "i-2")); + } + + @Test // DATAREDIS-955 + void readInnerListShouldNotInfluenceOuterWithSameNameWhenNull() { + + Map source = new LinkedHashMap<>(); + source.put("inners.[0].values.[0]", "i-1"); + source.put("inners.[0].values.[1]", "i-2"); + + Outer outer = read(Outer.class, source); + + assertThat(outer.values).isNull(); + assertThat(outer.inners.get(0).values).isEqualTo(Arrays.asList("i-1", "i-2")); + } + + @Test // DATAREDIS-911 + void writeEntityWithCustomConverter() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new AccountInfoToBytesConverter()))); + this.converter.afterPropertiesSet(); + + AccountInfo accountInfo = new AccountInfo(); + accountInfo.setId("ai-id-1"); + accountInfo.setAccount("123456"); + accountInfo.setAccountName("Inamur Rahman Sadid"); + + assertThat(write(accountInfo).getRedisData().getId()).isEqualTo(accountInfo.getId()); + } + + @Test // DATAREDIS-911 + void readEntityWithCustomConverter() { + + this.converter = new ReferenceMappingRedisConverter(null, null, resolverMock); + this.converter + .setCustomConversions(new RedisCustomConversions(Collections.singletonList(new BytesToAccountInfoConverter()))); + this.converter.afterPropertiesSet(); + + Bucket bucket = new Bucket(); + bucket.put("_raw", 
"ai-id-1|123456|Golam Mazid Sajib".getBytes(StandardCharsets.UTF_8)); + + RedisData redisData = new RedisData(bucket); + redisData.setKeyspace(KEYSPACE_ACCOUNT); + redisData.setId("ai-id-1"); + + AccountInfo target = converter.read(AccountInfo.class, redisData); + + assertThat(target.getAccount()).isEqualTo("123456"); + assertThat(target.getAccountName()).isEqualTo("Golam Mazid Sajib"); + } + + @Test // GH-2349 + void writeGenericEntity() { + + WithGenericEntity generic = new WithGenericEntity<>(); + generic.entity = new User("hello"); + + assertThat(write(generic)).hasSize(3) // + .containsEntry("_class", + "org.springframework.data.redis.core.convert.ReferenceMappingRedisConverterUnitTests$WithGenericEntity") + .containsEntry("entity.name", "hello") // + .containsEntry("entity._class", + "org.springframework.data.redis.core.convert.ReferenceMappingRedisConverterUnitTests$User"); + } + + @Test // GH-2349 + void readGenericEntity() { + + Bucket bucket = new Bucket(); + bucket.put("entity.name", "hello".getBytes()); + bucket.put("entity._class", + "org.springframework.data.redis.core.convert.ReferenceMappingRedisConverterUnitTests$User".getBytes()); + + RedisData redisData = new RedisData(bucket); + redisData.setKeyspace(KEYSPACE_ACCOUNT); + redisData.setId("ai-id-1"); + + WithGenericEntity generic = converter.read(WithGenericEntity.class, redisData); + + assertThat(generic.entity).isNotNull(); + assertThat(generic.entity.name).isEqualTo("hello"); + } + + @Test // DATAREDIS-1175 + @EnabledOnJre(JRE.JAVA_8) + // FIXME: https://github.com/spring-projects/spring-data-redis/issues/2168 + void writePlainList() { + + List source = Arrays.asList("Hello", "stream", "message", 100L); + RedisTestData target = write(source); + + assertThat(target).containsEntry("[0]", "Hello") // + .containsEntry("[1]", "stream") // + .containsEntry("[2]", "message") // + .containsEntry("[3]", "100"); + } + + @Test // DATAREDIS-1175 + void readPlainList() { + + Map source = new 
LinkedHashMap<>(); + source.put("[0]._class", "java.lang.String"); + source.put("[0]", "Hello"); + source.put("[1]._class", "java.lang.String"); + source.put("[1]", "stream"); + source.put("[2]._class", "java.lang.String"); + source.put("[2]", "message"); + source.put("[3]._class", "java.lang.Long"); + source.put("[3]", "100"); + + List target = read(List.class, source); + + assertThat(target).containsExactly("Hello", "stream", "message", 100L); + } + + private RedisTestData write(Object source) { + + RedisData rdo = new RedisData(); + converter.write(source, rdo); + return RedisTestData.from(rdo); + } + + private T read(Class type, Map source) { + return converter.read(type, new RedisData(Bucket.newBucketFromStringMap(source))); + } + + @WritingConverter + static class AddressToBytesConverter implements Converter { + + private final ObjectMapper mapper; + private final Jackson2JsonRedisSerializer
serializer; + + AddressToBytesConverter() { + + mapper = new ObjectMapper(); + mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() + .withFieldVisibility(Visibility.ANY).withGetterVisibility(Visibility.NONE) + .withSetterVisibility(Visibility.NONE).withCreatorVisibility(Visibility.NONE)); + + serializer = new Jackson2JsonRedisSerializer<>(Address.class); + serializer.setObjectMapper(mapper); + } + + @Override + public byte[] convert(Address value) { + return serializer.serialize(value); + } + } + + @WritingConverter + static class SpeciesToMapConverter implements Converter> { + + @Override + public Map convert(Species source) { + + if (source == null) { + return null; + } + + Map map = new LinkedHashMap<>(); + if (source.name != null) { + map.put("species-name", source.name.getBytes(Charset.forName("UTF-8"))); + } + map.put("species-nicknames", + StringUtils.collectionToCommaDelimitedString(source.alsoKnownAs).getBytes(Charset.forName("UTF-8"))); + return map; + } + } + + @ReadingConverter + static class MapToSpeciesConverter implements Converter, Species> { + + @Override + public Species convert(Map source) { + + if (source == null || source.isEmpty()) { + return null; + } + + Species species = new Species(); + + if (source.containsKey("species-name")) { + species.name = new String(source.get("species-name"), Charset.forName("UTF-8")); + } + if (source.containsKey("species-nicknames")) { + species.alsoKnownAs = Arrays.asList(StringUtils + .commaDelimitedListToStringArray(new String(source.get("species-nicknames"), Charset.forName("UTF-8")))); + } + return species; + } + } + + @ReadingConverter + static class BytesToAddressConverter implements Converter { + + private final ObjectMapper mapper; + private final Jackson2JsonRedisSerializer
serializer; + + BytesToAddressConverter() { + + mapper = new ObjectMapper(); + mapper.setVisibility(mapper.getSerializationConfig().getDefaultVisibilityChecker() + .withFieldVisibility(Visibility.ANY).withGetterVisibility(Visibility.NONE) + .withSetterVisibility(Visibility.NONE).withCreatorVisibility(Visibility.NONE)); + + serializer = new Jackson2JsonRedisSerializer<>(Address.class); + serializer.setObjectMapper(mapper); + } + + @Override + public Address convert(byte[] value) { + return serializer.deserialize(value); + } + } + + @WritingConverter + static class AccountInfoToBytesConverter implements Converter { + + @Override + public byte[] convert(AccountInfo accountInfo) { + StringBuilder resp = new StringBuilder(); + resp.append(accountInfo.getId()).append("|").append(accountInfo.getAccount()).append("|") + .append(accountInfo.getAccountName()); + return resp.toString().getBytes(StandardCharsets.UTF_8); + } + } + + @ReadingConverter + static class BytesToAccountInfoConverter implements Converter { + + @Override + public AccountInfo convert(byte[] bytes) { + String[] values = new String(bytes, StandardCharsets.UTF_8).split("\\|"); + AccountInfo accountInfo = new AccountInfo(); + accountInfo.setId(values[0]); + accountInfo.setAccount(values[1]); + accountInfo.setAccountName(values[2]); + return accountInfo; + } + } + + static class WithGenericEntity { + T entity; + } + + @AllArgsConstructor + static class User { + String name; + } + +}