diff --git a/pom.xml b/pom.xml index 9f4b6bc897..ba99633bfa 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 4.5.0-SNAPSHOT + 4.5.0-QRC-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 58c63dfc97..ec441c4e1f 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -15,7 +15,7 @@ org.springframework.data spring-data-mongodb-parent - 4.5.0-SNAPSHOT + 4.5.0-QRC-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 096fd48022..1cd1bc9335 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -13,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 4.5.0-SNAPSHOT + 4.5.0-QRC-SNAPSHOT ../pom.xml @@ -135,7 +135,7 @@ org.awaitility awaitility - 4.2.2 + ${awaitility} test diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index d627ba2468..5df30e0b92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -15,17 +15,34 @@ */ package org.springframework.data.mongodb.core; +import java.nio.charset.StandardCharsets; import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.function.Function; +import java.util.stream.StreamSupport; +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonNull; +import org.bson.Document; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Contract; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -41,6 +58,7 @@ * @author Mark Paluch * @author Andreas Zink * @author Ben Foster + * @author Ross Lawley */ public class CollectionOptions { @@ -51,10 +69,12 @@ public class CollectionOptions { private ValidationOptions validationOptions; private @Nullable TimeSeriesOptions timeSeriesOptions; private @Nullable CollectionChangeStreamOptions changeStreamOptions; + private @Nullable EncryptedFieldsOptions encryptedFieldsOptions; private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions, - 
@Nullable CollectionChangeStreamOptions changeStreamOptions) { + @Nullable CollectionChangeStreamOptions changeStreamOptions, + @Nullable EncryptedFieldsOptions encryptedFieldsOptions) { this.maxDocuments = maxDocuments; this.size = size; @@ -63,6 +83,7 @@ private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nul this.validationOptions = validationOptions; this.timeSeriesOptions = timeSeriesOptions; this.changeStreamOptions = changeStreamOptions; + this.encryptedFieldsOptions = encryptedFieldsOptions; } /** @@ -76,7 +97,7 @@ public static CollectionOptions just(Collation collation) { Assert.notNull(collation, "Collation must not be null"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null, null); } /** @@ -86,7 +107,7 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null, null); } /** @@ -127,6 +148,46 @@ public static CollectionOptions emitChangedRevisions() { return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true)); } + /** + * Create new {@link CollectionOptions} with the given {@code encryptedFields}. + * + * @param encryptedFieldsOptions can be null + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(@Nullable EncryptedFieldsOptions encryptedFieldsOptions) { + return new CollectionOptions(null, null, null, null, ValidationOptions.NONE, null, null, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} reading encryption options from the given {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(MongoJsonSchema schema) { + return encryptedCollection(EncryptedFieldsOptions.fromSchema(schema)); + } + + /** + * Create new {@link CollectionOptions} building encryption options in a fluent style. + * + * @param optionsFunction must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection( + Function optionsFunction) { + return encryptedCollection(optionsFunction.apply(new EncryptedFieldsOptions())); + } + /** * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
* NOTE: Using capped collections requires defining {@link #size(long)}. @@ -136,7 +197,7 @@ public static CollectionOptions emitChangedRevisions() { */ public CollectionOptions capped() { return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** @@ -148,7 +209,7 @@ public CollectionOptions capped() { */ public CollectionOptions maxDocuments(long maxDocuments) { return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** @@ -160,7 +221,7 @@ public CollectionOptions maxDocuments(long maxDocuments) { */ public CollectionOptions size(long size) { return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** @@ -172,18 +233,18 @@ public CollectionOptions size(long size) { */ public CollectionOptions collation(@Nullable Collation collation) { return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given * {@link MongoJsonSchema}. * - * @param schema can be {@literal null}. + * @param schema must not be {@literal null}. * @return new {@link CollectionOptions}. * @since 2.1 */ - public CollectionOptions schema(@Nullable MongoJsonSchema schema) { + public CollectionOptions schema(MongoJsonSchema schema) { return validator(Validator.schema(schema)); } @@ -293,7 +354,7 @@ public CollectionOptions validation(ValidationOptions validationOptions) { Assert.notNull(validationOptions, "ValidationOptions must not be null"); return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** @@ -307,7 +368,7 @@ public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null"); return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); } /** @@ -321,7 +382,22 @@ public CollectionOptions changeStream(CollectionChangeStreamOptions changeStream Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null"); return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, - changeStreamOptions); + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Set the {@link EncryptedFieldsOptions} for collections using queryable encryption. + * + * @param encryptedFieldsOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. 
+ */ + @Contract("_ -> new") + @CheckReturnValue + public CollectionOptions encrypted(EncryptedFieldsOptions encryptedFieldsOptions) { + + Assert.notNull(encryptedFieldsOptions, "EncryptedCollectionOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -392,14 +468,24 @@ public Optional getChangeStreamOptions() { return Optional.ofNullable(changeStreamOptions); } + /** + * Get the {@code encryptedFields} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 4.5 + */ + public Optional getEncryptedFieldsOptions() { + return Optional.ofNullable(encryptedFieldsOptions); + } + @Override public String toString() { return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped + ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions=" - + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", disableValidation=" - + disableValidation() + ", strictValidation=" + strictValidation() + ", moderateValidation=" - + moderateValidation() + ", warnOnValidationError=" + warnOnValidationError() + ", failOnValidationError=" - + failOnValidationError() + '}'; + + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", encryptedCollectionOptions=" + + encryptedFieldsOptions + ", disableValidation=" + disableValidation() + ", strictValidation=" + + strictValidation() + ", moderateValidation=" + moderateValidation() + ", warnOnValidationError=" + + warnOnValidationError() + ", failOnValidationError=" + failOnValidationError() + '}'; } @Override @@ -431,7 +517,10 @@ public boolean equals(@Nullable Object o) { if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) { return false; } - return ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions); + if (!ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions)) { + return false; + } + return ObjectUtils.nullSafeEquals(encryptedFieldsOptions, that.encryptedFieldsOptions); } @Override @@ -443,6 +532,7 @@ public int hashCode() { result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions); result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions); result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(encryptedFieldsOptions); return result; } @@ -461,7 +551,8 @@ public static class ValidationOptions { private final @Nullable ValidationLevel validationLevel; private final @Nullable ValidationAction validationAction; - public ValidationOptions(Validator validator, ValidationLevel validationLevel, ValidationAction validationAction) { + public ValidationOptions(@Nullable Validator validator, @Nullable ValidationLevel validationLevel, + @Nullable ValidationAction validationAction) { this.validator = validator; this.validationLevel = validationLevel; @@ -576,6 +667,188 @@ public int hashCode() { } } + /** + * Encapsulation of Encryption options for collections. 
+ * + * @author Christoph Strobl + * @since 4.5 + */ + public static class EncryptedFieldsOptions { + + private static final EncryptedFieldsOptions NONE = new EncryptedFieldsOptions(); + + private final @Nullable MongoJsonSchema schema; + private final List queryableProperties; + + EncryptedFieldsOptions() { + this(null, List.of()); + } + + private EncryptedFieldsOptions(@Nullable MongoJsonSchema schema, + List queryableProperties) { + + this.schema = schema; + this.queryableProperties = queryableProperties; + } + + /** + * @return {@link EncryptedFieldsOptions#NONE} + */ + public static EncryptedFieldsOptions none() { + return NONE; + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromSchema(MongoJsonSchema schema) { + return new EncryptedFieldsOptions(schema, List.of()); + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromProperties(List properties) { + return new EncryptedFieldsOptions(null, List.copyOf(properties)); + } + + /** + * Add a new {@link QueryableJsonSchemaProperty queryable property} for the given source property. + *
+ * Please note that, a given {@link JsonSchemaProperty} may override options from a given {@link MongoJsonSchema} if + * set. + * + * @param property the queryable source - typically + * {@link org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty + * encrypted}. + * @param characteristics the query options to set. + * @return new instance of {@link EncryptedFieldsOptions}. + */ + @Contract("_, _ -> new") + @CheckReturnValue + public EncryptedFieldsOptions queryable(JsonSchemaProperty property, QueryCharacteristic... characteristics) { + + List targetPropertyList = new ArrayList<>(queryableProperties.size() + 1); + targetPropertyList.addAll(queryableProperties); + targetPropertyList.add(JsonSchemaProperty.queryable(property, List.of(characteristics))); + + return new EncryptedFieldsOptions(schema, targetPropertyList); + } + + public Document toDocument() { + return new Document("fields", selectPaths()); + } + + private List selectPaths() { + + Map fields = new LinkedHashMap<>(); + for (Document field : fromSchema()) { + fields.put(field.get("path", String.class), field); + } + for (Document field : fromProperties()) { + fields.put(field.get("path", String.class), field); + } + return List.copyOf(fields.values()); + } + + private List fromProperties() { + + if (queryableProperties.isEmpty()) { + return List.of(); + } + + List converted = new ArrayList<>(queryableProperties.size()); + for (QueryableJsonSchemaProperty property : queryableProperties) { + + Document field = new Document("path", property.getIdentifier()); + + if (!property.getTypes().isEmpty()) { + field.append("bsonType", property.getTypes().iterator().next().toBsonType().value()); + } + + if (property + .getTargetProperty() instanceof IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty encrypted) { + if (encrypted.getKeyId() != null) { + if (encrypted.getKeyId() instanceof String stringKey) { + field.append("keyId", + new BsonBinary(BsonBinarySubType.UUID_STANDARD, stringKey.getBytes(StandardCharsets.UTF_8))); + } else { + field.append("keyId", encrypted.getKeyId()); + } + } + } + + field.append("queries", StreamSupport.stream(property.getCharacteristics().spliterator(), false) + .map(QueryCharacteristic::toDocument).toList()); + + if (!field.containsKey("keyId")) { + field.append("keyId", BsonNull.VALUE); + } + + converted.add(field); + } + return converted; + } + + private List fromSchema() { + + if (schema == null) { + return List.of(); + } + + Document root = schema.schemaDocument(); + Map paths = new LinkedHashMap<>(); + collectPaths(root, null, paths); + + List fields = new ArrayList<>(); + if (!paths.isEmpty()) { + + for (Entry entry : paths.entrySet()) { + Document field = new Document("path", entry.getKey()); + field.append("keyId", entry.getValue().getOrDefault("keyId", BsonNull.VALUE)); + if (entry.getValue().containsKey("bsonType")) { + field.append("bsonType", entry.getValue().get("bsonType")); + } + field.put("queries", entry.getValue().get("queries")); + fields.add(field); + } + } + + return fields; + } + } + + private static void collectPaths(Document document, @Nullable String currentPath, Map paths) { + + if (document.containsKey("type") && document.get("type").equals("object")) { + Object o = document.get("properties"); + if (o == null) { + return; + } + + if (o instanceof Document properties) { + for (Entry entry : properties.entrySet()) { + if (entry.getValue() instanceof Document nested) { + + String path = currentPath == null ? 
entry.getKey() : (currentPath + "." + entry.getKey()); + if (nested.containsKey("encrypt")) { + Document target = new Document(nested.get("encrypt", Document.class)); + if (nested.containsKey("queries")) { + List queries = nested.get("queries", List.class); + if (!queries.isEmpty() && queries.iterator().next() instanceof Document qd) { + target.putAll(qd); + } + } + paths.put(path, target); + } else { + collectPaths(nested, path, paths); + } + } + } + } + } + } + /** * Encapsulation of options applied to define collections change stream behaviour. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java index f64391e8cd..601b6898b8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -19,11 +19,13 @@ * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. * * @author Christoph Strobl + * @author Ross Lawley * @since 3.3 */ public final class EncryptionAlgorithms { public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + public static final String RANGE = "Range"; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index 65a5131dd1..38269787cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -22,6 +22,7 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; import org.bson.BsonNull; import org.bson.Document; @@ -39,6 +40,7 @@ import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; @@ -83,6 +85,7 @@ * @author Mark Paluch * @author Christoph Strobl * @author Ben Foster + * @author Ross Lawley * @since 2.1 * @see MongoTemplate * @see ReactiveMongoTemplate @@ -375,8 +378,15 @@ public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Collec result.timeSeriesOptions(options); }); - collectionOptions.getChangeStreamOptions().ifPresent(it -> result - .changeStreamPreAndPostImagesOptions(new ChangeStreamPreAndPostImagesOptions(it.getPreAndPostImages()))); + collectionOptions.getChangeStreamOptions() // + .map(CollectionOptions.CollectionChangeStreamOptions::getPreAndPostImages) // + .map(ChangeStreamPreAndPostImagesOptions::new) // + .ifPresent(result::changeStreamPreAndPostImagesOptions); + + collectionOptions.getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .filter(Predicate.not(Document::isEmpty)) // + 
.ifPresent(result::encryptedFields); return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java new file mode 100644 index 0000000000..c04ae9d603 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +enum EntityResultConverter implements QueryResultConverter { + + INSTANCE; + + @Override + public Object mapDocument(Document document, ConversionResultSupplier reader) { + return reader.get(); + } + + @Override + public QueryResultConverter andThen(QueryResultConverter after) { + return (QueryResultConverter) after; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java index 67ed188655..e4becc491a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java @@ -19,6 +19,7 @@ import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.lang.Contract; /** * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent @@ -45,7 +46,7 @@ public interface ExecutableAggregationOperation { /** * Start creating an aggregation operation that returns results mapped to the given domain type.
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different - * input type for he aggregation. + * input type for the aggregation. * * @param domainType must not be {@literal null}. * @return new instance of {@link ExecutableAggregation}. @@ -76,10 +77,23 @@ interface AggregationWithCollection { * Trigger execution by calling one of the terminating methods. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ interface TerminatingAggregation { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingAggregation}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingAggregation map(QueryResultConverter converter); + /** * Apply pipeline operations as specified and get all matching elements. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java index ca5aa7a513..d74e955f6f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java @@ -43,25 +43,28 @@ public ExecutableAggregation aggregateAndReturn(Class domainType) { Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableAggregationSupport<>(template, domainType, null, null); + return new ExecutableAggregationSupport<>(template, domainType, QueryResultConverter.entity(), null, null); } /** * @author Christoph Strobl * @since 2.0 */ - static class ExecutableAggregationSupport + static class ExecutableAggregationSupport implements AggregationWithAggregation, ExecutableAggregation, TerminatingAggregation { private final MongoTemplate template; - private final Class domainType; + private final Class domainType; + private final QueryResultConverter resultConverter; private final Aggregation aggregation; private final String collection; - public ExecutableAggregationSupport(MongoTemplate template, Class domainType, Aggregation aggregation, + public ExecutableAggregationSupport(MongoTemplate template, Class domainType, + QueryResultConverter resultConverter, Aggregation aggregation, String collection) { this.template = template; this.domainType = domainType; + this.resultConverter = resultConverter; this.aggregation = aggregation; this.collection = collection; } @@ -71,7 +74,7 @@ public AggregationWithAggregation inCollection(String collection) { Assert.hasText(collection, "Collection must not be null nor empty"); - return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + return new ExecutableAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); } @Override @@ -79,17 +82,26 @@ public TerminatingAggregation by(Aggregation aggregation) { Assert.notNull(aggregation, "Aggregation must not be null"); - return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + return new ExecutableAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); + } + + @Override + public 
TerminatingAggregation map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ExecutableAggregationSupport<>(template, domainType, this.resultConverter.andThen(converter), + aggregation, collection); } @Override public AggregationResults all() { - return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + return template.doAggregate(aggregation, getCollectionName(aggregation), domainType, resultConverter); } @Override public Stream stream() { - return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType); + return template.doAggregateStream(aggregation, getCollectionName(aggregation), domainType, resultConverter, null); } private String getCollectionName(Aggregation aggregation) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index 3358ff2b17..b4d6a4dd80 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; import org.springframework.lang.Nullable; import com.mongodb.client.MongoCollection; @@ -71,9 +72,33 @@ public interface ExecutableFindOperation { * Trigger find execution by calling one of the terminating methods. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ - interface TerminatingFind { + interface TerminatingFind extends TerminatingResults, TerminatingProjection { + + } + + /** + * Trigger find execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since x.y + */ + interface TerminatingResults { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingResults}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); /** * Get exactly zero or one result. @@ -142,6 +167,16 @@ default Optional first() { */ Window scroll(ScrollPosition scrollPosition); + } + + /** + * Trigger find execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since x.y + */ + interface TerminatingProjection { + /** * Get the number of matching elements.
* This method uses an @@ -160,16 +195,30 @@ default Optional first() { * @return {@literal true} if at least one matching element exists. */ boolean exists(); + } /** - * Trigger geonear execution by calling one of the terminating methods. + * Trigger {@code geoNear} execution by calling one of the terminating methods. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ interface TerminatingFindNear { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingFindNear}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingFindNear map(QueryResultConverter converter); + /** * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java index 4e6c3547c5..6cf5d5924f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java @@ -24,6 +24,7 @@ import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.data.domain.ScrollPosition; import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; @@ -57,7 +58,8 @@ public ExecutableFind query(Class domainType) { Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + return new ExecutableFindSupport<>(template, domainType, domainType, QueryResultConverter.entity(), null, + ALL_QUERY); } /** @@ -65,19 +67,22 @@ public ExecutableFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - static class ExecutableFindSupport + static class ExecutableFindSupport implements ExecutableFind, FindWithCollection, FindWithProjection, FindWithQuery { private final MongoTemplate template; private final Class domainType; - private final Class returnType; + private final Class returnType; + private final QueryResultConverter resultConverter; private final @Nullable String collection; private final Query query; - ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, @Nullable String collection, + ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, + QueryResultConverter resultConverter, @Nullable String collection, Query query) { this.template = template; this.domainType = domainType; + this.resultConverter = resultConverter; this.returnType = returnType; this.collection = collection; this.query = query; @@ -88,7 +93,7 @@ public FindWithProjection inCollection(String collection) { Assert.hasText(collection, "Collection name must not be null nor empty"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new ExecutableFindSupport<>(template, domainType, 
returnType, resultConverter, collection, query); } @Override @@ -96,7 +101,8 @@ public FindWithQuery as(Class returnType) { Assert.notNull(returnType, "ReturnType must not be null"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new ExecutableFindSupport<>(template, domainType, returnType, QueryResultConverter.entity(), collection, + query); } @Override @@ -104,7 +110,16 @@ public TerminatingFind matching(Query query) { Assert.notNull(query, "Query must not be null"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new ExecutableFindSupport<>(template, domainType, returnType, resultConverter, collection, query); + } + + @Override + public TerminatingResults map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ExecutableFindSupport<>(template, domainType, returnType, this.resultConverter.andThen(converter), + collection, query); } @Override @@ -143,12 +158,13 @@ public Stream stream() { @Override public Window scroll(ScrollPosition scrollPosition) { - return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + return template.doScroll(query.with(scrollPosition), domainType, returnType, resultConverter, + getCollectionName()); } @Override public TerminatingFindNear near(NearQuery nearQuery) { - return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter); } @Override @@ -176,17 +192,17 @@ private List doFind(@Nullable CursorPreparer preparer) { Document fieldsObject = query.getFieldsObject(); return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType, - returnType, getCursorPreparer(query, preparer)); + returnType, resultConverter, getCursorPreparer(query, preparer)); } private List doFindDistinct(String field) { return template.findDistinct(query, field, getCollectionName(), domainType, - returnType == domainType ? (Class) Object.class : returnType); + returnType == domainType ? 
(Class) Object.class : returnType); } private Stream doStream() { - return template.doStream(query, domainType, getCollectionName(), returnType); + return template.doStream(query, domainType, getCollectionName(), returnType, resultConverter); } private CursorPreparer getCursorPreparer(Query query, @Nullable CursorPreparer preparer) { @@ -200,6 +216,31 @@ private String getCollectionName() { private String asString() { return SerializationUtils.serializeToJsonSafely(query); } + + class TerminatingFindNearSupport implements TerminatingFindNear { + + private final NearQuery nearQuery; + private final QueryResultConverter resultConverter; + + public TerminatingFindNearSupport(NearQuery nearQuery, + QueryResultConverter resultConverter) { + this.nearQuery = nearQuery; + this.resultConverter = resultConverter; + } + + @Override + public TerminatingFindNear map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter.andThen(converter)); + } + + @Override + public GeoResults all() { + return template.doGeoNear(nearQuery, domainType, getCollectionName(), returnType, resultConverter); + } + } } /** @@ -245,19 +286,19 @@ public Document getSortObject() { * @author Christoph Strobl * @since 2.1 */ - static class DistinctOperationSupport implements TerminatingDistinct { + static class DistinctOperationSupport implements TerminatingDistinct { private final String field; - private final ExecutableFindSupport delegate; + private final ExecutableFindSupport delegate; - public DistinctOperationSupport(ExecutableFindSupport delegate, String field) { + public DistinctOperationSupport(ExecutableFindSupport delegate, String field) { this.delegate = delegate; this.field = field; } @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) public TerminatingDistinct as(Class resultType) { Assert.notNull(resultType, "ResultType must not be null"); @@ -270,12 +311,13 @@ public TerminatingDistinct matching(Query query) { Assert.notNull(query, "Query must not be null"); - return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); + return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); } @Override public List all() { return delegate.doFindDistinct(field); } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java index 839f49c7da..bc26dfb68c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -24,6 +24,7 @@ import java.util.stream.Collectors; import org.bson.Document; + import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; @@ -31,14 +32,18 @@ import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; import 
org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.JsonSchemaObject; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; import org.springframework.data.util.TypeInformation; import org.springframework.util.Assert; @@ -121,29 +126,31 @@ public MongoJsonSchema createSchemaFor(Class type) { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); - { - Encrypted encrypted = entity.findAnnotation(Encrypted.class); - if (encrypted != null) { + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + schemaBuilder.encryptionMetadata(getEncryptionMetadata(entity, encrypted)); + } - Document encryptionMetadata = new Document(); + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); + schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); - Collection encryptionKeyIds = entity.getEncryptionKeyIds(); - if (!CollectionUtils.isEmpty(encryptionKeyIds)) { - encryptionMetadata.append("keyId", encryptionKeyIds); - } + return schemaBuilder.build(); + } - if (StringUtils.hasText(encrypted.algorithm())) { - encryptionMetadata.append("algorithm", encrypted.algorithm()); - } + private static Document getEncryptionMetadata(MongoPersistentEntity entity, Encrypted encrypted) { - schemaBuilder.encryptionMetadata(encryptionMetadata); - } + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); } - List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); - schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } - return schemaBuilder.build(); + return encryptionMetadata; } private List computePropertiesForEntity(List path, @@ -185,8 +192,8 @@ private JsonSchemaProperty computeSchemaForProperty(List rawTargetType = computeTargetType(property); // target type before conversion Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type - - if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class || ClassUtils.isAssignable(targetType, rawTargetType) ) { + if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class + || ClassUtils.isAssignable(targetType, 
rawTargetType)) { targetType = rawTargetType; } @@ -291,7 +298,36 @@ private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentPropert if (!ObjectUtils.isEmpty(encrypted.keyId())) { enc = enc.keys(property.getEncryptionKeyIds()); } - return enc; + + Queryable queryable = property.findAnnotation(Queryable.class); + if (queryable == null || !StringUtils.hasText(queryable.queryType())) { + return enc; + } + + QueryCharacteristic characteristic = new QueryCharacteristic() { + + @Override + public String queryType() { + return queryable.queryType(); + } + + @Override + public Document toDocument() { + + Document options = QueryCharacteristic.super.toDocument(); + + if (queryable.contentionFactor() >= 0) { + options.put("contention", queryable.contentionFactor()); + } + + if (StringUtils.hasText(queryable.queryAttributes())) { + options.putAll(Document.parse(queryable.queryAttributes())); + } + + return options; + } + }; + return new QueryableJsonSchemaProperty(enc, QueryCharacteristics.of(characteristic)); } private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index 67ef3a3081..e46573b476 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -110,6 +110,7 @@ import org.springframework.data.mongodb.util.MongoCompatibilityAdapter; import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.CloseableIterator; +import org.springframework.data.util.Lazy; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -479,15 +480,20 @@ public Stream stream(Query query, Class entityType, String collectionN return doStream(query, entityType, collectionName, entityType); } - @SuppressWarnings("ConstantConditions") protected Stream doStream(Query query, Class entityType, String collectionName, Class returnType) { + return doStream(query, entityType, collectionName, returnType, QueryResultConverter.entity()); + } + + @SuppressWarnings("ConstantConditions") + Stream doStream(Query query, Class entityType, String collectionName, Class returnType, + QueryResultConverter resultConverter) { Assert.notNull(query, "Query must not be null"); Assert.notNull(entityType, "Entity type must not be null"); Assert.hasText(collectionName, "Collection name must not be null or empty"); Assert.notNull(returnType, "ReturnType must not be null"); - return execute(collectionName, (CollectionCallback>) collection -> { + return execute(collectionName, (CollectionCallback>) collection -> { MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); @@ -501,8 +507,10 @@ protected Stream doStream(Query query, Class entityType, String collec FindIterable cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection, col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields)); + DocumentCallback resultReader = getResultReader(projection, collectionName, resultConverter); + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, - new ProjectingReadCallback<>(mongoConverter, projection, collectionName)).stream(); + resultReader).stream(); }); } @@ -898,10 
+906,11 @@ public Window scroll(Query query, Class entityType) { @Override public Window scroll(Query query, Class entityType, String collectionName) { - return doScroll(query, entityType, entityType, collectionName); + return doScroll(query, entityType, entityType, QueryResultConverter.entity(), collectionName); } - Window doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { + Window doScroll(Query query, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, String collectionName) { Assert.notNull(query, "Query must not be null"); Assert.notNull(collectionName, "CollectionName must not be null"); @@ -909,7 +918,7 @@ Window doScroll(Query query, Class sourceClass, Class targetClass, Assert.notNull(targetClass, "Target type must not be null"); EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; if (query.hasKeyset()) { @@ -917,14 +926,14 @@ Window doScroll(Query query, Class sourceClass, Class targetClass, KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, operations.getIdPropertyName(sourceClass)); - List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), + List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, new QueryCursorPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback); return ScrollUtils.createWindow(query, result, sourceClass, operations); } - List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), sourceClass, new QueryCursorPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback); @@ -1016,6 +1025,11 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col } public GeoResults geoNear(NearQuery near, Class domainType, String collectionName, Class returnType) { + return doGeoNear(near, domainType, collectionName, returnType, QueryResultConverter.entity()); + } + + GeoResults doGeoNear(NearQuery near, Class domainType, String collectionName, Class returnType, + QueryResultConverter resultConverter) { if (near == null) { throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); @@ -1047,15 +1061,15 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col AggregationResults results = aggregate($geoNear, collection, Document.class); EntityProjection projection = operations.introspectProjection(returnType, domainType); - DocumentCallback> callback = new GeoNearResultDocumentCallback<>(distanceField, - new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); + DocumentCallback> callback = new GeoNearResultDocumentCallback<>(distanceField, + getResultReader(projection, collectionName, resultConverter), near.getMetric()); - List> result = new ArrayList<>(results.getMappedResults().size()); + List> result = new ArrayList<>(results.getMappedResults().size()); BigDecimal aggregate = BigDecimal.ZERO; for (Document element : results) { - GeoResult geoResult = 
callback.doWith(element); + GeoResult geoResult = callback.doWith(element); aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue())); result.add(geoResult); } @@ -1433,7 +1447,10 @@ protected Collection doInsertBatch(String collectionName, Collection(initialized, document, collectionName)); initialized = maybeCallBeforeSave(initialized, document, collectionName); - documentList.add(document); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(uninitialized.getClass()); + + documentList.add(mappedDocument.getDocument()); initializedBatchToSave.add(initialized); } @@ -2019,7 +2036,7 @@ public AggregationResults aggregate(TypedAggregation aggregation, Clas @Override public AggregationResults aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - return aggregate(aggregation, inputCollectionName, outputType, null); + return aggregate(aggregation, inputCollectionName, outputType, (AggregationOperationContext) null); } @Override @@ -2032,7 +2049,7 @@ public AggregationResults aggregate(Aggregation aggregation, Class inp @Override public AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType) { - return aggregate(aggregation, collectionName, outputType, null); + return doAggregate(aggregation, collectionName, outputType, QueryResultConverter.entity()); } @Override @@ -2162,11 +2179,25 @@ private AggregationResults doAggregate(Aggregation aggregation, String co return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext()); } + AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter) { + + return doAggregate(aggregation, collectionName, outputType, resultConverter, queryOperations + .createAggregation(aggregation, (AggregationOperationContext) null).getAggregationOperationContext()); + } + @SuppressWarnings("ConstantConditions") protected AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, AggregationOperationContext context) { + return doAggregate(aggregation, collectionName, outputType, QueryResultConverter.entity(), context); + } + + @SuppressWarnings("ConstantConditions") + AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter, AggregationOperationContext context) { - ReadDocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + DocumentCallback callback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); AggregationOptions options = aggregation.getOptions(); AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); @@ -2245,9 +2276,15 @@ protected AggregationResults doAggregate(Aggregation aggregation, String }); } - @SuppressWarnings("ConstantConditions") protected Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType, @Nullable AggregationOperationContext context) { + return doAggregateStream(aggregation, collectionName, outputType, QueryResultConverter.entity(), context); + } + + @SuppressWarnings("ConstantConditions") + protected Stream doAggregateStream(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter, + @Nullable AggregationOperationContext context) { 
Assert.notNull(aggregation, "Aggregation pipeline must not be null"); Assert.hasText(collectionName, "Collection name must not be null or empty"); @@ -2264,7 +2301,8 @@ protected Stream aggregateStream(Aggregation aggregation, String collecti String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } - ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + DocumentCallback readCallback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); return execute(collectionName, (CollectionCallback>) collection -> { @@ -2626,11 +2664,12 @@ protected List doFind(String collectionName, * * @since 2.0 */ - List doFind(CollectionPreparer> collectionPreparer, String collectionName, - Document query, Document fields, Class sourceClass, Class targetClass, CursorPreparer preparer) { + List doFind(CollectionPreparer> collectionPreparer, String collectionName, + Document query, Document fields, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, CursorPreparer preparer) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); - EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); Document mappedFields = queryContext.getMappedFields(entity, projection); @@ -2646,8 +2685,9 @@ List doFind(CollectionPreparer> collectionPr collectionName)); } + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer, - new ProjectingReadCallback<>(mongoConverter, projection, collectionName), collectionName); + callback, collectionName); } /** @@ -2966,6 +3006,16 @@ private void executeQueryInternal(CollectionCallback> col } } + @SuppressWarnings("unchecked") + private DocumentCallback getResultReader(EntityProjection projection, String collectionName, + QueryResultConverter resultConverter) { + + DocumentCallback readCallback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + + return resultConverter == QueryResultConverter.entity() ? (DocumentCallback) readCallback + : new QueryResultConverterCallback(resultConverter, readCallback); + } + public PersistenceExceptionTranslator getExceptionTranslator() { return exceptionTranslator; } @@ -3325,6 +3375,24 @@ public T doWith(Document document) { } } + static final class QueryResultConverterCallback implements DocumentCallback { + + private final QueryResultConverter converter; + private final DocumentCallback delegate; + + QueryResultConverterCallback(QueryResultConverter converter, DocumentCallback delegate) { + this.converter = converter; + this.delegate = delegate; + } + + @Override + public R doWith(Document object) { + + Lazy lazy = Lazy.of(() -> delegate.doWith(object)); + return converter.mapDocument(object, lazy::get); + } + } + /** * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java new file mode 100644 index 0000000000..e271ee23cc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java @@ -0,0 +1,85 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +/** + * Converter for MongoDB query results. + *

+ * This is a functional interface that allows for mapping a {@link Document} to a result type. + * {@link #mapDocument(Document, ConversionResultSupplier) row mapping} can obtain upstream a + * {@link ConversionResultSupplier upstream converter} to enrich the final result object. This is useful when e.g. + * wrapping result objects where the wrapper needs to obtain information from the actual {@link Document}. + * + * @param object type accepted by this converter. + * @param the returned result type. + * @author Mark Paluch + * @since x.x + */ +@FunctionalInterface +public interface QueryResultConverter { + + /** + * Returns a function that returns the materialized entity. + * + * @param the type of the input and output entity to the function. + * @return a function that returns the materialized entity. + */ + @SuppressWarnings("unchecked") + static QueryResultConverter entity() { + return (QueryResultConverter) EntityResultConverter.INSTANCE; + } + + /** + * Map a {@link Document} that is read from the MongoDB query/aggregation operation to a query result. + * + * @param document the raw document from the MongoDB query/aggregation result. + * @param reader reader object that supplies an upstream result from an earlier converter. + * @return the mapped result. + */ + R mapDocument(Document document, ConversionResultSupplier reader); + + /** + * Returns a composed function that first applies this function to its input, and then applies the {@code after} + * function to the result. If evaluation of either function throws an exception, it is relayed to the caller of the + * composed function. + * + * @param the type of output of the {@code after} function, and of the composed function. + * @param after the function to apply after this function is applied. + * @return a composed function that first applies this function and then applies the {@code after} function. + */ + default QueryResultConverter andThen(QueryResultConverter after) { + return (row, reader) -> after.mapDocument(row, () -> mapDocument(row, reader)); + } + + /** + * A supplier that converts a {@link Document} into {@code T}. Allows for lazy reading of query results. + * + * @param type of the returned result. + */ + interface ConversionResultSupplier { + + /** + * Obtain the upstream conversion result. + * + * @return the upstream conversion result. + */ + T get(); + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java index 54129e6b5d..883bc65579 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java @@ -18,6 +18,7 @@ import reactor.core.publisher.Flux; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.lang.Contract; /** * {@link ReactiveAggregationOperation} allows creation and execution of reactive MongoDB aggregation operations in a @@ -44,7 +45,7 @@ public interface ReactiveAggregationOperation { /** * Start creating an aggregation operation that returns results mapped to the given domain type.
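A brief sketch of how such a converter could look in user code and how andThen composes two conversion steps (Order and AuditedOrder are hypothetical types, not part of this change):

    record AuditedOrder(Order order, Object rawId) {}

    QueryResultConverter<Order, AuditedOrder> wrap =
            (document, upstream) -> new AuditedOrder(upstream.get(), document.get("_id"));

    // composes left to right: the second converter receives the AuditedOrder produced by the first
    QueryResultConverter<Order, String> describe =
            wrap.andThen((document, upstream) -> upstream.get().order().toString());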
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different - * input type for he aggregation. + * input type for the aggregation. * * @param domainType must not be {@literal null}. * @return new instance of {@link ReactiveAggregation}. Never {@literal null}. @@ -73,6 +74,18 @@ interface AggregationOperationWithCollection { */ interface TerminatingAggregationOperation { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingAggregationOperation}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingAggregationOperation map(QueryResultConverter converter); + /** + * Apply pipeline operations as specified and stream all matching elements.
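A minimal fluent usage sketch for the new map(...) step on the reactive aggregation API, where ops stands for a ReactiveMongoTemplate (or any implementation of the reactive fluent operations); Person, its getter and the pipeline are illustrative only:

    Flux<String> summaries = ops.aggregateAndReturn(Person.class)
            .by(Aggregation.newAggregation(Aggregation.match(Criteria.where("age").gte(21))))
            .map((document, person) -> person.get().getLastname() + " (raw _id: " + document.get("_id") + ")")
            .all();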
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java index 954fd61716..a25d0eed6c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java @@ -51,22 +51,25 @@ public ReactiveAggregation aggregateAndReturn(Class domainType) { Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveAggregationSupport<>(template, domainType, null, null); + return new ReactiveAggregationSupport<>(template, domainType, QueryResultConverter.entity(), null, null); } - static class ReactiveAggregationSupport + static class ReactiveAggregationSupport implements AggregationOperationWithAggregation, ReactiveAggregation, TerminatingAggregationOperation { private final ReactiveMongoTemplate template; - private final Class domainType; + private final Class domainType; + private final QueryResultConverter resultConverter; private final Aggregation aggregation; private final String collection; - ReactiveAggregationSupport(ReactiveMongoTemplate template, Class domainType, Aggregation aggregation, + ReactiveAggregationSupport(ReactiveMongoTemplate template, Class domainType, + QueryResultConverter resultConverter, Aggregation aggregation, String collection) { this.template = template; this.domainType = domainType; + this.resultConverter = resultConverter; this.aggregation = aggregation; this.collection = collection; } @@ -76,7 +79,7 @@ public AggregationOperationWithAggregation inCollection(String collection) { Assert.hasText(collection, "Collection must not be null nor empty"); - return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + return new ReactiveAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); } @Override @@ -84,12 +87,21 @@ public TerminatingAggregationOperation by(Aggregation aggregation) { Assert.notNull(aggregation, "Aggregation must not be null"); - return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + return new ReactiveAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); + } + + @Override + public TerminatingAggregationOperation map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ReactiveAggregationSupport<>(template, domainType, resultConverter.andThen(converter), aggregation, + collection); } @Override public Flux all() { - return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + return template.doAggregate(aggregation, getCollectionName(aggregation), domainType, domainType, resultConverter); } private String getCollectionName(Aggregation aggregation) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index cba827ffed..24d8c975bb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -25,6 +25,7 @@ import 
org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; /** * {@link ReactiveFindOperation} allows creation and execution of reactive MongoDB find operations in a fluent API @@ -66,7 +67,28 @@ public interface ReactiveFindOperation { /** * Compose find execution by calling one of the terminating methods. */ - interface TerminatingFind { + interface TerminatingFind extends TerminatingResults, TerminatingProjection { + + } + + /** + * Compose find execution by calling one of the terminating methods. + * + * @since x.y + */ + interface TerminatingResults { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingResults}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); /** * Get exactly zero or one result. @@ -120,6 +142,15 @@ interface TerminatingFind { */ Flux tail(); + } + + /** + * Compose find execution by calling one of the terminating methods. + * + * @since x.y + */ + interface TerminatingProjection { + /** * Get the number of matching elements.
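A short usage sketch of the new TerminatingResults map step in the reactive fluent find API (Person and the "people" collection are placeholders, ops is a ReactiveMongoTemplate or comparable fluent operations implementation):

    Flux<String> lastnames = ops.query(Person.class)
            .inCollection("people")
            .matching(Query.query(Criteria.where("lastname").is("Smith")))
            .map((document, person) -> person.get().getLastname().toUpperCase())
            .all();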
* This method uses an @@ -145,6 +176,18 @@ interface TerminatingFind { */ interface TerminatingFindNear { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link ExecutableFindOperation.TerminatingFindNear}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since x.y + */ + @Contract("_ -> new") + TerminatingFindNear map(QueryResultConverter converter); + /** * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java index d1aec8af36..6292205dcd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java @@ -19,9 +19,11 @@ import reactor.core.publisher.Mono; import org.bson.Document; + import org.springframework.dao.IncorrectResultSizeDataAccessException; -import org.springframework.data.domain.Window; import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResult; import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; @@ -52,7 +54,7 @@ public ReactiveFind query(Class domainType) { Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + return new ReactiveFindSupport<>(template, domainType, domainType, QueryResultConverter.entity(), null, ALL_QUERY); } /** @@ -61,21 +63,24 @@ public ReactiveFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - static class ReactiveFindSupport + static class ReactiveFindSupport implements ReactiveFind, FindWithCollection, FindWithProjection, FindWithQuery { private final ReactiveMongoTemplate template; private final Class domainType; - private final Class returnType; - private final String collection; + private final Class returnType; + private final QueryResultConverter resultConverter; + private final @Nullable String collection; private final Query query; - ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, String collection, + ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + QueryResultConverter resultConverter, @Nullable String collection, Query query) { this.template = template; this.domainType = domainType; this.returnType = returnType; + this.resultConverter = resultConverter; this.collection = collection; this.query = query; } @@ -85,7 +90,7 @@ public FindWithProjection inCollection(String collection) { Assert.hasText(collection, "Collection name must not be null nor empty"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, resultConverter, collection, query); } @Override @@ -93,7 +98,8 @@ public FindWithQuery as(Class returnType) { 
Assert.notNull(returnType, "ReturnType must not be null"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, QueryResultConverter.entity(), collection, + query); } @Override @@ -101,7 +107,16 @@ public TerminatingFind matching(Query query) { Assert.notNull(query, "Query must not be null"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, resultConverter, collection, query); + } + + @Override + public TerminatingResults map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ReactiveFindSupport<>(template, domainType, returnType, this.resultConverter.andThen(converter), + collection, query); } @Override @@ -141,7 +156,8 @@ public Flux all() { @Override public Mono> scroll(ScrollPosition scrollPosition) { - return template.doScroll(query.with(scrollPosition), domainType, returnType, getCollectionName()); + return template.doScroll(query.with(scrollPosition), domainType, returnType, resultConverter, + getCollectionName()); } @Override @@ -151,7 +167,7 @@ public Flux tail() { @Override public TerminatingFindNear near(NearQuery nearQuery) { - return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + return new TerminatingFindNearSupport<>(nearQuery, resultConverter); } @Override @@ -178,14 +194,15 @@ private Flux doFind(@Nullable FindPublisherPreparer preparer) { Document fieldsObject = query.getFieldsObject(); return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject, - fieldsObject, domainType, returnType, preparer != null ? preparer : getCursorPreparer(query)); + fieldsObject, domainType, returnType, resultConverter, + preparer != null ? preparer : getCursorPreparer(query)); } - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) private Flux doFindDistinct(String field) { return template.findDistinct(query, field, getCollectionName(), domainType, - returnType == domainType ? (Class) Object.class : returnType); + returnType == domainType ? 
(Class) Object.class : returnType); } private FindPublisherPreparer getCursorPreparer(Query query) { @@ -200,10 +217,36 @@ private String asString() { return SerializationUtils.serializeToJsonSafely(query); } + class TerminatingFindNearSupport implements TerminatingFindNear { + + private final NearQuery nearQuery; + private final QueryResultConverter resultConverter; + + public TerminatingFindNearSupport(NearQuery nearQuery, + QueryResultConverter resultConverter) { + this.nearQuery = nearQuery; + this.resultConverter = resultConverter; + } + + @Override + public TerminatingFindNear map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter.andThen(converter)); + } + + @Override + public Flux> all() { + return template.doGeoNear(nearQuery, domainType, getCollectionName(), returnType, resultConverter); + } + } + /** * @author Christoph Strobl * @since 2.1 */ + @SuppressWarnings({ "unchecked", "rawtypes" }) static class DistinctOperationSupport implements TerminatingDistinct { private final String field; @@ -224,12 +267,11 @@ public TerminatingDistinct as(Class resultType) { } @Override - @SuppressWarnings("unchecked") public TerminatingDistinct matching(Query query) { Assert.notNull(query, "Query must not be null"); - return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); + return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); } @Override diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index ea427a3e1f..0fa2f6b019 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -877,10 +877,11 @@ public Mono> scroll(Query query, Class entityType) { @Override public Mono> scroll(Query query, Class entityType, String collectionName) { - return doScroll(query, entityType, entityType, collectionName); + return doScroll(query, entityType, entityType, QueryResultConverter.entity(), collectionName); } - Mono> doScroll(Query query, Class sourceClass, Class targetClass, String collectionName) { + Mono> doScroll(Query query, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, String collectionName) { Assert.notNull(query, "Query must not be null"); Assert.notNull(collectionName, "CollectionName must not be null"); @@ -888,7 +889,7 @@ Mono> doScroll(Query query, Class sourceClass, Class targetC Assert.notNull(targetClass, "Target type must not be null"); EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - ProjectingReadCallback callback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); int limit = query.isLimited() ? 
query.getLimit() + 1 : Integer.MAX_VALUE; if (query.hasKeyset()) { @@ -896,7 +897,7 @@ Mono> doScroll(Query query, Class sourceClass, Class targetC KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, operations.getIdPropertyName(sourceClass)); - Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, new QueryFindPublisherPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback) .collectList(); @@ -904,7 +905,7 @@ Mono> doScroll(Query query, Class sourceClass, Class targetC return result.map(it -> ScrollUtils.createWindow(query, it, sourceClass, operations)); } - Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), query.getFieldsObject(), sourceClass, new QueryFindPublisherPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback) .collectList(); @@ -1003,6 +1004,11 @@ public Flux aggregate(Aggregation aggregation, String collectionName, Cla protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, Class outputType) { + return doAggregate(aggregation, collectionName, inputType, outputType, QueryResultConverter.entity()); + } + + Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType, QueryResultConverter resultConverter) { Assert.notNull(aggregation, "Aggregation pipeline must not be null"); Assert.hasText(collectionName, "Collection name must not be null or empty"); @@ -1018,13 +1024,14 @@ protected Flux doAggregate(Aggregation aggregation, String collectionName serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName)); } - ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); + DocumentCallback readCallback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux aggregateAndMap(MongoCollection collection, List pipeline, - boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + boolean isOutOrMerge, AggregationOptions options, DocumentCallback readCallback, @Nullable Class inputType) { ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options); @@ -1070,9 +1077,14 @@ public Flux> geoNear(NearQuery near, Class entityClass, Stri return geoNear(near, entityClass, collectionName, entityClass); } - @SuppressWarnings("unchecked") protected Flux> geoNear(NearQuery near, Class entityClass, String collectionName, Class returnType) { + return doGeoNear(near, entityClass, collectionName, returnType, QueryResultConverter.entity()); + } + + @SuppressWarnings("unchecked") + Flux> doGeoNear(NearQuery near, Class entityClass, String collectionName, Class returnType, + QueryResultConverter resultConverter) { if (near == null) { throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); @@ -1086,8 +1098,8 @@ protected Flux> geoNear(NearQuery near, Class entityClass, S 
String distanceField = operations.nearQueryDistanceFieldName(entityClass); EntityProjection projection = operations.introspectProjection(returnType, entityClass); - GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, - new ProjectingReadCallback<>(mongoConverter, projection, collection), near.getMetric()); + GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, + getResultReader(projection, collectionName, resultConverter), near.getMetric()); Builder optionsBuilder = AggregationOptions.builder(); if (near.hasReadPreference()) { @@ -1434,11 +1446,16 @@ protected Flux doInsertBatch(String collectionName, Collection(initialized, dbDoc, collectionName)); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, mapped.getDocument(), collectionName)); + return maybeCallBeforeSave(initialized, mapped.getDocument(), collectionName).map(toSave -> { - return maybeCallBeforeSave(initialized, dbDoc, collectionName).thenReturn(Tuples.of(entity, dbDoc)); + MappedDocument mappedDocument = queryOperations.createInsertContext(mapped) + .prepareId(uninitialized.getClass()); + + return Tuples.of(entity, mappedDocument.getDocument()); + }); }); }).collectList(); @@ -2407,11 +2424,12 @@ CollectionPreparer> createCollectionPreparer(Query que * * @since 2.0 */ - Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, - Document query, Document fields, Class sourceClass, Class targetClass, FindPublisherPreparer preparer) { + Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, FindPublisherPreparer preparer) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); - EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); Document mappedFields = queryContext.getMappedFields(entity, projection); @@ -2423,7 +2441,7 @@ Flux doFind(String collectionName, CollectionPreparer(mongoConverter, projection, collectionName), collectionName); + getResultReader(projection, collectionName, resultConverter), collectionName); } protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) { @@ -2733,6 +2751,16 @@ private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback DocumentCallback getResultReader(EntityProjection projection, String collectionName, + QueryResultConverter resultConverter) { + + DocumentCallback readCallback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + + return resultConverter == QueryResultConverter.entity() ? (DocumentCallback) readCallback + : new QueryResultConverterCallback(resultConverter, readCallback); + } + /** * Exception translation {@link Function} intended for {@link Flux#onErrorMap(Function)} usage. 
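For geo-near queries the converter is applied inside the GeoResult wrapper; a hedged example (Restaurant is a stand-in domain type, coordinates and distance are arbitrary):

    Flux<GeoResult<String>> nearby = ops.query(Restaurant.class)
            .near(NearQuery.near(new Point(-73.99, 40.73)).maxDistance(new Distance(2, Metrics.KILOMETERS)))
            .map((document, restaurant) -> restaurant.get().getName())
            .all();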
* @@ -3090,6 +3118,22 @@ interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback< FindPublisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException; } + static final class QueryResultConverterCallback implements DocumentCallback { + + private final QueryResultConverter converter; + private final DocumentCallback delegate; + + QueryResultConverterCallback(QueryResultConverter converter, DocumentCallback delegate) { + this.converter = converter; + this.delegate = delegate; + } + + @Override + public Mono doWith(Document object) { + return delegate.doWith(object).map(it -> converter.mapDocument(object, () -> it)); + } + } + /** * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given * {@link EntityReader}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java index 5fde0acddd..da106715d4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConversionContext.java @@ -16,41 +16,56 @@ package org.springframework.data.mongodb.core.convert; import org.bson.conversions.Bson; - import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.mapping.model.PropertyValueProvider; import org.springframework.data.mapping.model.SpELContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; +import org.springframework.lang.CheckReturnValue; import org.springframework.lang.Nullable; /** * {@link ValueConversionContext} that allows to delegate read/write to an underlying {@link MongoConverter}. 
* * @author Christoph Strobl + * @author Ross Lawley * @since 3.4 */ public class MongoConversionContext implements ValueConversionContext { private final PropertyValueProvider accessor; // TODO: generics - private final @Nullable MongoPersistentProperty persistentProperty; private final MongoConverter mongoConverter; + @Nullable private final MongoPersistentProperty persistentProperty; @Nullable private final SpELContext spELContext; + @Nullable private final OperatorContext operatorContext; public MongoConversionContext(PropertyValueProvider accessor, @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter) { - this(accessor, persistentProperty, mongoConverter, null); + this(accessor, persistentProperty, mongoConverter, null, null); } public MongoConversionContext(PropertyValueProvider accessor, @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, @Nullable SpELContext spELContext) { + this(accessor, persistentProperty, mongoConverter, spELContext, null); + } + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, + @Nullable OperatorContext operatorContext) { + this(accessor, persistentProperty, mongoConverter, null, operatorContext); + } + + public MongoConversionContext(PropertyValueProvider accessor, + @Nullable MongoPersistentProperty persistentProperty, MongoConverter mongoConverter, + @Nullable SpELContext spELContext, @Nullable OperatorContext operatorContext) { this.accessor = accessor; this.persistentProperty = persistentProperty; this.mongoConverter = mongoConverter; this.spELContext = spELContext; + this.operatorContext = operatorContext; } @Override @@ -63,6 +78,16 @@ public MongoPersistentProperty getProperty() { return persistentProperty; } + /** + * @param operatorContext + * @return new instance of {@link MongoConversionContext}. + * @since 4.5 + */ + @CheckReturnValue + public MongoConversionContext forOperator(@Nullable OperatorContext operatorContext) { + return new MongoConversionContext(accessor, persistentProperty, mongoConverter, spELContext, operatorContext); + } + @Nullable public Object getValue(String propertyPath) { return accessor.getPropertyValue(getProperty().getOwner().getRequiredPersistentProperty(propertyPath)); @@ -84,4 +109,62 @@ public T read(@Nullable Object value, TypeInformation target) { public SpELContext getSpELContext() { return spELContext; } + + @Nullable + public OperatorContext getOperatorContext() { + return operatorContext; + } + + /** + * The {@link OperatorContext} provides access to the actual conversion intent like a write operation or a query + * operator such as {@literal $gte}. + * + * @since 4.5 + */ + public interface OperatorContext { + + /** + * The operator the conversion is used in. + * + * @return {@literal write} for simple write operations during save, or a query operator. + */ + String operator(); + + /** + * The context path the operator is used in. + * + * @return never {@literal null}. + */ + String path(); + + boolean isWriteOperation(); + + } + + record WriteOperatorContext(String path) implements OperatorContext { + + @Override + public String operator() { + return "write"; + } + + @Override + public boolean isWriteOperation() { + return true; + } + } + + record QueryOperatorContext(String operator, String path) implements OperatorContext { + + public QueryOperatorContext(@Nullable String operator, String path) { + this.operator = operator != null ? 
operator : "$eq"; + this.path = path; + } + + @Override + public boolean isWriteOperation() { + return false; + } + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index cce809adc6..debaf2f127 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -37,7 +37,6 @@ import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; - import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Reference; @@ -58,6 +57,8 @@ import org.springframework.data.mongodb.core.aggregation.AggregationExpression; import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.convert.MappingMongoConverter.NestedDocument; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.QueryOperatorContext; import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -88,6 +89,7 @@ * @author David Julia * @author Divya Srivastava * @author Gyungrai Wang + * @author Ross Lawley */ public class QueryMapper { @@ -670,9 +672,26 @@ private Object convertValue(Field documentField, Object sourceValue, Object valu PropertyValueConverter> valueConverter) { MongoPersistentProperty property = documentField.getProperty(); - MongoConversionContext conversionContext = new MongoConversionContext(NoPropertyPropertyValueProvider.INSTANCE, - property, converter); + OperatorContext criteriaContext = new QueryOperatorContext( + isKeyword(documentField.name) ? 
documentField.name : "$eq", property.getFieldName()); + MongoConversionContext conversionContext; + if (valueConverter instanceof MongoConversionContext mcc) { + conversionContext = mcc.forOperator(criteriaContext); + } else { + conversionContext = new MongoConversionContext(NoPropertyPropertyValueProvider.INSTANCE, property, converter, + criteriaContext); + } + + return convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext); + } + + @Nullable + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + MongoPersistentProperty property = documentField.getProperty(); /* might be an $in clause with multiple entries */ if (property != null && !property.isCollectionLike() && sourceValue instanceof Collection collection) { @@ -692,7 +711,8 @@ private Object convertValue(Field documentField, Object sourceValue, Object valu return BsonUtils.mapValues(document, (key, val) -> { if (isKeyword(key)) { - return getMappedValue(documentField, val); + return convertValueWithConversionContext(documentField, val, val, valueConverter, conversionContext + .forOperator(new QueryOperatorContext(key, conversionContext.getOperatorContext().path()))); } return val; }); @@ -1606,7 +1626,7 @@ public MongoConverter getConverter() { return converter; } - private enum NoPropertyPropertyValueProvider implements PropertyValueProvider { + enum NoPropertyPropertyValueProvider implements PropertyValueProvider { INSTANCE; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java index 35cb578c23..805bafe974 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/UpdateMapper.java @@ -24,10 +24,13 @@ import org.bson.Document; import org.bson.conversions.Bson; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.PropertyValueConverter; +import org.springframework.data.convert.ValueConversionContext; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.WriteOperatorContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Query; @@ -160,6 +163,13 @@ protected Entry getMappedObjectForField(Field field, Object rawV return super.getMappedObjectForField(field, rawValue); } + protected Object convertValueWithConversionContext(Field documentField, Object sourceValue, Object value, + PropertyValueConverter> valueConverter, + MongoConversionContext conversionContext) { + + return super.convertValueWithConversionContext(documentField, sourceValue, value, valueConverter, conversionContext.forOperator(new WriteOperatorContext(documentField.name))); + } + private Entry getMappedUpdateModifier(Field field, Object rawValue) { Object value; diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java index f8d814fee4..67c30fcf94 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/ExplicitEncryptionContext.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.convert.encryption; import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; import org.springframework.data.mongodb.core.encryption.EncryptionContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; @@ -26,6 +27,7 @@ * Default {@link EncryptionContext} implementation. * * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ class ExplicitEncryptionContext implements EncryptionContext { @@ -66,4 +68,10 @@ public T read(@Nullable Object value, TypeInformation target) { public T write(@Nullable Object value, TypeInformation target) { return conversionContext.write(value, target); } + + @Override + @Nullable + public OperatorContext getOperatorContext() { + return conversionContext.getOperatorContext(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java index 1ce24b25fe..8d29847aae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/encryption/MongoEncryptionConverter.java @@ -15,8 +15,13 @@ */ package org.springframework.data.mongodb.core.convert.encryption; +import static java.util.Arrays.*; +import static java.util.Collections.*; +import static org.springframework.data.mongodb.core.encryption.EncryptionOptions.*; + import java.util.Collection; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; @@ -27,28 +32,36 @@ import org.bson.BsonValue; import org.bson.Document; import org.bson.types.Binary; + import org.springframework.core.CollectionFactory; import org.springframework.data.mongodb.core.convert.MongoConversionContext; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; import org.springframework.data.mongodb.core.encryption.Encryption; import org.springframework.data.mongodb.core.encryption.EncryptionContext; +import org.springframework.data.mongodb.core.encryption.EncryptionKey; import org.springframework.data.mongodb.core.encryption.EncryptionKeyResolver; import org.springframework.data.mongodb.core.encryption.EncryptionOptions; import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; +import 
org.springframework.util.StringUtils; /** * Default implementation of {@link EncryptingConverter}. Properties used with this converter must be annotated with * {@link Encrypted @Encrypted} to provide key and algorithm metadata. * * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ public class MongoEncryptionConverter implements EncryptingConverter { private static final Log LOGGER = LogFactory.getLog(MongoEncryptionConverter.class); + private static final List RANGE_OPERATORS = asList("$gt", "$gte", "$lt", "$lte"); + public static final String AND_OPERATOR = "$and"; private final Encryption encryption; private final EncryptionKeyResolver keyResolver; @@ -158,10 +171,52 @@ public Object encrypt(Object value, EncryptionContext context) { if (annotation == null) { throw new IllegalStateException(String.format("Property %s.%s is not annotated with @Encrypted", - getProperty(context).getOwner().getName(), getProperty(context).getName())); + persistentProperty.getOwner().getName(), persistentProperty.getName())); + } + + String algorithm = annotation.algorithm(); + EncryptionKey key = keyResolver.getKey(context); + OperatorContext operatorContext = context.getOperatorContext(); + + EncryptionOptions encryptionOptions = new EncryptionOptions(algorithm, key, + getEQOptions(persistentProperty, operatorContext)); + + if (operatorContext != null && !operatorContext.isWriteOperation() && encryptionOptions.queryableEncryptionOptions() != null + && !encryptionOptions.queryableEncryptionOptions().getQueryType().equals("equality")) { + return encryptExpression(operatorContext, value, encryptionOptions); + } else { + return encryptValue(value, context, persistentProperty, encryptionOptions); + } + } + + private static @Nullable QueryableEncryptionOptions getEQOptions(MongoPersistentProperty persistentProperty, + @Nullable OperatorContext operatorContext) { + + Queryable queryableAnnotation = persistentProperty.findAnnotation(Queryable.class); + if (queryableAnnotation == null || !StringUtils.hasText(queryableAnnotation.queryType())) { + return null; + } + + QueryableEncryptionOptions queryableEncryptionOptions = QueryableEncryptionOptions.none(); + + String queryAttributes = queryableAnnotation.queryAttributes(); + if (!queryAttributes.isEmpty()) { + queryableEncryptionOptions = queryableEncryptionOptions.attributes(Document.parse(queryAttributes)); + } + + if (queryableAnnotation.contentionFactor() >= 0) { + queryableEncryptionOptions = queryableEncryptionOptions.contentionFactor(queryableAnnotation.contentionFactor()); } - EncryptionOptions encryptionOptions = new EncryptionOptions(annotation.algorithm(), keyResolver.getKey(context)); + boolean isPartOfARangeQuery = operatorContext != null && !operatorContext.isWriteOperation(); + if (isPartOfARangeQuery) { + queryableEncryptionOptions = queryableEncryptionOptions.queryType(queryableAnnotation.queryType()); + } + return queryableEncryptionOptions; + } + + private BsonBinary encryptValue(Object value, EncryptionContext context, MongoPersistentProperty persistentProperty, + EncryptionOptions encryptionOptions) { if (!persistentProperty.isEntity()) { @@ -176,6 +231,7 @@ public Object encrypt(Object value, EncryptionContext context) { } return encryption.encrypt(BsonUtils.simpleToBsonValue(value), encryptionOptions); } + if (persistentProperty.isCollectionLike()) { return encryption.encrypt(collectionLikeToBsonValue(value, persistentProperty, context), encryptionOptions); } @@ -187,6 +243,37 @@ public Object encrypt(Object value, 
EncryptionContext context) { return encryption.encrypt(BsonUtils.simpleToBsonValue(write), encryptionOptions); } + /** + * Encrypts a range query expression. + *

+ * The mongodb-crypt {@code encryptExpression} has strict formatting requirements so this method ensures these + * requirements are met and then picks out and returns just the value for use with a range query. + * + * @param operatorContext field name and query operator. + * @param value the value of the expression to be encrypted. + * @param encryptionOptions the options. + * @return the encrypted range value for use in a range query. + */ + private BsonValue encryptExpression(OperatorContext operatorContext, Object value, + EncryptionOptions encryptionOptions) { + + BsonValue doc = BsonUtils.simpleToBsonValue(value); + + String fieldName = operatorContext.path(); + String queryOperator = operatorContext.operator(); + + if (!RANGE_OPERATORS.contains(queryOperator)) { + throw new AssertionError(String.format("Not a valid range query. Querying a range encrypted field but the " + + "query operator '%s' for field path '%s' is not a range query.", queryOperator, fieldName)); + } + + BsonDocument encryptExpression = new BsonDocument(AND_OPERATOR, + new BsonArray(singletonList(new BsonDocument(fieldName, new BsonDocument(queryOperator, doc))))); + + BsonDocument result = encryption.encryptExpression(encryptExpression, encryptionOptions); + return result.getArray(AND_OPERATOR).get(0).asDocument().getDocument(fieldName).getBinary(queryOperator); + } + private BsonValue collectionLikeToBsonValue(Object value, MongoPersistentProperty property, EncryptionContext context) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java index 5645c1e416..a80a72ed1f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/Encryption.java @@ -15,13 +15,18 @@ */ package org.springframework.data.mongodb.core.encryption; +import org.bson.BsonDocument; + /** * Component responsible for encrypting and decrypting values. * + * @param

plaintext type. + * @param ciphertext type. * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ -public interface Encryption { +public interface Encryption { /** * Encrypt the given value. @@ -30,7 +35,7 @@ public interface Encryption { * @param options must not be {@literal null}. * @return the encrypted value. */ - T encrypt(S value, EncryptionOptions options); + C encrypt(P value, EncryptionOptions options); /** * Decrypt the given value. @@ -38,6 +43,18 @@ public interface Encryption { * @param value must not be {@literal null}. * @return the decrypted value. */ - S decrypt(T value); + P decrypt(C value); + + /** + * Encrypt the given expression. + * + * @param value must not be {@literal null}. + * @param options must not be {@literal null}. + * @return the encrypted expression. + * @since 4.5.0 + */ + default BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + throw new UnsupportedOperationException("Unsupported encryption method"); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java index 89beaadedb..5f5e29578d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionContext.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.encryption; import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mongodb.core.convert.MongoConversionContext.OperatorContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; import org.springframework.expression.EvaluationContext; @@ -25,6 +26,7 @@ * Context to encapsulate encryption for a specific {@link MongoPersistentProperty}. * * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ public interface EncryptionContext { @@ -128,4 +130,13 @@ default T write(@Nullable Object value, Class target) { EvaluationContext getEvaluationContext(Object source); + /** + * The field name and field query operator + * + * @return can be {@literal null}. + */ + @Nullable + default OperatorContext getOperatorContext() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java index fe01cfa8ba..73a66e4a8a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/EncryptionOptions.java @@ -15,27 +15,41 @@ */ package org.springframework.data.mongodb.core.encryption; +import java.util.Map; +import java.util.Objects; + +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** - * Options, like the {@link #algorithm()}, to apply when encrypting values. - * + * Options used to provide additional information when {@link Encryption encrypting} values. like the + * {@link #algorithm()} to be used. 
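To make the strict formatting requirement concrete, this is roughly the expression document the converter assembles for a range operator before calling encryptExpression, and how the encrypted placeholder is unwrapped again (assuming an Encryption<BsonValue, BsonBinary> named encryption and suitable EncryptionOptions named options are in scope; field name and value are made up):

    BsonDocument expression = new BsonDocument("$and", new BsonArray(List.of(
            new BsonDocument("salary", new BsonDocument("$gte", new BsonInt32(100_000))))));

    BsonDocument encrypted = encryption.encryptExpression(expression, options);

    // only the encrypted stand-in for the range value is kept and substituted into the query
    BsonBinary rangePlaceholder = encrypted.getArray("$and").get(0).asDocument()
            .getDocument("salary").getBinary("$gte");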
+ * * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ public class EncryptionOptions { private final String algorithm; private final EncryptionKey key; + private final @Nullable QueryableEncryptionOptions queryableEncryptionOptions; public EncryptionOptions(String algorithm, EncryptionKey key) { + this(algorithm, key, null); + } + + public EncryptionOptions(String algorithm, EncryptionKey key, + @Nullable QueryableEncryptionOptions queryableEncryptionOptions) { Assert.hasText(algorithm, "Algorithm must not be empty"); Assert.notNull(key, "EncryptionKey must not be empty"); + Assert.notNull(key, "QueryableEncryptionOptions must not be empty"); this.key = key; this.algorithm = algorithm; + this.queryableEncryptionOptions = queryableEncryptionOptions; } public EncryptionKey key() { @@ -46,6 +60,14 @@ public String algorithm() { return algorithm; } + /** + * @return {@literal null} if not set. + * @since 4.5 + */ + public @Nullable QueryableEncryptionOptions queryableEncryptionOptions() { + return queryableEncryptionOptions; + } + @Override public boolean equals(Object o) { @@ -61,7 +83,11 @@ public boolean equals(Object o) { if (!ObjectUtils.nullSafeEquals(algorithm, that.algorithm)) { return false; } - return ObjectUtils.nullSafeEquals(key, that.key); + if (!ObjectUtils.nullSafeEquals(key, that.key)) { + return false; + } + + return ObjectUtils.nullSafeEquals(queryableEncryptionOptions, that.queryableEncryptionOptions); } @Override @@ -69,11 +95,141 @@ public int hashCode() { int result = ObjectUtils.nullSafeHashCode(algorithm); result = 31 * result + ObjectUtils.nullSafeHashCode(key); + result = 31 * result + ObjectUtils.nullSafeHashCode(queryableEncryptionOptions); return result; } @Override public String toString() { - return "EncryptionOptions{" + "algorithm='" + algorithm + '\'' + ", key=" + key + '}'; + return "EncryptionOptions{" + "algorithm='" + algorithm + '\'' + ", key=" + key + ", queryableEncryptionOptions='" + + queryableEncryptionOptions + "'}"; + } + + /** + * Options, like the {@link #getQueryType()}, to apply when encrypting queryable values. + * + * @author Ross Lawley + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableEncryptionOptions { + + private static final QueryableEncryptionOptions NONE = new QueryableEncryptionOptions(null, null, Map.of()); + + private final @Nullable String queryType; + private final @Nullable Long contentionFactor; + private final Map attributes; + + private QueryableEncryptionOptions(@Nullable String queryType, @Nullable Long contentionFactor, + Map attributes) { + + this.queryType = queryType; + this.contentionFactor = contentionFactor; + this.attributes = attributes; + } + + /** + * Create an empty {@link QueryableEncryptionOptions}. + * + * @return unmodifiable {@link QueryableEncryptionOptions} instance. + */ + public static QueryableEncryptionOptions none() { + return NONE; + } + + /** + * Define the {@code queryType} to be used for queryable document encryption. + * + * @param queryType can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions queryType(@Nullable String queryType) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code contentionFactor} to be used for queryable document encryption. + * + * @param contentionFactor can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. 
+ */ + public QueryableEncryptionOptions contentionFactor(@Nullable Long contentionFactor) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Define the {@code rangeOptions} to be used for queryable document encryption. + * + * @param attributes can be {@literal null}. + * @return new instance of {@link QueryableEncryptionOptions}. + */ + public QueryableEncryptionOptions attributes(Map attributes) { + return new QueryableEncryptionOptions(queryType, contentionFactor, attributes); + } + + /** + * Get the {@code queryType} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable String getQueryType() { + return queryType; + } + + /** + * Get the {@code contentionFactor} to apply. + * + * @return {@literal null} if not set. + */ + public @Nullable Long getContentionFactor() { + return contentionFactor; + } + + /** + * Get the {@code rangeOptions} to apply. + * + * @return never {@literal null}. + */ + public Map getAttributes() { + return Map.copyOf(attributes); + } + + /** + * @return {@literal true} if no arguments set. + */ + boolean isEmpty() { + return getQueryType() == null && getContentionFactor() == null && getAttributes().isEmpty(); + } + + @Override + public String toString() { + return "QueryableEncryptionOptions{" + "queryType='" + queryType + '\'' + ", contentionFactor=" + contentionFactor + + ", attributes=" + attributes + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + QueryableEncryptionOptions that = (QueryableEncryptionOptions) o; + + if (!ObjectUtils.nullSafeEquals(queryType, that.queryType)) { + return false; + } + + if (!ObjectUtils.nullSafeEquals(contentionFactor, that.contentionFactor)) { + return false; + } + return ObjectUtils.nullSafeEquals(attributes, that.attributes); + } + + @Override + public int hashCode() { + return Objects.hash(queryType, contentionFactor, attributes); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java index 92350ce7d7..f83f98d4ac 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/encryption/MongoClientEncryption.java @@ -15,20 +15,28 @@ */ package org.springframework.data.mongodb.core.encryption; +import static org.springframework.data.mongodb.util.MongoCompatibilityAdapter.rangeOptionsAdapter; + +import java.util.Map; import java.util.function.Supplier; import org.bson.BsonBinary; +import org.bson.BsonDocument; import org.bson.BsonValue; import org.springframework.data.mongodb.core.encryption.EncryptionKey.Type; +import org.springframework.data.mongodb.core.encryption.EncryptionOptions.QueryableEncryptionOptions; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.util.Assert; import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.model.vault.RangeOptions; import com.mongodb.client.vault.ClientEncryption; /** * {@link ClientEncryption} based {@link Encryption} implementation. 
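A hedged construction sketch for the new options type (the KMS provider, data key, attribute values and the "Range" algorithm string are placeholders; the exact algorithm name depends on driver and server version):

    // clientEncryption is an existing com.mongodb.client.vault.ClientEncryption instance
    BsonBinary dataKeyId = clientEncryption.createDataKey("local", new DataKeyOptions());

    QueryableEncryptionOptions queryable = QueryableEncryptionOptions.none()
            .queryType("range")
            .contentionFactor(8L)
            .attributes(Map.of("min", 0, "max", 200, "trimFactor", 4, "sparsity", 1L));

    EncryptionOptions options = new EncryptionOptions("Range", EncryptionKey.keyId(dataKeyId), queryable);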
* * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 */ public class MongoClientEncryption implements Encryption { @@ -59,6 +67,19 @@ public BsonValue decrypt(BsonBinary value) { @Override public BsonBinary encrypt(BsonValue value, EncryptionOptions options) { + return getClientEncryption().encrypt(value, createEncryptOptions(options)); + } + + @Override + public BsonDocument encryptExpression(BsonDocument value, EncryptionOptions options) { + return getClientEncryption().encryptExpression(value, createEncryptOptions(options)); + } + + public ClientEncryption getClientEncryption() { + return source.get(); + } + + private EncryptOptions createEncryptOptions(EncryptionOptions options) { EncryptOptions encryptOptions = new EncryptOptions(options.algorithm()); @@ -68,11 +89,58 @@ public BsonBinary encrypt(BsonValue value, EncryptionOptions options) { encryptOptions = encryptOptions.keyId((BsonBinary) options.key().value()); } - return getClientEncryption().encrypt(value, encryptOptions); + if (options.queryableEncryptionOptions() == null) { + return encryptOptions; + } + + QueryableEncryptionOptions qeOptions = options.queryableEncryptionOptions(); + if (qeOptions.getQueryType() != null) { + encryptOptions.queryType(qeOptions.getQueryType()); + } + if (qeOptions.getContentionFactor() != null) { + encryptOptions.contentionFactor(qeOptions.getContentionFactor()); + } + if (!qeOptions.getAttributes().isEmpty()) { + encryptOptions.rangeOptions(rangeOptions(qeOptions.getAttributes())); + } + return encryptOptions; } - public ClientEncryption getClientEncryption() { - return source.get(); + protected RangeOptions rangeOptions(Map attributes) { + + RangeOptions encryptionRangeOptions = new RangeOptions(); + if (attributes.isEmpty()) { + return encryptionRangeOptions; + } + + if (attributes.containsKey("min")) { + encryptionRangeOptions.min(BsonUtils.simpleToBsonValue(attributes.get("min"))); + } + if (attributes.containsKey("max")) { + encryptionRangeOptions.max(BsonUtils.simpleToBsonValue(attributes.get("max"))); + } + if (attributes.containsKey("trimFactor")) { + Object trimFactor = attributes.get("trimFactor"); + Assert.isInstanceOf(Integer.class, trimFactor, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, trimFactor.getClass())); + + rangeOptionsAdapter(encryptionRangeOptions).trimFactor((Integer) trimFactor); + } + + if (attributes.containsKey("sparsity")) { + Object sparsity = attributes.get("sparsity"); + Assert.isInstanceOf(Number.class, sparsity, + () -> String.format("Expected to find a %s but it turned out to be %s.", Long.class, sparsity.getClass())); + encryptionRangeOptions.sparsity(((Number) sparsity).longValue()); + } + + if (attributes.containsKey("precision")) { + Object precision = attributes.get("precision"); + Assert.isInstanceOf(Number.class, precision, () -> String + .format("Expected to find a %s but it turned out to be %s.", Integer.class, precision.getClass())); + encryptionRangeOptions.precision(((Number) precision).intValue()); + } + return encryptionRangeOptions; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java index 5f08e5c787..37d1019f62 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ExplicitEncrypted.java @@ -47,6 +47,7 @@ * * * @author Christoph Strobl + * @author Ross Lawley * @since 4.1 * @see ValueConverter */ @@ -60,7 +61,8 @@ * Define the algorithm to use. *

* A {@literal Deterministic} algorithm ensures that a given input value always encrypts to the same output while a - * {@literal randomized} one will produce different results every time. + * {@literal randomized} one will produce different results every time. A {@literal range} algorithm allows for + * the value to be queried whilst encrypted. *

* Please make sure to use an algorithm that is in line with MongoDB's encryption rules for simple types, complex * objects and arrays as well as the query limitations that come with each of them. @@ -91,4 +93,5 @@ */ @AliasFor(annotation = ValueConverter.class, value = "value") Class value() default MongoEncryptionConverter.class; + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java new file mode 100644 index 0000000000..a0c67f7187 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Queryable.java @@ -0,0 +1,48 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @author Christoph Strobl + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD, ElementType.ANNOTATION_TYPE }) +public @interface Queryable { + + /** + * @return empty {@link String} if not set. + */ + String queryType() default ""; + + /** + * @return empty {@link String} if not set. + */ + String queryAttributes() default ""; + + /** + * Set the contention factor + * + * @return the contention factor + */ + long contentionFactor() default -1; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java new file mode 100644 index 0000000000..8b2eccb6ca --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/RangeEncrypted.java @@ -0,0 +1,57 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; + +/** + * @author Christoph Strobl + * @author Ross Lawley + * @since 4.5 + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +@Encrypted(algorithm = "Range") +@Queryable(queryType = "range") +public @interface RangeEncrypted { + + /** + * Set the contention factor. + * + * @return the contention factor + */ + @AliasFor(annotation = Queryable.class, value = "contentionFactor") + long contentionFactor() default -1; + + /** + * Set the {@literal range} options. + *

+ * Should be valid extended {@link org.bson.Document#parse(String) JSON} representing the range options, including + * the following values: {@code min}, {@code max}, {@code trimFactor} and {@code sparsity}. *

+ * Please note that values are data type sensitive and may require proper identification via eg. {@code $numberLong}. + * + * @return the {@link org.bson.Document#parse(String) JSON} representation of range options. + */ + @AliasFor(annotation = Queryable.class, value = "queryAttributes") + String rangeOptions() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java index 29cedfd6ce..26dbd7dffb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -23,8 +23,8 @@ import java.util.UUID; import org.bson.Document; - import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.core.EncryptionAlgorithms; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ArrayJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.BooleanJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.DateJsonSchemaObject; @@ -1036,7 +1036,7 @@ public static class EncryptedJsonSchemaProperty implements JsonSchemaProperty { private final JsonSchemaProperty targetProperty; private final @Nullable String algorithm; - private final @Nullable String keyId; + private final @Nullable Object keyId; private final @Nullable List keyIds; /** @@ -1048,7 +1048,7 @@ public EncryptedJsonSchemaProperty(JsonSchemaProperty target) { this(target, null, null, null); } - private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable String keyId, + private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable Object keyId, @Nullable List keyIds) { Assert.notNull(target, "Target must not be null"); @@ -1068,13 +1068,25 @@ public static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty target) { return new EncryptedJsonSchemaProperty(target); } + /** + * Create new instance of {@link EncryptedJsonSchemaProperty} with {@literal Range} encryption, wrapping the given + * {@link JsonSchemaProperty target}. + * + * @param target must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + * @since 4.5 + */ + public static EncryptedJsonSchemaProperty rangeEncrypted(JsonSchemaProperty target) { + return new EncryptedJsonSchemaProperty(target).algorithm(EncryptionAlgorithms.RANGE); + } + /** * Use {@literal AEAD_AES_256_CBC_HMAC_SHA_512-Random} algorithm. * * @return new instance of {@link EncryptedJsonSchemaProperty}. */ public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_random() { - return algorithm("AEAD_AES_256_CBC_HMAC_SHA_512-Random"); + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Random); } /** @@ -1083,7 +1095,7 @@ public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_random() { * @return new instance of {@link EncryptedJsonSchemaProperty}. 
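The annotations introduced above target domain type fields; a usage sketch mirroring the QueryableEncryptedRoot fixture in MappingMongoJsonSchemaCreatorUnitTests further down in this change set (the type and field names here are illustrative only).

class PatientRecord {

	@RangeEncrypted(contentionFactor = 0L,
			rangeOptions = "{ 'min': 0, 'max': 200, 'trimFactor': 1, 'sparsity': 1 }")
	Integer encryptedInt;

	// long-hand equivalent, combining the meta-annotations directly
	@Encrypted(algorithm = "Range")
	@Queryable(queryType = "range", contentionFactor = 0L, queryAttributes = "{ 'sparsity': 0 }")
	Long encryptedLong;
}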
*/ public EncryptedJsonSchemaProperty aead_aes_256_cbc_hmac_sha_512_deterministic() { - return algorithm("AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"); + return algorithm(EncryptionAlgorithms.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic); } /** @@ -1103,6 +1115,15 @@ public EncryptedJsonSchemaProperty keyId(String keyId) { return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); } + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + * @since 4.5 + */ + public EncryptedJsonSchemaProperty keyId(Object keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, keyId, null); + } + /** * @param keyId must not be {@literal null}. * @return new instance of {@link EncryptedJsonSchemaProperty}. @@ -1171,5 +1192,71 @@ private Type extractPropertyType(Document source) { return null; } + + public Object getKeyId() { + if (keyId != null) { + return keyId; + } + if (keyIds != null && keyIds.size() == 1) { + return keyIds.iterator().next(); + } + return null; + } + } + + /** + * {@link JsonSchemaProperty} implementation typically wrapping an {@link EncryptedJsonSchemaProperty encrypted + * property} to mark it as queryable. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class QueryableJsonSchemaProperty implements JsonSchemaProperty { + + private final JsonSchemaProperty targetProperty; + private final QueryCharacteristics characteristics; + + public QueryableJsonSchemaProperty(JsonSchemaProperty target, QueryCharacteristics characteristics) { + this.targetProperty = target; + this.characteristics = characteristics; + } + + @Override + public Document toDocument() { + + Document doc = targetProperty.toDocument(); + Document propertySpecification = doc.get(targetProperty.getIdentifier(), Document.class); + + if (propertySpecification.containsKey("encrypt")) { + Document encrypt = propertySpecification.get("encrypt", Document.class); + List queries = characteristics.getCharacteristics().stream().map(QueryCharacteristic::toDocument) + .toList(); + encrypt.append("queries", queries); + } + + return doc; + } + + @Override + public String getIdentifier() { + return targetProperty.getIdentifier(); + } + + @Override + public Set getTypes() { + return targetProperty.getTypes(); + } + + boolean isEncrypted() { + return targetProperty instanceof EncryptedJsonSchemaProperty; + } + + public JsonSchemaProperty getTargetProperty() { + return targetProperty; + } + + public QueryCharacteristics getCharacteristics() { + return characteristics; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java index 8529951db2..a854c6184a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/JsonSchemaProperty.java @@ -16,10 +16,22 @@ package org.springframework.data.mongodb.core.schema; import java.util.Collection; +import java.util.List; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.BooleanJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.DateJsonSchemaProperty; +import 
org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NullJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.NumericJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.RequiredJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.StringJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.TimestampJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.UntypedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.NumericJsonSchemaObject; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; -import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.*; import org.springframework.lang.Nullable; /** @@ -69,6 +81,18 @@ static EncryptedJsonSchemaProperty encrypted(JsonSchemaProperty property) { return EncryptedJsonSchemaProperty.encrypted(property); } + /** + * Turns the given target property into a {@link QueryableJsonSchemaProperty queryable} one, eg. for {@literal range} + * encrypted properties. + * + * @param property the queryable property. Must not be {@literal null}. + * @param queries predefined query characteristics. + * @since 4.5 + */ + static QueryableJsonSchemaProperty queryable(JsonSchemaProperty property, List queries) { + return new QueryableJsonSchemaProperty(property, new QueryCharacteristics(queries)); + } + /** * Creates a new {@link StringJsonSchemaProperty} with given {@literal identifier} of {@code type : 'string'}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java index e0f3e26100..a6fc3ab8bd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MergedJsonSchema.java @@ -19,7 +19,9 @@ import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.BiFunction; +import java.util.stream.Collectors; import org.bson.Document; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java new file mode 100644 index 0000000000..8604ba9d6c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristic.java @@ -0,0 +1,40 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import org.bson.Document; + +/** + * Defines the specific character of a query that can be executed. Mainly used to define the characteristic of queryable + * encrypted fields. + * + * @author Christoph Strobl + * @since 4.5 + */ +public interface QueryCharacteristic { + + /** + * @return the query type, eg. {@literal range}. + */ + String queryType(); + + /** + * @return the raw {@link Document} representation of the instance. + */ + default Document toDocument() { + return new Document("queryType", queryType()); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java new file mode 100644 index 0000000000..4ec775c5e7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/QueryCharacteristics.java @@ -0,0 +1,263 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.schema; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; +import org.springframework.lang.Nullable; + +/** + * Encapsulation of individual {@link QueryCharacteristic query characteristics} used to define queries that can be + * executed when using queryable encryption. + * + * @author Christoph Strobl + * @since 4.5 + */ +public class QueryCharacteristics implements Iterable { + + /** + * instance indicating none + */ + private static final QueryCharacteristics NONE = new QueryCharacteristics(Collections.emptyList()); + + private final List characteristics; + + QueryCharacteristics(List characteristics) { + this.characteristics = characteristics; + } + + /** + * @return marker instance indicating no characteristics have been defined. + */ + public static QueryCharacteristics none() { + return NONE; + } + + /** + * Create new {@link QueryCharacteristics} from given list of {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. 
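Since queryType() is the interface's only abstract method, a characteristic can be sketched as a lambda to show what the default toDocument() above renders; "equality" serves purely as an example query type here, the concrete implementations follow below.

// assumes an org.bson.Document import
QueryCharacteristic characteristic = () -> "equality";
Document rendered = characteristic.toDocument(); // -> { "queryType" : "equality" }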
+ */ + public static QueryCharacteristics of(List characteristics) { + return new QueryCharacteristics(List.copyOf(characteristics)); + } + + /** + * Create new {@link QueryCharacteristics} from given {@link QueryCharacteristic characteristics}. + * + * @param characteristics must not be {@literal null}. + * @return new instance of {@link QueryCharacteristics}. + */ + public static QueryCharacteristics of(QueryCharacteristic... characteristics) { + return new QueryCharacteristics(Arrays.asList(characteristics)); + } + + /** + * @return the list of {@link QueryCharacteristic characteristics}. + */ + public List getCharacteristics() { + return characteristics; + } + + @Override + public Iterator iterator() { + return this.characteristics.iterator(); + } + + /** + * Create a new {@link RangeQuery range query characteristic} used to define range queries against an encrypted field. + * + * @param targeted field type + * @return new instance of {@link RangeQuery}. + */ + public static RangeQuery range() { + return new RangeQuery<>(); + } + + /** + * Create a new {@link EqualityQuery equality query characteristic} used to define equality queries against an + * encrypted field. + * + * @param targeted field type + * @return new instance of {@link EqualityQuery}. + */ + public static EqualityQuery equality() { + return new EqualityQuery<>(null); + } + + /** + * {@link QueryCharacteristic} for equality comparison. + * + * @param + * @since 4.5 + */ + public static class EqualityQuery implements QueryCharacteristic { + + private final @Nullable Long contention; + + /** + * Create new instance of {@link EqualityQuery}. + * + * @param contention can be {@literal null}. + */ + public EqualityQuery(@Nullable Long contention) { + this.contention = contention; + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link EqualityQuery}. + */ + public EqualityQuery contention(long contention) { + return new EqualityQuery<>(contention); + } + + @Override + public String queryType() { + return "equality"; + } + + @Override + public Document toDocument() { + return QueryCharacteristic.super.toDocument().append("contention", contention); + } + } + + /** + * {@link QueryCharacteristic} for range comparison. + * + * @param + * @since 4.5 + */ + public static class RangeQuery implements QueryCharacteristic { + + private final @Nullable Range valueRange; + private final @Nullable Integer trimFactor; + private final @Nullable Long sparsity; + private final @Nullable Long precision; + private final @Nullable Long contention; + + private RangeQuery() { + this(Range.unbounded(), null, null, null, null); + } + + /** + * Create new instance of {@link RangeQuery}. + * + * @param valueRange + * @param trimFactor + * @param sparsity + * @param contention + */ + public RangeQuery(@Nullable Range valueRange, @Nullable Integer trimFactor, @Nullable Long sparsity, + @Nullable Long precision, @Nullable Long contention) { + this.valueRange = valueRange; + this.trimFactor = trimFactor; + this.sparsity = sparsity; + this.precision = precision; + this.contention = contention; + } + + /** + * @param lower the lower value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery min(T lower) { + + Range range = Range.of(Bound.inclusive(lower), + valueRange != null ? 
valueRange.getUpperBound() : Bound.unbounded()); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param upper the upper value range boundary for the queryable field. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery max(T upper) { + + Range range = Range.of(valueRange != null ? valueRange.getLowerBound() : Bound.unbounded(), + Bound.inclusive(upper)); + return new RangeQuery<>(range, trimFactor, sparsity, precision, contention); + } + + /** + * @param trimFactor value to control the throughput of concurrent inserts and updates. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery trimFactor(int trimFactor) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param sparsity value to control the value density within the index. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery sparsity(long sparsity) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param contention concurrent counter partition factor. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery contention(long contention) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + /** + * @param precision digits considered comparing floating point numbers. + * @return new instance of {@link RangeQuery}. + */ + public RangeQuery precision(long precision) { + return new RangeQuery<>(valueRange, trimFactor, sparsity, precision, contention); + } + + @Override + public String queryType() { + return "range"; + } + + @Override + @SuppressWarnings("unchecked") + public Document toDocument() { + + Document target = QueryCharacteristic.super.toDocument(); + if (contention != null) { + target.append("contention", contention); + } + if (trimFactor != null) { + target.append("trimFactor", trimFactor); + } + if (valueRange != null) { + target.append("min", valueRange.getLowerBound().getValue().orElse((T) BsonNull.VALUE)).append("max", + valueRange.getUpperBound().getValue().orElse((T) BsonNull.VALUE)); + } + if (sparsity != null) { + target.append("sparsity", sparsity); + } + + return target; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java index f85be98c1f..8bd422c493 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/MongoCompatibilityAdapter.java @@ -34,6 +34,7 @@ import com.mongodb.client.MongoDatabase; import com.mongodb.client.MongoIterable; import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.vault.RangeOptions; import com.mongodb.reactivestreams.client.MapReducePublisher; /** @@ -42,11 +43,13 @@ * This class is for internal use within the framework and should not be used by applications. 
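Tying the new schema pieces together, a hedged sketch of how a range-queryable encrypted property could feed into collection creation, mirroring the CollectionOptionsUnitTests below; the property name "data", the collection name "secured", and the MongoTemplate instance "template" are assumptions for illustration.

QueryableJsonSchemaProperty dataProperty = JsonSchemaProperty.queryable(
		JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")),
		List.of(QueryCharacteristics.range().min(0).max(200).trimFactor(1).sparsity(1).contention(0)));

CollectionOptions collectionOptions = CollectionOptions
		.encryptedCollection(CollectionOptions.EncryptedFieldsOptions.fromProperties(List.of(dataProperty)));

template.createCollection("secured", collectionOptions);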
* * @author Christoph Strobl + * @author Ross Lawley * @since 4.3 */ public class MongoCompatibilityAdapter { private static final String NO_LONGER_SUPPORTED = "%s is no longer supported on Mongo Client 5 or newer"; + private static final String NOT_SUPPORTED_ON_4 = "%s is not supported on Mongo Client 4"; private static final @Nullable Method getStreamFactoryFactory = ReflectionUtils.findMethod(MongoClientSettings.class, "getStreamFactoryFactory"); @@ -54,6 +57,19 @@ public class MongoCompatibilityAdapter { private static final @Nullable Method setBucketSize = ReflectionUtils.findMethod(IndexOptions.class, "bucketSize", Double.class); + private static final @Nullable Method setTrimFactor; + + static { + + // method name changed in between + Method trimFactor = ReflectionUtils.findMethod(RangeOptions.class, "setTrimFactor", Integer.class); + if (trimFactor != null) { + setTrimFactor = trimFactor; + } else { + setTrimFactor = ReflectionUtils.findMethod(RangeOptions.class, "trimFactor", Integer.class); + } + } + /** * Return a compatibility adapter for {@link MongoClientSettings.Builder}. * @@ -122,6 +138,23 @@ public static MapReduceIterableAdapter mapReduceIterableAdapter(Object iterable) }; } + /** + * Return a compatibility adapter for {@link RangeOptions}. + * + * @param options + * @return + */ + public static RangeOptionsAdapter rangeOptionsAdapter(RangeOptions options) { + return trimFactor -> { + + if (!MongoClientVersion.isVersion5orNewer() || setTrimFactor == null) { + throw new UnsupportedOperationException(NOT_SUPPORTED_ON_4.formatted("RangeOptions.trimFactor")); + } + + ReflectionUtils.invokeMethod(setTrimFactor, options, trimFactor); + }; + } + /** * Return a compatibility adapter for {@code MapReducePublisher}. * @@ -199,6 +232,10 @@ public interface MongoDatabaseAdapterBuilder { MongoDatabaseAdapter forDb(com.mongodb.client.MongoDatabase db); } + public interface RangeOptionsAdapter { + void trimFactor(Integer trimFactor); + } + @SuppressWarnings({ "unchecked", "DataFlowIssue" }) public static class MongoDatabaseAdapter { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java index f2691275c3..9de0863cd2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CollectionOptionsUnitTests.java @@ -15,12 +15,24 @@ */ package org.springframework.data.mongodb.core; -import static org.assertj.core.api.Assertions.*; -import static org.springframework.data.mongodb.core.CollectionOptions.*; +import static org.assertj.core.api.Assertions.assertThat; +import static org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import static org.springframework.data.mongodb.core.CollectionOptions.emitChangedRevisions; +import static org.springframework.data.mongodb.core.CollectionOptions.empty; +import static org.springframework.data.mongodb.core.CollectionOptions.encryptedCollection; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.int32; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.queryable; +import java.util.List; + +import org.bson.BsonNull; import org.bson.Document; import org.junit.jupiter.api.Test; import 
org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; import org.springframework.data.mongodb.core.validation.Validator; /** @@ -76,4 +88,93 @@ void validatorEquals() { .isNotEqualTo(empty().validator(Validator.document(new Document("three", "four")))) .isNotEqualTo(empty().validator(Validator.document(new Document("one", "two"))).moderateValidation()); } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionOptionsFromSchemaRenderCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build(); + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(schema); + + assertThat(encryptionOptions.toDocument().get("fields", List.class)).hasSize(2) + .contains(new Document("path", "mongodb").append("bsonType", "long").append("queries", List.of()) + .append("keyId", BsonNull.VALUE)) + .contains(new Document("path", "spring.data").append("bsonType", "int").append("queries", List.of()) + .append("keyId", BsonNull.VALUE)); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverrideByPath() { + + CollectionOptions collectionOptions = encryptedCollection(options -> options // + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring"))) + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data"))) + + // override first with data type long + .queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("spring")), List.of())) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("data")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } + + @Test // GH-4185 + void encryptionOptionsAreImmutable() { + + EncryptedFieldsOptions source = EncryptedFieldsOptions + .fromProperties(List.of(queryable(int32("spring.data"), List.of(QueryCharacteristics.range().min(1))))); + + assertThat(source.queryable(queryable(int32("mongodb"), 
List.of(QueryCharacteristics.range().min(1))))) + .isNotSameAs(source).satisfies(it -> { + assertThat(it.toDocument().get("fields", List.class)).hasSize(2); + }); + + assertThat(source.toDocument().get("fields", List.class)).hasSize(1); + } + + @Test // GH-4185 + @SuppressWarnings("unchecked") + void queryableEncryptionPropertiesOverridesNestedPathFromSchema() { + + EncryptedFieldsOptions encryptionOptions = EncryptedFieldsOptions.fromSchema(MongoJsonSchema.builder() + .property(JsonSchemaProperty.object("spring") + .properties(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int32("data")), List.of()))) + .property(queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("mongodb")), List.of())).build()); + + // override spring from schema with data type long + CollectionOptions collectionOptions = CollectionOptions.encryptedCollection( + encryptionOptions.queryable(JsonSchemaProperty.encrypted(JsonSchemaProperty.int64("spring.data")))); + + assertThat(collectionOptions.getEncryptedFieldsOptions()).map(EncryptedFieldsOptions::toDocument) + .hasValueSatisfying(it -> { + assertThat(it.get("fields", List.class)).hasSize(2).contains(new Document("path", "spring.data") + .append("bsonType", "long").append("queries", List.of()).append("keyId", BsonNull.VALUE)); + }); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java index af4fac84b1..78a6e6b496 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/DefaultIndexOperationsIntegrationTests.java @@ -15,9 +15,9 @@ */ package org.springframework.data.mongodb.core; -import static org.assertj.core.api.Assertions.*; -import static org.springframework.data.mongodb.core.index.PartialIndexFilter.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.index.PartialIndexFilter.of; +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; import org.bson.BsonDocument; import org.bson.Document; @@ -79,7 +79,7 @@ public void shouldApplyPartialFilterCorrectly() { IndexDefinition id = new Index().named("partial-with-criteria").on("k3y", Direction.ASC) .partial(of(where("q-t-y").gte(10))); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-criteria"); assertThat(Document.parse(info.getPartialFilterExpression())) @@ -92,7 +92,7 @@ public void shouldApplyPartialFilterWithMappedPropertyCorrectly() { IndexDefinition id = new Index().named("partial-with-mapped-criteria").on("k3y", Direction.ASC) .partial(of(where("quantity").gte(10))); - template.indexOps(DefaultIndexOperationsIntegrationTestsSample.class).ensureIndex(id); + template.indexOps(DefaultIndexOperationsIntegrationTestsSample.class).createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-mapped-criteria"); assertThat(Document.parse(info.getPartialFilterExpression())) @@ -105,7 +105,7 @@ public void shouldApplyPartialDBOFilterCorrectly() { IndexDefinition id = new Index().named("partial-with-dbo").on("k3y", Direction.ASC) .partial(of(new 
org.bson.Document("qty", new org.bson.Document("$gte", 10)))); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-dbo"); assertThat(Document.parse(info.getPartialFilterExpression())) @@ -120,7 +120,7 @@ public void shouldFavorExplicitMappingHintViaClass() { indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(id); + indexOps.createIndex(id); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "partial-with-inheritance"); assertThat(Document.parse(info.getPartialFilterExpression())) @@ -150,7 +150,7 @@ public void shouldCreateIndexWithCollationCorrectly() { new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(id); + indexOps.createIndex(id); Document expected = new Document("locale", "de_AT") // .append("caseLevel", false) // @@ -179,7 +179,7 @@ void indexShouldNotBeHiddenByDefault() { IndexDefinition index = new Index().named("my-index").on("a", Direction.ASC); indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(index); + indexOps.createIndex(index); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-index"); assertThat(info.isHidden()).isFalse(); @@ -191,7 +191,7 @@ void shouldCreateHiddenIndex() { IndexDefinition index = new Index().named("my-hidden-index").on("a", Direction.ASC).hidden(); indexOps = new DefaultIndexOperations(template, COLLECTION_NAME, MappingToSameCollection.class); - indexOps.ensureIndex(index); + indexOps.createIndex(index); IndexInfo info = findAndReturnIndexInfo(indexOps.getIndexInfo(), "my-hidden-index"); assertThat(info.isHidden()).isTrue(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java index 05f0695839..80373562c8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupportUnitTests.java @@ -33,6 +33,7 @@ * Unit tests for {@link ExecutableAggregationOperationSupport}. 
* * @author Christoph Strobl + * @author Mark Paluch */ @ExtendWith(MockitoExtension.class) public class ExecutableAggregationOperationSupportUnitTests { @@ -72,7 +73,8 @@ void aggregateWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).aggregate(any(Aggregation.class), eq("star-wars"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("star-wars"), captor.capture(), + eq(QueryResultConverter.entity())); assertThat(captor.getValue()).isEqualTo(Person.class); } @@ -86,7 +88,8 @@ void aggregateWithUntypedAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("person"), captor.capture(), + eq(QueryResultConverter.entity())); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @@ -101,7 +104,8 @@ void aggregateWithTypeAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("person"), captor.capture(), + eq(QueryResultConverter.entity())); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); } @@ -112,7 +116,8 @@ void aggregateStreamWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).stream(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).aggregateStream(any(Aggregation.class), eq("star-wars"), captor.capture()); + verify(template).doAggregateStream(any(Aggregation.class), eq("star-wars"), captor.capture(), + eq(QueryResultConverter.entity()), any()); assertThat(captor.getValue()).isEqualTo(Person.class); } @@ -126,7 +131,8 @@ void aggregateStreamWithUntypedAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregateStream(any(Aggregation.class), eq("person"), captor.capture(), + eq(QueryResultConverter.entity()), any()); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @@ -141,7 +147,8 @@ void aggregateStreamWithTypeAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregateStream(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregateStream(any(Aggregation.class), eq("person"), captor.capture(), + eq(QueryResultConverter.entity()), any()); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java index eac248e69a..3f7e167bd2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupportTests.java @@ -21,7 +21,9 @@ import static org.springframework.data.mongodb.test.util.DirtiesStateExtension.*; import java.util.Date; +import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.Stream; import org.bson.BsonString; @@ -170,6 +172,16 @@ void findAllByWithProjection() { .hasOnlyElementsOfType(Jedi.class).hasSize(1); } + @Test // GH- + void findAllByWithConverter() { + + List> result = template.query(Person.class).as(Jedi.class) + .matching(query(where("firstname").is("luke"))).map((document, reader) -> Optional.of(reader.get())).all(); + + assertThat(result).hasOnlyElementsOfType(Optional.class).hasSize(1); + assertThat(result).extracting(Optional::get).hasOnlyElementsOfType(Jedi.class).hasSize(1); + } + @Test // DATAMONGO-1563 void findBy() { assertThat(template.query(Person.class).matching(query(where("firstname").is("luke"))).one()).contains(luke); @@ -260,6 +272,15 @@ void streamAllWithProjection() { } } + @Test // GH- + void streamAllWithConverter() { + + try (Stream> stream = template.query(Person.class).as(Jedi.class) + .map((document, reader) -> Optional.of(reader.get())).stream()) { + assertThat(stream).extracting(Optional::get).hasOnlyElementsOfType(Jedi.class).hasSize(2); + } + } + @Test // DATAMONGO-1733 void streamAllReturningResultsAsClosedInterfaceProjection() { @@ -315,6 +336,20 @@ void findAllNearByWithCollectionAndProjection() { assertThat(results.getContent().get(0).getContent().getId()).isEqualTo("alderan"); } + @Test // GH- + void findAllNearByWithConverter() { + + GeoResults> results = template.query(Object.class).inCollection(STAR_WARS_PLANETS).as(Human.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).map((document, reader) -> Optional.of(reader.get())) + .all(); + + assertThat(results.getContent()).hasSize(2); + assertThat(results.getContent().get(0).getDistance()).isNotNull(); + assertThat(results.getContent().get(0).getContent()).isInstanceOf(Optional.class); + assertThat(results.getContent().get(0).getContent().get()).isInstanceOf(Human.class); + assertThat(results.getContent().get(0).getContent().get().getId()).isEqualTo("alderan"); + } + @Test // DATAMONGO-1733 void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java index d18ed6f119..adaecad5da 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -15,7 +15,8 @@ */ package org.springframework.data.mongodb.core; -import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import static org.springframework.data.mongodb.test.util.Assertions.assertThatExceptionOfType; import java.util.Collections; import java.util.Date; @@ -38,6 +39,8 @@ import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import 
org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; @@ -282,6 +285,48 @@ void wrapEncryptedEntityTypeLikeProperty() { .containsEntry("properties.domainTypeValue", Document.parse("{'encrypt': {'bsonType': 'object' } }")); } + @Test // GH-4185 + void qeRangeEncryptedProperties() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(QueryableEncryptedRoot.class); + + String expectedForInt = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'int', + 'queries' : [ + { 'queryType' : 'range', 'contention' : { '$numberLong' : '0' }, 'max' : 200, 'min' : 0, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + String expectedForRootLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '0' }, 'sparsity' : 0 } + ] + }}"""; + + String expectedForNestedLong = """ + { 'encrypt' : { + 'algorithm' : 'Range', + 'bsonType' : 'long', + 'queries' : [ + { 'queryType' : 'range', contention : { '$numberLong' : '1' }, 'max' : { '$numberLong' : '1' }, 'min' : { '$numberLong' : '-1' }, 'sparsity' : 1, 'trimFactor' : 1 } + ] + }}"""; + + assertThat(schema.schemaDocument()) // + .doesNotContainKey("properties.unencrypted") // + .containsEntry("properties.encryptedInt", Document.parse(expectedForInt)) + .containsEntry("properties.encryptedLong", Document.parse(expectedForRootLong)) + .containsEntry("properties.nested.properties.encrypted_long", Document.parse(expectedForNestedLong)); + + } + // --> TYPES AND JSON // --> ENUM @@ -311,7 +356,8 @@ enum JustSomeEnum { " 'binaryDataProperty' : { 'bsonType' : 'binData' }," + // " 'collectionProperty' : { 'type' : 'array' }," + // " 'simpleTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'string' } }," + // - " 'complexTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'object', 'properties' : { 'field' : { 'type' : 'string'} } } }" + // + " 'complexTypeCollectionProperty' : { 'type' : 'array', 'items' : { 'type' : 'object', 'properties' : { 'field' : { 'type' : 'string'} } } }" + + // " 'enumTypeCollectionProperty' : { 'type' : 'array', 'items' : " + JUST_SOME_ENUM + " }" + // " 'mapProperty' : { 'type' : 'object' }," + // " 'objectProperty' : { 'type' : 'object' }," + // @@ -692,4 +738,28 @@ static class PropertyClashWithA { static class WithEncryptedEntityLikeProperty { @Encrypted SomeDomainType domainTypeValue; } + + static class QueryableEncryptedRoot { + + String unencrypted; + + @RangeEncrypted(contentionFactor = 0L, rangeOptions = "{ 'min': 0, 'max': 200, 'trimFactor': 1, 'sparsity': 1}") // + Integer encryptedInt; + + @Encrypted(algorithm = "Range") + @Queryable(contentionFactor = 0L, queryType = "range", queryAttributes = "{ 'sparsity': 0 }") // + Long encryptedLong; + + NestedRangeEncrypted nested; + + } + + static class NestedRangeEncrypted { + + @Field("encrypted_long") + @RangeEncrypted(contentionFactor = 1L, + rangeOptions = "{ 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }, 'trimFactor': 1, 'sparsity': 1}") // + Long encryptedLong; + } + } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index 83d4e30cc5..6aaec4011e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -34,6 +34,7 @@ import java.util.stream.IntStream; import java.util.stream.Stream; +import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; @@ -3110,6 +3111,18 @@ public void generatesIdForInsertAll() { assertThat(jesse.getId()).isNotNull(); } + @Test // GH-4944 + public void insertAllShouldConvertIdToTargetTypeBeforeSave() { + + RawStringId walter = new RawStringId(); + walter.value = "walter"; + + RawStringId returned = template.insertAll(List.of(walter)).iterator().next(); + org.bson.Document document = template.execute(RawStringId.class, collection -> collection.find().first()); + + assertThat(returned.id).isEqualTo(document.get("_id")); + } + @Test // DATAMONGO-1208 public void takesSortIntoAccountWhenStreaming() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 79a0bb1fcb..81408cc22d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -1156,7 +1156,7 @@ void countShouldApplyQueryHintAsIndexNameIfPresent() { void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, - PersonProjection.class, CursorPreparer.NO_OP_PREPARER); + PersonProjection.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("firstname", 1))); } @@ -1165,7 +1165,7 @@ void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class, - PersonProjection.class, CursorPreparer.NO_OP_PREPARER); + PersonProjection.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("bar", 1))); } @@ -1174,7 +1174,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, - PersonSpELProjection.class, CursorPreparer.NO_OP_PREPARER); + PersonSpELProjection.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @@ -1183,7 +1183,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { void appliesFieldsToDtoProjection() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, - Jedi.class, CursorPreparer.NO_OP_PREPARER); + Jedi.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); 
verify(findIterable).projection(eq(new Document("firstname", 1))); } @@ -1192,7 +1192,7 @@ void appliesFieldsToDtoProjection() { void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document("bar", 1), Person.class, - Jedi.class, CursorPreparer.NO_OP_PREPARER); + Jedi.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(new Document("bar", 1))); } @@ -1201,7 +1201,7 @@ void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { void doesNotApplyFieldsWhenTargetIsNotAProjection() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, - Person.class, CursorPreparer.NO_OP_PREPARER); + Person.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @@ -1210,7 +1210,7 @@ void doesNotApplyFieldsWhenTargetIsNotAProjection() { void doesNotApplyFieldsWhenTargetExtendsDomainType() { template.doFind(CollectionPreparer.identity(), "star-wars", new Document(), new Document(), Person.class, - PersonExtended.class, CursorPreparer.NO_OP_PREPARER); + PersonExtended.class, QueryResultConverter.entity(), CursorPreparer.NO_OP_PREPARER); verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java index 9d4ed339b5..83e1b3c272 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupportUnitTests.java @@ -72,7 +72,8 @@ void aggregateWithUntypedAggregationAndExplicitCollection() { opSupport.aggregateAndReturn(Person.class).inCollection("star-wars").by(newAggregation(project("foo"))).all(); ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); - verify(template).aggregate(any(Aggregation.class), eq("star-wars"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("star-wars"), captor.capture(), any(Class.class), + eq(QueryResultConverter.entity())); assertThat(captor.getValue()).isEqualTo(Person.class); } @@ -86,7 +87,8 @@ void aggregateWithUntypedAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("person"), captor.capture(), any(Class.class), + eq(QueryResultConverter.entity())); assertThat(captor.getAllValues()).containsExactly(Person.class, Person.class); } @@ -101,7 +103,8 @@ void aggregateWithTypeAggregation() { ArgumentCaptor captor = ArgumentCaptor.forClass(Class.class); verify(template).getCollectionName(captor.capture()); - verify(template).aggregate(any(Aggregation.class), eq("person"), captor.capture()); + verify(template).doAggregate(any(Aggregation.class), eq("person"), captor.capture(), any(Class.class), + eq(QueryResultConverter.entity())); assertThat(captor.getAllValues()).containsExactly(Person.class, Jedi.class); } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java index f23e973202..28b77cdfa9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupportTests.java @@ -26,6 +26,7 @@ import java.util.Date; import java.util.Objects; +import java.util.Optional; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; @@ -167,6 +168,17 @@ void findAllWithProjection() { .verifyComplete(); } + @Test // GH-… + void findAllWithConverter() { + + template.query(Person.class).as(Jedi.class).map((document, reader) -> Optional.of(reader.get())).all() + .map(Optional::get) // + .map(it -> it.getClass().getName()) // + .as(StepVerifier::create) // + .expectNext(Jedi.class.getName(), Jedi.class.getName()) // + .verifyComplete(); + } + @Test // DATAMONGO-1719 void findAllBy() { @@ -299,6 +311,32 @@ void findAllNearByWithCollectionAndProjection() { .verifyComplete(); } + @Test // GH-… + @DirtiesState + void findAllNearByWithConverter() { + + blocking.indexOps(Planet.class).ensureIndex( + new GeospatialIndex("coordinates").typed(GeoSpatialIndexType.GEO_2DSPHERE).named("planet-coordinate-idx")); + + Planet alderan = new Planet("alderan", new Point(-73.9836, 40.7538)); + Planet dantooine = new Planet("dantooine", new Point(-73.9928, 40.7193)); + + blocking.save(alderan); + blocking.save(dantooine); + + template.query(Object.class).inCollection(STAR_WARS).as(Human.class) + .near(NearQuery.near(-73.9667, 40.78).spherical(true)).map((document, reader) -> Optional.of(reader.get())) // + .all() // + .as(StepVerifier::create).consumeNextWith(actual -> { + assertThat(actual.getDistance()).isNotNull(); + assertThat(actual.getContent()).isInstanceOf(Optional.class); + assertThat(actual.getContent().get()).isInstanceOf(Human.class); + assertThat(actual.getContent().get().getId()).isEqualTo("alderan"); + }) // + .expectNextCount(1) // + .verifyComplete(); + } + @Test // DATAMONGO-1719 @DirtiesState void findAllNearByReturningGeoResultContentAsClosedInterfaceProjection() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java index 80dd584b9e..f87227cdde 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateTests.java @@ -48,6 +48,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; + import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.support.GenericApplicationContext; import org.springframework.dao.DataIntegrityViolationException; @@ -84,6 +85,7 @@ import com.mongodb.WriteConcern; import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; /** * Integration test for {@link MongoTemplate}. 
@@ -165,6 +167,19 @@ void insertCollectionSetsId() { assertThat(person.getId()).isNotNull(); } + @Test // GH-4944 + void insertAllShouldConvertIdToTargetTypeBeforeSave() { + + RawStringId walter = new RawStringId(); + walter.value = "walter"; + + RawStringId returned = template.insertAll(List.of(walter)).blockLast(); + template.execute(RawStringId.class, MongoCollection::find) // + .as(StepVerifier::create) // + .consumeNextWith(actual -> assertThat(returned.id).isEqualTo(actual.get("_id"))) // + .verifyComplete(); + } + @Test // DATAMONGO-1444 void saveSetsId() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index f89b2fa8c1..cc50a684cc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -18,6 +18,7 @@ import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import static org.springframework.data.mongodb.test.util.Assertions.assertThat; import reactor.core.publisher.Flux; @@ -53,6 +54,7 @@ import org.mockito.quality.Strictness; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; + import org.springframework.beans.factory.annotation.Value; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; @@ -437,7 +439,7 @@ void geoNearShouldHonorReadConcernFromQuery() { void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, - PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + PersonProjection.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("firstname", 1))); } @@ -446,7 +448,7 @@ void appliesFieldsWhenInterfaceProjectionIsClosedAndQueryDoesNotDefineFields() { void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, - PersonProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + PersonProjection.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("bar", 1))); } @@ -455,7 +457,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsClosedAndQueryDefinesFields() { void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, - PersonSpELProjection.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + PersonSpELProjection.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } @@ -464,7 +466,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { void appliesFieldsToDtoProjection() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, - Jedi.class, 
FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + Jedi.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("firstname", 1))); } @@ -473,7 +475,7 @@ void appliesFieldsToDtoProjection() { void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document("bar", 1), Person.class, - Jedi.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + Jedi.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher).projection(eq(new Document("bar", 1))); } @@ -482,7 +484,7 @@ void doesNotApplyFieldsToDtoProjectionWhenQueryDefinesFields() { void doesNotApplyFieldsWhenTargetIsNotAProjection() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, - Person.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + Person.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } @@ -491,7 +493,7 @@ void doesNotApplyFieldsWhenTargetIsNotAProjection() { void doesNotApplyFieldsWhenTargetExtendsDomainType() { template.doFind("star-wars", CollectionPreparer.identity(), new Document(), new Document(), Person.class, - PersonExtended.class, FindPublisherPreparer.NO_OP_PREPARER).subscribe(); + PersonExtended.class, QueryResultConverter.entity(), FindPublisherPreparer.NO_OP_PREPARER).subscribe(); verify(findPublisher, never()).projection(any()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java index 99579b34a7..95a29fe8ba 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AggregationTests.java @@ -37,6 +37,7 @@ import java.util.Date; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Scanner; import java.util.stream.Stream; @@ -287,6 +288,60 @@ void shouldAggregateEmptyCollectionAndStream() { } } + @Test // GH- + void shouldAggregateAsStreamWithConverter() { + + MongoCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); + + coll.insertOne(createDocument("Doc1", "spring", "mongodb", "nosql")); + coll.insertOne(createDocument("Doc2")); + + Aggregation aggregation = newAggregation(// + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + sort(DESC, "n") // + ); + + try (Stream> stream = mongoTemplate.aggregateAndReturn(TagCount.class) + .inCollection(INPUT_COLLECTION).by(aggregation).map((document, reader) -> Optional.of(reader.get())).stream()) { + + List tagCount = stream.flatMap(Optional::stream).toList(); + + assertThat(tagCount).hasSize(3); + } + } + + @Test // GH- + void shouldAggregateWithConverter() { + + MongoCollection coll = mongoTemplate.getCollection(INPUT_COLLECTION); + + coll.insertOne(createDocument("Doc1", "spring", "mongodb", "nosql")); + coll.insertOne(createDocument("Doc2")); + + Aggregation aggregation = newAggregation(// + project("tags"), // + unwind("tags"), // + group("tags") // + .count().as("n"), // + project("n") // + .and("tag").previousOperation(), // + 
sort(DESC, "n") // + ); + + AggregationResults> results = mongoTemplate.aggregateAndReturn(TagCount.class) + .inCollection(INPUT_COLLECTION) // + .by(aggregation) // + .map((document, reader) -> Optional.of(reader.get())) // + .all(); + + assertThat(results.getMappedResults()).extracting(Optional::get).hasOnlyElementsOfType(TagCount.class).hasSize(3); + } + @Test // DATAMONGO-1391 void shouldUnwindWithIndex() { @@ -501,7 +556,7 @@ void findStatesWithPopulationOver10MillionAggregationExample() { /* //complex mongodb aggregation framework example from https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state - + db.zipcodes.aggregate( { $group: { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java index 55d6bf3b60..62d13a8f27 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationTests.java @@ -22,6 +22,7 @@ import reactor.test.StepVerifier; import java.util.Arrays; +import java.util.Optional; import org.bson.Document; import org.junit.After; @@ -115,6 +116,29 @@ public void shouldProjectMultipleDocuments() { }).verifyComplete(); } + @Test // GH-… + public void shouldProjectAndConvertMultipleDocuments() { + + City dresden = new City("Dresden", 100); + City linz = new City("Linz", 101); + City braunschweig = new City("Braunschweig", 102); + City weinheim = new City("Weinheim", 103); + + reactiveMongoTemplate.insertAll(Arrays.asList(dresden, linz, braunschweig, weinheim)).as(StepVerifier::create) + .expectNextCount(4).verifyComplete(); + + Aggregation agg = newAggregation( // + match(where("population").lt(103))); + + reactiveMongoTemplate.aggregateAndReturn(City.class).inCollection("city").by(agg) + .map((document, reader) -> Optional.of(reader.get())) // + .all() // + .collectList() // + .as(StepVerifier::create).consumeNextWith(actual -> { + assertThat(actual).hasSize(3).extracting(Optional::get).contains(dresden, linz, braunschweig); + }).verifyComplete(); + } + @Test // DATAMONGO-1646 public void shouldAggregateToOutCollection() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java new file mode 100644 index 0000000000..dd9e459e78 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/MongoQueryableEncryptionCollectionCreationTests.java @@ -0,0 +1,142 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.QueryCharacteristics.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.List; +import java.util.UUID; +import java.util.stream.Stream; + +import org.bson.BsonBinary; +import org.bson.Document; +import org.bson.UuidRepresentation; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +import com.mongodb.client.MongoClient; + +/** + * Integration tests for creating collections with encrypted fields. + * + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") +@ContextConfiguration +public class MongoQueryableEncryptionCollectionCreationTests { + + public static final String COLLECTION_NAME = "enc-collection"; + static @Client MongoClient mongoClient; + + @Configuration + static class Config extends AbstractMongoClientConfiguration { + + @Override + public MongoClient mongoClient() { + return mongoClient; + } + + @Override + protected String getDatabaseName() { + return "encryption-schema-tests"; + } + + } + + @Autowired MongoTemplate template; + + @BeforeEach + void beforeEach() { + template.dropCollection(COLLECTION_NAME); + } + + @ParameterizedTest // GH-4185 + @MethodSource("collectionOptions") + public void createsCollectionWithEncryptedFieldsCorrectly(CollectionOptions collectionOptions) { + + template.createCollection(COLLECTION_NAME, collectionOptions); + + Document encryptedFields = readEncryptedFieldsFromDatabase(COLLECTION_NAME); + assertThat(encryptedFields).containsKey("fields"); + + List fields = encryptedFields.get("fields", List.of()); + assertThat(fields.get(0)).containsEntry("path", "encryptedInt") // + .containsEntry("bsonType", "int") // + .containsEntry("queries", List + .of(Document.parse("{'queryType': 'range', 'contention': { '$numberLong' : '1' }, 'min': 5, 'max': 100}"))); + + assertThat(fields.get(1)).containsEntry("path", "nested.encryptedLong") // + .containsEntry("bsonType", "long") // + .containsEntry("queries", List.of(Document.parse( + "{'queryType': 'range', 'contention': { '$numberLong' : '0' }, 'min': { '$numberLong' : '-1' }, 'max': { '$numberLong' : '1' }}"))); + } + + private static Stream collectionOptions() { + + BsonBinary key1 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD); + 
BsonBinary key2 = new BsonBinary(UUID.randomUUID(), UuidRepresentation.STANDARD); + + CollectionOptions manualOptions = CollectionOptions.encryptedCollection(options -> options // + .queryable(encrypted(int32("encryptedInt")).keys(key1), range().min(5).max(100).contention(1)) // + .queryable(encrypted(JsonSchemaProperty.int64("nested.encryptedLong")).keys(key2), + range().min(-1L).max(1L).contention(0))); + + CollectionOptions schemaOptions = CollectionOptions.encryptedCollection(MongoJsonSchema.builder() + .property( + queryable(encrypted(int32("encryptedInt")).keyId(key1), List.of(range().min(5).max(100).contention(1)))) + .property(queryable(encrypted(int64("nested.encryptedLong")).keyId(key2), + List.of(range().min(-1L).max(1L).contention(0)))) + .build()); + + return Stream.of(Arguments.of(manualOptions), Arguments.of(schemaOptions)); + } + + Document readEncryptedFieldsFromDatabase(String collectionName) { + + Document collectionInfo = template + .executeCommand(new Document("listCollections", 1).append("filter", new Document("name", collectionName))); + + if (collectionInfo.containsKey("cursor")) { + collectionInfo = (Document) collectionInfo.get("cursor", Document.class).get("firstBatch", List.class).iterator() + .next(); + } + + if (!collectionInfo.containsKey("options")) { + return new Document(); + } + + return collectionInfo.get("options", Document.class).get("encryptedFields", Document.class); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java new file mode 100644 index 0000000000..e4e760cc91 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/encryption/RangeEncryptionTests.java @@ -0,0 +1,573 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.encryption; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; + +import java.security.SecureRandom; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; + +import org.assertj.core.api.Assumptions; +import org.bson.BsonBinary; +import org.bson.BsonDocument; +import org.bson.BsonInt32; +import org.bson.BsonString; +import org.bson.Document; +import org.junit.Before; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.Bean; +import org.springframework.data.convert.PropertyValueConverterFactory; +import org.springframework.data.convert.ValueConverter; +import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; +import org.springframework.data.mongodb.core.CollectionOptions; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.MongoJsonSchemaCreator; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; +import org.springframework.data.mongodb.core.convert.encryption.MongoEncryptionConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.mapping.RangeEncrypted; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.util.MongoClientVersion; +import org.springframework.data.util.Lazy; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.springframework.util.StringUtils; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ClientEncryptionSettings; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoNamespace; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.client.MongoCollection; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateEncryptedCollectionParams; +import com.mongodb.client.model.Filters; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.client.model.Indexes; +import com.mongodb.client.model.vault.EncryptOptions; +import com.mongodb.client.model.vault.RangeOptions; +import com.mongodb.client.result.UpdateResult; 
+import com.mongodb.client.vault.ClientEncryption; +import com.mongodb.client.vault.ClientEncryptions; + +/** + * @author Ross Lawley + * @author Christoph Strobl + */ +@ExtendWith({ MongoClientExtension.class, SpringExtension.class }) +@EnableIfMongoServerVersion(isGreaterThanEqual = "8.0") +@EnableIfReplicaSetAvailable +@ContextConfiguration(classes = RangeEncryptionTests.EncryptionConfig.class) +class RangeEncryptionTests { + + @Autowired MongoTemplate template; + @Autowired MongoClientEncryption clientEncryption; + @Autowired EncryptionKeyHolder keyHolder; + + @BeforeEach + void clientVersionCheck() { + Assumptions.assumeThat(MongoClientVersion.isVersion5orNewer()).isTrue(); + } + + @AfterEach + void tearDown() { + template.getDb().getCollection("test").deleteMany(new BsonDocument()); + } + + @Test // GH-4185 + void manuallyEncryptedValuesCanBeSavedAndRetrievedCorrectly() { + + EncryptOptions encryptOptions = new EncryptOptions("Range").contentionFactor(1L) + .keyId(keyHolder.getEncryptionKey("encryptedInt")) + .rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200)).sparsity(1L)); + + EncryptOptions encryptExpressionOptions = new EncryptOptions("Range").contentionFactor(1L) + .rangeOptions(new RangeOptions().min(new BsonInt32(0)).max(new BsonInt32(200))) + .keyId(keyHolder.getEncryptionKey("encryptedInt")).queryType("range"); + + EncryptOptions equalityEncOptions = new EncryptOptions("Indexed").contentionFactor(0L) + .keyId(keyHolder.getEncryptionKey("age")); + ; + + EncryptOptions equalityEncOptionsString = new EncryptOptions("Indexed").contentionFactor(0L) + .keyId(keyHolder.getEncryptionKey("name")); + ; + + Document source = new Document("_id", "id-1"); + + source.put("name", + clientEncryption.getClientEncryption().encrypt(new BsonString("It's a Me, Mario!"), equalityEncOptionsString)); + source.put("age", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), equalityEncOptions)); + source.put("encryptedInt", clientEncryption.getClientEncryption().encrypt(new BsonInt32(101), encryptOptions)); + source.put("_class", Person.class.getName()); + + template.execute(Person.class, col -> col.insertOne(source)); + + Document result = template.execute(Person.class, col -> { + + BsonDocument filterSource = new BsonDocument("encryptedInt", new BsonDocument("$gte", new BsonInt32(100))); + BsonDocument filter = clientEncryption.getClientEncryption() + .encryptExpression(new Document("$and", List.of(filterSource)), encryptExpressionOptions); + + return col.find(filter).first(); + }); + + assertThat(result).containsEntry("encryptedInt", 101); + } + + @Test // GH-4185 + void canLesserThanEqualMatchRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("encryptedInt").lte(source.encryptedInt)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfEqualityEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("name").is(source.name).and("unencryptedValue").is(source.unencryptedValue)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryMixOfRangeEncryptedAndUnencrypted() { + + Person source = template.insert(createPerson()); + + Person loaded = template.query(Person.class) + .matching(where("encryptedInt").lte(source.encryptedInt).and("unencryptedValue").is(source.unencryptedValue)) + 
.firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canQueryEqualityEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Person loaded = template.query(Person.class).matching(where("age").is(source.age)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canExcludeSafeContentFromResult() { + + Person source = createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + q.fields().exclude("__safeContent__"); + + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canRangeMatchRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + Query q = Query.query(where("encryptedLong").lte(1001L).gte(1001L)); + Person loaded = template.query(Person.class).matching(q).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canReplaceEntityWithRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + source.encryptedInt = 123; + source.encryptedLong = 9999L; + template.save(source); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded).isEqualTo(source); + } + + @Test // GH-4185 + void canUpdateRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + UpdateResult updateResult = template.update(Person.class).matching(where("id").is(source.id)) + .apply(Update.update("encryptedLong", 5000L)).first(); + assertThat(updateResult.getModifiedCount()).isOne(); + + Person loaded = template.query(Person.class).matching(where("id").is(source.id)).firstValue(); + assertThat(loaded.encryptedLong).isEqualTo(5000L); + } + + @Test // GH-4185 + void errorsWhenUsingNonRangeOperatorEqOnRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + assertThatThrownBy( + () -> template.query(Person.class).matching(where("encryptedInt").is(source.encryptedInt)).firstValue()) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith("Not a valid range query. Querying a range encrypted field but " + + "the query operator '$eq' for field path 'encryptedInt' is not a range query."); + } + + @Test // GH-4185 + void errorsWhenUsingNonRangeOperatorInOnRangeEncryptedField() { + + Person source = createPerson(); + template.insert(source); + + assertThatThrownBy( + () -> template.query(Person.class).matching(where("encryptedLong").in(1001L, 9999L)).firstValue()) + .isInstanceOf(AssertionError.class) + .hasMessageStartingWith("Not a valid range query. 
Querying a range encrypted field but " + + "the query operator '$in' for field path 'encryptedLong' is not a range query."); + } + + private Person createPerson() { + + Person source = new Person(); + source.id = "id-1"; + source.unencryptedValue = "y2k"; + source.name = "it's a me mario!"; + source.age = 42; + source.encryptedInt = 101; + source.encryptedLong = 1001L; + source.nested = new NestedWithQEFields(); + source.nested.value = "Luigi time!"; + return source; + } + + protected static class EncryptionConfig extends AbstractMongoClientConfiguration { + + private static final String LOCAL_KMS_PROVIDER = "local"; + + private static final Lazy>> LAZY_KMS_PROVIDERS = Lazy.of(() -> { + byte[] localMasterKey = new byte[96]; + new SecureRandom().nextBytes(localMasterKey); + return Map.of(LOCAL_KMS_PROVIDER, Map.of("key", localMasterKey)); + }); + + @Autowired ApplicationContext applicationContext; + + @Override + protected String getDatabaseName() { + return "qe-test"; + } + + @Bean + public MongoClient mongoClient() { + return super.mongoClient(); + } + + @Override + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + converterConfigurationAdapter + .registerPropertyValueConverterFactory(PropertyValueConverterFactory.beanFactoryAware(applicationContext)) + .useNativeDriverJavaTimeCodecs(); + } + + @Bean + EncryptionKeyHolder keyHolder(MongoClientEncryption mongoClientEncryption) { + + Lazy> lazyDataKeyMap = Lazy.of(() -> { + try (MongoClient client = mongoClient()) { + + MongoDatabase database = client.getDatabase(getDatabaseName()); + database.getCollection("test").drop(); + + ClientEncryption clientEncryption = mongoClientEncryption.getClientEncryption(); + + MongoJsonSchema personSchema = MongoJsonSchemaCreator.create(new MongoMappingContext()) // init schema creator + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(Person.class); // + + Document encryptedFields = CollectionOptions.encryptedCollection(personSchema) // + .getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .orElseThrow(); + + CreateCollectionOptions createCollectionOptions = new CreateCollectionOptions() + .encryptedFields(encryptedFields); + + BsonDocument local = clientEncryption.createEncryptedCollection(database, "test", createCollectionOptions, + new CreateEncryptedCollectionParams(LOCAL_KMS_PROVIDER)); + + Map keyMap = new LinkedHashMap<>(); + for (Object o : local.getArray("fields")) { + if (o instanceof BsonDocument db) { + String path = db.getString("path").getValue(); + BsonBinary binary = db.getBinary("keyId"); + for (String part : path.split("\\.")) { + keyMap.put(part, binary); + } + } + } + return keyMap; + } + }); + + return new EncryptionKeyHolder(lazyDataKeyMap); + } + + @Bean + MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption, + EncryptionKeyHolder keyHolder) { + return new MongoEncryptionConverter(mongoClientEncryption, EncryptionKeyResolver.annotated((ctx) -> { + + String path = ctx.getProperty().getFieldName(); + + if (ctx.getProperty().getMongoField().getName().isPath()) { + path = StringUtils.arrayToDelimitedString(ctx.getProperty().getMongoField().getName().parts(), "."); + } + if (ctx.getOperatorContext() != null) { + path = ctx.getOperatorContext().path(); + } + return EncryptionKey.keyId(keyHolder.getEncryptionKey(path)); + })); + } + + @Bean + CachingMongoClientEncryption clientEncryption(ClientEncryptionSettings encryptionSettings) { + return new 
CachingMongoClientEncryption(() -> ClientEncryptions.create(encryptionSettings)); + } + + @Override + protected void configureClientSettings(MongoClientSettings.Builder builder) { + try (MongoClient client = MongoClients.create()) { + ClientEncryptionSettings clientEncryptionSettings = encryptionSettings(client); + + builder.autoEncryptionSettings(AutoEncryptionSettings.builder() // + .kmsProviders(clientEncryptionSettings.getKmsProviders()) // + .keyVaultNamespace(clientEncryptionSettings.getKeyVaultNamespace()) // + .bypassQueryAnalysis(true).build()); + } + } + + @Bean + ClientEncryptionSettings encryptionSettings(MongoClient mongoClient) { + MongoNamespace keyVaultNamespace = new MongoNamespace("encryption.testKeyVault"); + MongoCollection keyVaultCollection = mongoClient.getDatabase(keyVaultNamespace.getDatabaseName()) + .getCollection(keyVaultNamespace.getCollectionName()); + keyVaultCollection.drop(); + // Ensure that two data keys cannot share the same keyAltName. + keyVaultCollection.createIndex(Indexes.ascending("keyAltNames"), + new IndexOptions().unique(true).partialFilterExpression(Filters.exists("keyAltNames"))); + + mongoClient.getDatabase(getDatabaseName()).getCollection("test").drop(); // Clear old data + + // Create the ClientEncryption instance + return ClientEncryptionSettings.builder() // + .keyVaultMongoClientSettings( + MongoClientSettings.builder().applyConnectionString(new ConnectionString("mongodb://localhost")).build()) // + .keyVaultNamespace(keyVaultNamespace.getFullName()) // + .kmsProviders(LAZY_KMS_PROVIDERS.get()) // + .build(); + } + } + + static class CachingMongoClientEncryption extends MongoClientEncryption implements DisposableBean { + + static final AtomicReference cache = new AtomicReference<>(); + + CachingMongoClientEncryption(Supplier source) { + super(() -> { + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption == null) { + clientEncryption = source.get(); + cache.set(clientEncryption); + } + + return clientEncryption; + }); + } + + @Override + public void destroy() { + ClientEncryption clientEncryption = cache.get(); + if (clientEncryption != null) { + clientEncryption.close(); + cache.set(null); + } + } + } + + static class EncryptionKeyHolder { + + Supplier> lazyDataKeyMap; + + public EncryptionKeyHolder(Supplier> lazyDataKeyMap) { + this.lazyDataKeyMap = Lazy.of(lazyDataKeyMap); + } + + BsonBinary getEncryptionKey(String path) { + return lazyDataKeyMap.get().get(path); + } + } + + @org.springframework.data.mongodb.core.mapping.Document("test") + static class Person { + + String id; + + String unencryptedValue; + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + String name; + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + Integer age; + + @ValueConverter(MongoEncryptionConverter.class) + @RangeEncrypted(contentionFactor = 0L, + rangeOptions = "{\"min\": 0, \"max\": 200, \"trimFactor\": 1, \"sparsity\": 1}") // + Integer encryptedInt; + + @ValueConverter(MongoEncryptionConverter.class) + @RangeEncrypted(contentionFactor = 0L, + rangeOptions = "{\"min\": {\"$numberLong\": \"1000\"}, \"max\": {\"$numberLong\": \"9999\"}, \"trimFactor\": 1, \"sparsity\": 1}") // + Long encryptedLong; + + NestedWithQEFields nested; + + public String getId() { + return this.id; + } + + public void setId(String id) { + this.id = id; + } + + 
public String getName() { + return this.name; + } + + public void setName(String name) { + this.name = name; + } + + public Integer getEncryptedInt() { + return this.encryptedInt; + } + + public void setEncryptedInt(Integer encryptedInt) { + this.encryptedInt = encryptedInt; + } + + public Long getEncryptedLong() { + return this.encryptedLong; + } + + public void setEncryptedLong(Long encryptedLong) { + this.encryptedLong = encryptedLong; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Person person = (Person) o; + return Objects.equals(id, person.id) && Objects.equals(unencryptedValue, person.unencryptedValue) + && Objects.equals(name, person.name) && Objects.equals(age, person.age) + && Objects.equals(encryptedInt, person.encryptedInt) && Objects.equals(encryptedLong, person.encryptedLong); + } + + @Override + public int hashCode() { + return Objects.hash(id, unencryptedValue, name, age, encryptedInt, encryptedLong); + } + + @Override + public String toString() { + return "Person{" + "id='" + id + '\'' + ", unencryptedValue='" + unencryptedValue + '\'' + ", name='" + name + + '\'' + ", age=" + age + ", encryptedInt=" + encryptedInt + ", encryptedLong=" + encryptedLong + '}'; + } + } + + static class NestedWithQEFields { + + @ValueConverter(MongoEncryptionConverter.class) + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) // + String value; + + @Override + public String toString() { + return "NestedWithQEFields{" + "value='" + value + '\'' + '}'; + } + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NestedWithQEFields that = (NestedWithQEFields) o; + return Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java index 3514927b18..1691305617 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/schema/MongoJsonSchemaUnitTests.java @@ -15,11 +15,14 @@ */ package org.springframework.data.mongodb.core.schema; +import static org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty.*; import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.*; +import static org.springframework.data.mongodb.core.schema.JsonSchemaProperty.encrypted; import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import java.util.Collections; +import java.util.List; import java.util.UUID; import org.bson.Document; @@ -105,6 +108,37 @@ void rendersEncryptedPropertyWithKeyIdCorrectly() { .append("algorithm", "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic").append("bsonType", "string")))))); } + @Test // GH-4185 + void rendersQueryablePropertyCorrectly() { + + MongoJsonSchema schema = MongoJsonSchema.builder().properties( // + queryable(rangeEncrypted(number("ssn")), + List.of(QueryCharacteristics.range().contention(0).trimFactor(1).sparsity(1).min(0).max(200)))) + .build(); + + 
assertThat(schema.toDocument().get("$jsonSchema", Document.class)).isEqualTo(""" + { + "type": "object", + "properties": { + "ssn": { + "encrypt": { + "bsonType": "long", + "algorithm": "Range", + "queries": [{ + "queryType": "range", + "contention": {$numberLong: "0"}, + "trimFactor": 1, + "sparsity": {$numberLong: "1"}, + "min": 0, + "max": 200 + }] + } + } + } + } + """); + } + @Test // DATAMONGO-1835 void throwsExceptionOnNullRoot() { assertThatIllegalArgumentException().isThrownBy(() -> MongoJsonSchema.of((JsonSchemaObject) null)); diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc index 98a6d2478a..14e866cf14 100644 --- a/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc +++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-encryption.adoc @@ -1,8 +1,8 @@ [[mongo.encryption]] -= Encryption (CSFLE) += Encryption Client Side Encryption is a feature that encrypts data in your application before it is sent to MongoDB. -We recommend you get familiar with the concepts, ideally from the https://www.mongodb.com/docs/manual/core/csfle/[MongoDB Documentation] to learn more about its capabilities and restrictions before you continue applying Encryption through Spring Data. +We recommend you get familiar with the concepts, ideally from the https://www.mongodb.com/docs/manual/core/security-in-use-encryption/[MongoDB Documentation] to learn more about its capabilities and restrictions before you continue applying Encryption through Spring Data. [NOTE] ==== @@ -11,8 +11,13 @@ MongoDB does not support encryption for all field types. Specific data types require deterministic encryption to preserve equality comparison functionality. ==== +== Client Side Field Level Encryption (CSFLE) + +Choosing CSFLE gives you full flexibility and allows you to use different keys for a single field, eg. in a one key per tenant scenario. + +Please make sure to consult the https://www.mongodb.com/docs/manual/core/csfle/[MongoDB CSFLE Documentation] before you continue reading. + [[mongo.encryption.automatic]] -== Automatic Encryption +=== Automatic Encryption (CSFLE) MongoDB supports https://www.mongodb.com/docs/manual/core/csfle/[Client-Side Field Level Encryption] out of the box using the MongoDB driver with its Automatic Encryption feature. Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows to perform encrypted read and write operations without the need to provide an explicit en-/decryption step. @@ -47,7 +52,7 @@ MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) { ---- [[mongo.encryption.explicit]] -== Explicit Encryption +=== Explicit Encryption (CSFLE) Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks. The `@ExplicitEncrypted` annotation is a combination of the `@Encrypted` annotation used for xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation] and a xref:mongodb/mapping/property-converters.adoc[Property Converter]. @@ -114,8 +119,147 @@ By default, the `@ExplicitEncrypted(value=…)` attribute references a `MongoEnc It is possible to change the default implementation and exchange it with any `PropertyValueConverter` implementation by providing the according type reference. 
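For illustration only, a deterministically encrypted property handled by the default converter might be declared as sketched below; the field name, algorithm string, and alternative key name are placeholders and not taken from this change:

[source,java,indent=0,subs="verbatim,quotes"]
----
class Patient {

	// hypothetical field; algorithm and altKeyName are illustrative placeholders
	@ExplicitEncrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic", altKeyName = "patient-ssn-key") //
	String ssn;
}
----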
To learn more about custom `PropertyValueConverters` and the required configuration, please refer to the xref:mongodb/mapping/property-converters.adoc[Property Converters - Mapping specific fields] section. +[[mongo.encryption.queryable]] +== Queryable Encryption (QE) + +Choosing QE enables you to run different types of queries, such as _range_ or _equality_, against encrypted fields. + +Please make sure to consult the https://www.mongodb.com/docs/manual/core/queryable-encryption/[MongoDB QE Documentation] before you continue reading to learn more about QE features and limitations. + +=== Collection Setup + +Queryable Encryption requires an upfront declaration of the aspects that are allowed within an actual query against an encrypted field. +This information covers the algorithm in use as well as the allowed query types along with their attributes, and must be provided when creating the collection. + +`MongoOperations#createCollection(...)` can be used to do the initial setup for collections utilizing QE. +The configuration for QE via Spring Data uses the same building blocks (a xref:mongodb/mapping/mapping-schema.adoc#mongo.jsonSchema.encrypted-fields[JSON Schema creation]) as CSFLE, converting the schema/properties into the configuration format required by MongoDB. + +[tabs] +====== +Manual Collection Setup:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="primary"] +---- +CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(options -> options + .queryable(encrypted(string("ssn")).algorithm("Indexed"), equality().contention(0)) + .queryable(encrypted(int32("age")).algorithm("Range"), range().contention(8).min(0).max(150)) + .queryable(encrypted(int64("address.sign")).algorithm("Range"), range().contention(2).min(-10L).max(10L)) +); + +mongoTemplate.createCollection(Patient.class, collectionOptions); <1> +---- +<1> Using the template to create the collection may prevent capturing generated keyIds. In this case, render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library. +==== + +Derived Collection Setup:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="secondary"] +---- +class Patient { + + @Id String id; + + @Encrypted(algorithm = "Indexed") // + @Queryable(queryType = "equality", contentionFactor = 0) + String ssn; + + @RangeEncrypted(contentionFactor = 8, rangeOptions = "{ 'min' : 0, 'max' : 150 }") + Integer age; + + Address address; +} + +MongoJsonSchema patientSchema = MongoJsonSchemaCreator.create(mappingContext) + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); + +CollectionOptions collectionOptions = CollectionOptions.encryptedCollection(patientSchema); + +mongoTemplate.createCollection(Patient.class, collectionOptions); <1> +---- +<1> Using the template to create the collection may prevent capturing generated keyIds. In this case, render the `Document` from the options and use the `createEncryptedCollection(...)` method via the encryption library. + +The `Queryable` annotation allows defining the allowed query types for encrypted fields. +`@RangeEncrypted` is a combination of `@Encrypted` and `@Queryable` for fields allowing `range` queries. +It is possible to create custom annotations out of the provided ones.
+==== + +MongoDB Collection Info:: ++ +==== +[source,java,indent=0,subs="verbatim,quotes",role="tertiary"] +---- +{ + name: 'patient', + type: 'collection', + options: { + encryptedFields: { + escCollection: 'enxcol_.test.esc', + ecocCollection: 'enxcol_.test.ecoc', + fields: [ + { + keyId: ..., + path: 'ssn', + bsonType: 'string', + queries: [ { queryType: 'equality', contention: Long('0') } ] + }, + { + keyId: ..., + path: 'age', + bsonType: 'int', + queries: [ { queryType: 'range', contention: Long('8'), min: 0, max: 150 } ] + }, + { + keyId: ..., + path: 'address.sign', + bsonType: 'long', + queries: [ { queryType: 'range', contention: Long('2'), min: Long('-10'), max: Long('10') } ] + } + ] + } + } +} +---- +==== +====== + +[NOTE] +==== +- It is not possible to use both QE and CSFLE within the same collection. +- It is not possible to query a `range` indexed field with an `equality` operator. +- It is not possible to query an `equality` indexed field with a `range` operator. +- It is not possible to set `bypassAutoEncryption(true)`. +- It is not possible to use self-maintained encryption keys via `@Encrypted` in combination with Queryable Encryption. +- Contention is only optional on the server side; the client requires you to set the value (default is `8`). +- Additional options such as `min` and `max` need to match the actual field type. Make sure to use `$numberLong` etc. to ensure target types when parsing BSON strings. +- Queryable Encryption will add an extra field `__safeContent__` to each of your documents. +Unless explicitly excluded, the field will be loaded into memory when retrieving results. +==== + +[[mongo.encryption.queryable.automatic]] +=== Automatic Encryption (QE) + +MongoDB supports Queryable Encryption out of the box using the MongoDB driver with its Automatic Encryption feature. +Automatic Encryption requires a xref:mongodb/mapping/mapping-schema.adoc[JSON Schema] that allows performing encrypted read and write operations without an explicit en-/decryption step. + +All you need to do is create the collection according to the MongoDB documentation. +You may use the techniques outlined in the section above to create the required configuration. + +[[mongo.encryption.queryable.manual]] +=== Explicit Encryption (QE) + +Explicit encryption uses the MongoDB driver's encryption library (`org.mongodb:mongodb-crypt`) to perform encryption and decryption tasks based on the meta information provided by annotations within the domain model. + +[NOTE] +==== +There is no official support for using Explicit Queryable Encryption. +The audacious user may combine `@Encrypted` and `@Queryable` with `@ValueConverter(MongoEncryptionConverter.class)` at their own risk. +==== + [[mongo.encryption.explicit-setup]] -=== MongoEncryptionConverter Setup +[[mongo.encryption.converter-setup]] +== MongoEncryptionConverter Setup The converter setup for `MongoEncryptionConverter` requires a few steps as several components are involved. The bean setup consists of the following: @@ -124,7 +268,6 @@ The bean setup consists of the following: 2. A `MongoEncryptionConverter` instance configured with `ClientEncryption` and an `EncryptionKeyResolver`. 3. A `PropertyValueConverterFactory` that uses the registered `MongoEncryptionConverter` bean. -A side effect of using annotated key resolution is that the `@ExplicitEncrypted` annotation does not need to specify an alt key name.
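Ahead of the full sample configuration referenced below, a minimal sketch of step 2 using the annotation-based key resolver also exercised in the integration tests above; the `keyByPath(...)` lookup is a hypothetical helper you would back with your own key management:

[source,java,indent=0,subs="verbatim,quotes"]
----
@Bean
MongoEncryptionConverter encryptingConverter(MongoClientEncryption mongoClientEncryption) {

	// resolve the DEK from the annotated property; keyByPath(...) is a placeholder returning a BsonBinary key id
	EncryptionKeyResolver keyResolver = EncryptionKeyResolver
			.annotated(ctx -> EncryptionKey.keyId(keyByPath(ctx.getProperty().getFieldName())));

	return new MongoEncryptionConverter(mongoClientEncryption, keyResolver);
}
----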
The `EncryptionKeyResolver` uses an `EncryptionContext` providing access to the property allowing for dynamic DEK resolution. .Sample MongoEncryptionConverter Configuration diff --git a/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc index 9b6bfcf095..345b5dbb6c 100644 --- a/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc +++ b/src/main/antora/modules/ROOT/pages/mongodb/mongo-search-indexes.adoc @@ -84,7 +84,6 @@ VectorSearchOperation search = VectorSearchOperation.search("vector_index") <1> .vector( ... ) .numCandidates(150) .limit(10) - .quantization(SCALAR) .withSearchScore("score"); <3> AggregationResults results = mongoTemplate @@ -107,8 +106,7 @@ db.embedded_movies.aggregate([ "path": "plot_embedding", <1> "queryVector": [ ... ], "numCandidates": 150, - "limit": 10, - "quantization": "scalar" + "limit": 10 } }, { diff --git a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc index 1a4af7a60b..7d31acb2d4 100644 --- a/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc +++ b/src/main/antora/modules/ROOT/pages/repositories/core-concepts.adoc @@ -2,11 +2,3 @@ include::{commons}@data-commons::page$repositories/core-concepts.adoc[] [[mongodb.entity-persistence.state-detection-strategies]] include::{commons}@data-commons::page$is-new-state-detection.adoc[leveloffset=+1] - -[NOTE] -==== -Cassandra provides no means to generate identifiers upon inserting data. -As consequence, entities must be associated with identifier values. -Spring Data defaults to identifier inspection to determine whether an entity is new. -If you want to use xref:mongodb/auditing.adoc[auditing] make sure to either use xref:mongodb/template-crud-operations.adoc#mongo-template.optimistic-locking[Optimistic Locking] or implement `Persistable` for proper entity state detection. -====
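Closing the loop on the template changes above: the new `map((document, reader) -> …)` hook that the updated find and aggregation tests exercise is used roughly as sketched here, distilled from the new `shouldAggregateWithConverter` test; the collection name and `aggregation` pipeline stand in for the test fixtures:

[source,java,indent=0]
----
// combine the raw driver Document with the lazily materialized entity
AggregationResults<Optional<TagCount>> results = mongoTemplate.aggregateAndReturn(TagCount.class)
		.inCollection("aggregation-input") // placeholder for the test's INPUT_COLLECTION
		.by(aggregation) // any pipeline, e.g. the tag-count pipeline shown above
		.map((document, reader) -> Optional.of(reader.get()))
		.all();
----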