diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java index 43ccea532a..18dc7b923d 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/MeProcessor.java @@ -55,7 +55,11 @@ public FrontendMeInformation getUserInformation(@NonNull User user) { // User can use the dataset and can possibly upload ids for resolving datasetAblilites.put( dataset.getId(), - new FrontendDatasetAbility(user.isPermitted(dataset, Ability.PRESERVE_ID)) + new FrontendDatasetAbility( + user.isPermitted(dataset, Ability.PRESERVE_ID), + user.isPermitted(dataset, Ability.ENTITY_PREVIEW) && user.isPermitted(dataset, Ability.PRESERVE_ID), + user.isPermitted(dataset, Ability.QUERY_PREVIEW) + ) ); } @@ -93,6 +97,8 @@ public static class FrontendMeInformation { @NoArgsConstructor public static class FrontendDatasetAbility { private boolean canUpload; + private boolean canViewEntityPreview; + private boolean canViewQueryPreview; } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java index 119650f5dc..1e6ed1aa5a 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java @@ -6,10 +6,12 @@ import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Predicate; @@ -48,6 +50,7 @@ import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.config.ColumnConfig; import com.bakdata.conquery.models.config.ConqueryConfig; @@ -58,6 +61,7 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.ExecutionManager; @@ -312,6 +316,43 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc log.info("Patching {} ({}) with patch: {}", execution.getClass().getSimpleName(), execution, patch); + // If the patch shares the execution, we also share all subQueries + if (patch.getGroups() != null && !patch.getGroups().isEmpty()) { + + + for (ManagedExecutionId managedExecutionId : execution.getSubmitted().collectRequiredQueries()) { + final ManagedExecution subQuery = storage.getExecution(managedExecutionId); + + if (!subject.isPermitted(subQuery, Ability.READ)) { + log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subQuery.getId(), subject); + continue; + } + + final ConqueryPermission canReadQuery = subQuery.createPermission(Set.of(Ability.READ)); + + final Set groupsToShareWith = new HashSet<>(patch.getGroups()); + + // Find 
all groups the query is already shared with, so we do not remove them, as patch is absolute + for (Group group : storage.getAllGroups()) { + if (groupsToShareWith.contains(group.getId())){ + continue; + } + + final Set effectivePermissions = group.getEffectivePermissions(); + + if(effectivePermissions.stream().anyMatch(perm -> perm.implies(canReadQuery))) { + groupsToShareWith.add(group.getId()); + } + } + + final MetaDataPatch sharePatch = MetaDataPatch.builder() + .groups(new ArrayList<>(groupsToShareWith)) + .build(); + + patchQuery(subject, subQuery, sharePatch); + } + } + patch.applyTo(execution, storage, subject); storage.updateExecution(execution); } @@ -397,7 +438,9 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext */ public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) { - final Namespace namespace = datasetRegistry.get(dataset.getId()); + subject.authorize(dataset, Ability.ENTITY_PREVIEW); + subject.authorize(dataset, Ability.PRESERVE_ID); + final PreviewConfig previewConfig = datasetRegistry.get(dataset.getId()).getPreviewConfig(); final EntityPreviewForm form = EntityPreviewForm.create(entity, idKind, dateRange, sources, previewConfig.getSelects(), previewConfig.getTimeStratifiedSelects(), datasetRegistry); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/ExecutionStatus.java b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/ExecutionStatus.java index 4aec080f22..51f83c56f6 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/ExecutionStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/ExecutionStatus.java @@ -44,6 +44,8 @@ public abstract class ExecutionStatus { private String queryType; private SecondaryIdDescriptionId secondaryId; + private boolean containsDates; + /** * The urls under from which the result of the execution can be downloaded as soon as it finished successfully. diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java index 32d9d45c38..2984e904e0 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java @@ -46,7 +46,6 @@ public class FullExecutionStatus extends ExecutionStatus { */ private boolean canExpand; - private boolean containsDates; /** * Is set to the query description if the user can expand all included concepts. 
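A note on the patchQuery change above: MetaDataPatch.groups is applied as an absolute list, so sharing an execution re-shares all of its required sub-queries, and groups that can already read a sub-query are merged back in rather than dropped. A minimal sketch of that set arithmetic, with plain strings standing in for GroupId and for the effective-permission check (hypothetical names, not the real MetaDataPatch/Group API):

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SharePatchSemantics {

	/**
	 * @param requestedGroups groups named in the incoming patch
	 * @param alreadyReading  groups whose effective permissions already imply READ on the sub-query
	 */
	static Set<String> groupsToShareWith(List<String> requestedGroups, Set<String> alreadyReading) {
		// The patch replaces the whole group list instead of appending to it,
		// so existing readers must be carried over explicitly.
		final Set<String> result = new HashSet<>(requestedGroups);
		result.addAll(alreadyReading);
		return result;
	}

	public static void main(String[] args) {
		// Prints [admins, analysts] (iteration order unspecified).
		System.out.println(groupsToShareWith(List.of("analysts"), Set.of("admins")));
	}
}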
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java index 2dc2b2ee50..353cae748c 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQReusedQuery.java @@ -11,6 +11,7 @@ import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; +import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -74,6 +75,11 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { @Override public void resolve(QueryResolveContext context) { query = ((ManagedQuery) context.getStorage().getExecution(queryId)); + + if(query == null){ + throw new ConqueryError.ExecutionCreationResolveError(queryId); + } + resolvedQuery = query.getQuery(); // Yey recursion, because the query might consist of another CQReusedQuery or CQExternal diff --git a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java index d71b715036..130d05f1ee 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java +++ b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java @@ -1,11 +1,7 @@ package com.bakdata.conquery.io.mina; -import java.io.File; -import java.util.UUID; - import javax.validation.Validator; -import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.messages.network.NetworkMessage; import com.bakdata.conquery.models.worker.IdResolveContext; @@ -14,7 +10,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; -import com.fasterxml.jackson.databind.SerializationFeature; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -26,34 +21,26 @@ public class BinaryJacksonCoder implements CQCoder> { public BinaryJacksonCoder(IdResolveContext datasets, Validator validator, ObjectMapper objectMapper) { this.validator = validator; - this.writer = objectMapper - .writerFor(NetworkMessage.class); - this.reader = datasets - .injectIntoNew(objectMapper.readerFor(NetworkMessage.class)) - .without(Feature.AUTO_CLOSE_SOURCE); + writer = objectMapper.writerFor(NetworkMessage.class); + reader = datasets.injectIntoNew(objectMapper.readerFor(NetworkMessage.class)).without(Feature.AUTO_CLOSE_SOURCE); } @Override public Chunkable encode(NetworkMessage message) throws Exception { ValidatorHelper.failOnError(log, validator.validate(message)); - UUID id = message.getMessageId(); - Chunkable chunkable = new Chunkable(id, writer, message); - if(log.isTraceEnabled()) { - Jackson.MAPPER.writerFor(NetworkMessage.class).with(SerializationFeature.INDENT_OUTPUT).writeValue(new File("dumps/out_"+id+".json"), message); - } - return chunkable; + return new Chunkable(message.getMessageId(), writer, message); } @Override public NetworkMessage decode(ChunkedMessage message) throws Exception { - try(EndCheckableInputStream is = message.createInputStream()) { - Object obj = 
reader.readValue(is); - if(!is.isAtEnd()) { - throw new IllegalStateException("After reading the JSON message "+obj+" the buffer has still bytes available"); + try (EndCheckableInputStream is = message.createInputStream()) { + final Object obj = reader.readValue(is); + if (!is.isAtEnd()) { + throw new IllegalStateException("After reading the JSON message " + obj + " the buffer still has bytes available"); } ValidatorHelper.failOnError(log, validator.validate(obj)); - return (NetworkMessage)obj; + return (NetworkMessage) obj; } } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/mina/ChunkWriter.java b/backend/src/main/java/com/bakdata/conquery/io/mina/ChunkWriter.java index fdf9095dbe..44d1cd462d 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/mina/ChunkWriter.java +++ b/backend/src/main/java/com/bakdata/conquery/io/mina/ChunkWriter.java @@ -26,7 +26,7 @@ public class ChunkWriter extends ProtocolEncoderAdapter { @Getter @Setter - private int bufferSize = Ints.checkedCast(Size.megabytes(32).toBytes()); + private int bufferSize = Ints.checkedCast(Size.megabytes(2).toBytes()); private final SoftPool bufferPool = new SoftPool<>(() -> IoBuffer.allocate(bufferSize)); @SuppressWarnings("rawtypes") private final CQCoder coder; diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java index f8dd53c541..085db3eb29 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterManagerProvider.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.mode.StorageListener; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.jobs.JobManager; +import com.bakdata.conquery.models.worker.ClusterHealthCheck; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.ShardNodeInformation; @@ -22,32 +23,26 @@ public class ClusterManagerProvider implements ManagerProvider { public ClusterManager provideManager(ConqueryConfig config, Environment environment) { - JobManager jobManager = ManagerProvider.newJobManager(config); - InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator()); - ClusterState clusterState = new ClusterState(); - NamespaceHandler namespaceHandler = new ClusterNamespaceHandler(clusterState, config, creator); - DatasetRegistry datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator); + final JobManager jobManager = ManagerProvider.newJobManager(config); + final InternalObjectMapperCreator creator = ManagerProvider.newInternalObjectMapperCreator(config, environment.getValidator()); + final ClusterState clusterState = new ClusterState(); + final NamespaceHandler namespaceHandler = new ClusterNamespaceHandler(clusterState, config, creator); + final DatasetRegistry datasetRegistry = ManagerProvider.createDatasetRegistry(namespaceHandler, config, creator); creator.init(datasetRegistry); - ClusterConnectionManager connectionManager = new ClusterConnectionManager( - datasetRegistry, jobManager, environment.getValidator(), config, creator, clusterState - ); - ImportHandler importHandler = new ClusterImportHandler(config, datasetRegistry); - StorageListener extension = new ClusterStorageListener(jobManager, 
datasetRegistry); - Supplier> nodeProvider = () -> clusterState.getShardNodes().values(); - List adminTasks = List.of(new ReportConsistencyTask(clusterState)); - - DelegateManager delegate = new DelegateManager<>( - config, - environment, - datasetRegistry, - importHandler, - extension, - nodeProvider, - adminTasks, - creator, - jobManager - ); + final ClusterConnectionManager connectionManager = + new ClusterConnectionManager(datasetRegistry, jobManager, environment.getValidator(), config, creator, clusterState); + + final ImportHandler importHandler = new ClusterImportHandler(config, datasetRegistry); + final StorageListener extension = new ClusterStorageListener(jobManager, datasetRegistry); + final Supplier> nodeProvider = () -> clusterState.getShardNodes().values(); + final List adminTasks = List.of(new ReportConsistencyTask(clusterState)); + + final DelegateManager + delegate = + new DelegateManager<>(config, environment, datasetRegistry, importHandler, extension, nodeProvider, adminTasks, creator, jobManager); + + environment.healthChecks().register("cluster", new ClusterHealthCheck(clusterState)); return new ClusterManager(delegate, connectionManager); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/Ability.java b/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/Ability.java index f65c0c9984..529b6ae880 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/Ability.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/Ability.java @@ -19,6 +19,9 @@ public enum Ability { DOWNLOAD, // Allow download of per entity results PRESERVE_ID, // Needs extra implementation: Allow the user to see the real id of entities and externally resolve real ids into conquery + ENTITY_PREVIEW, + QUERY_PREVIEW, //TODO not yet implemented + // FormConfig specific MODIFY; diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/AbilitySets.java b/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/AbilitySets.java index 6032ac4830..e04a7c17f2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/AbilitySets.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/permissions/AbilitySets.java @@ -7,29 +7,15 @@ @UtilityClass public class AbilitySets { - public static final EnumSet QUERY_CREATOR = EnumSet.of( - Ability.READ, - Ability.DELETE, - Ability.SHARE, - Ability.TAG, - Ability.CANCEL, - Ability.LABEL - ); + public static final EnumSet QUERY_CREATOR = EnumSet.of(Ability.READ, Ability.DELETE, Ability.SHARE, Ability.TAG, Ability.CANCEL, Ability.LABEL); - public static final EnumSet FORM_CONFIG_CREATOR = EnumSet.of( - Ability.READ, - Ability.DELETE, - Ability.SHARE, - Ability.TAG, - Ability.LABEL, - Ability.MODIFY - ); + public static final EnumSet + FORM_CONFIG_CREATOR = + EnumSet.of(Ability.READ, Ability.DELETE, Ability.SHARE, Ability.TAG, Ability.LABEL, Ability.MODIFY); - public static final EnumSet SHAREHOLDER = EnumSet.of( - Ability.READ, - Ability.TAG, - Ability.LABEL - ); + public static final EnumSet SHAREHOLDER = EnumSet.of(Ability.READ, Ability.TAG, Ability.LABEL); - public static final EnumSet DATASET_CREATOR = EnumSet.of(Ability.READ, Ability.DOWNLOAD, Ability.PRESERVE_ID); + public static final EnumSet + DATASET_CREATOR = + EnumSet.of(Ability.READ, Ability.DOWNLOAD, Ability.PRESERVE_ID, Ability.ENTITY_PREVIEW, Ability.QUERY_PREVIEW); } diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/dictionary/DictionaryMapping.java b/backend/src/main/java/com/bakdata/conquery/models/dictionary/DictionaryMapping.java index 8ccf1ae511..df48a7b59a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/dictionary/DictionaryMapping.java +++ b/backend/src/main/java/com/bakdata/conquery/models/dictionary/DictionaryMapping.java @@ -1,6 +1,8 @@ package com.bakdata.conquery.models.dictionary; +import java.util.stream.IntStream; + import com.bakdata.conquery.models.events.stores.root.IntegerStore; import com.bakdata.conquery.models.events.stores.root.StringStore; import it.unimi.dsi.fastutil.ints.Int2IntMap; @@ -40,17 +42,17 @@ public static DictionaryMapping createAndImport(Dictionary from, Dictionary into int newIds = 0; - Int2IntMap source2Target = new Int2IntOpenHashMap(from.size()); + final Int2IntMap source2Target = new Int2IntOpenHashMap(from.size()); source2Target.defaultReturnValue(-1); - Int2IntMap target2Source = new Int2IntOpenHashMap(from.size()); + final Int2IntMap target2Source = new Int2IntOpenHashMap(from.size()); target2Source.defaultReturnValue(-1); for (int id = 0; id < from.size(); id++) { - byte[] value = from.getElement(id); + final byte[] value = from.getElement(id); int targetId = into.getId(value); //if id was unknown until now @@ -92,22 +94,21 @@ public IntCollection target() { * Mutably applies mapping to store. */ public void applyToStore(StringStore from, IntegerStore to) { - for (int event = 0; event < from.getLines(); event++) { - if (!from.has(event)) { - to.setNull(event); - continue; - } - - final int string = from.getString(event); - - int value = source2Target(string); - - if (value == -1) { - throw new IllegalStateException(String.format("Missing mapping for %s", string)); - } - - to.setInteger(event, value); - } + IntStream.range(0, from.getLines()) + .parallel() + .forEach(event -> { + if (!from.has(event)) { + to.setNull(event); + return; + } + final int string = from.getString(event); + final int value = source2Target(string); + + if (value == -1) { + throw new IllegalStateException(String.format("Missing mapping for %s", string)); + } + to.setInteger(event, value); + }); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java index fb709ad704..b346aaa8ba 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java @@ -51,6 +51,7 @@ import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonAlias; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.base.Preconditions; @@ -94,6 +95,10 @@ public abstract class ManagedExecution extends IdentifiableImpl { if (visitable instanceof CQConcept cqConcept) { - return cqConcept.isAggregateEventDates(); + return !cqConcept.isExcludeFromTimeAggregation(); } if (visitable instanceof CQExternal external) { @@ -373,7 +384,7 @@ private boolean containsDates(QueryDescription query) { }); } - private boolean canSubjectExpand(Subject subject, QueryDescription query) { + private static boolean canSubjectExpand(Subject subject, QueryDescription query) { NamespacedIdentifiableCollector 
namespacesIdCollector = new NamespacedIdentifiableCollector(); query.visit(namespacesIdCollector); @@ -401,9 +412,8 @@ public boolean isReadyToDownload() { @JsonIgnore public String getLabelWithoutAutoLabelSuffix() { - int idx; + final int idx; if (label != null && (idx = label.lastIndexOf(AUTO_LABEL_SUFFIX)) != -1) { - return label.substring(0, idx); } return label; diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java index 922fdf81ef..b626b88ddb 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java @@ -31,42 +31,41 @@ public interface Shareable extends Authorized { */ void setShared(boolean shared); - default , S extends Identifiable & Shareable & Authorized> Consumer sharer( - MetaStorage storage, - Subject subject) { + default , S extends Identifiable & Shareable & Authorized> Consumer sharer(MetaStorage storage, Subject subject) { if (!(this instanceof Identifiable)) { log.warn("Cannot share {} ({}) because it does not implement Identifiable", this.getClass(), this.toString()); return QueryUtils.getNoOpEntryPoint(); } - return (patch) -> { - if (patch != null && patch.getGroups() != null) { - S shareable = (S) this; - // Collect groups that do not have access to this instance and remove their probable permission - for (Group group1 : storage.getAllGroups()) { - if (patch.getGroups().contains(group1.getId())) { - continue; - } - log.trace("User {} unshares instance {} ({}) from owner {}.", subject, shareable.getClass().getSimpleName(), shareable.getId(), group1); + return (patch) -> { + if (patch == null || patch.getGroups() == null) { + return; + } - group1.removePermission(shareable.createPermission(AbilitySets.SHAREHOLDER)); + final S shareable = (S) this; + // Collect groups that do not have access to this instance and remove their probable permission + for (Group group : storage.getAllGroups()) { + if (patch.getGroups().contains(group.getId())) { + continue; } + log.trace("User {} unshares instance {} ({}) from owner {}.", subject, shareable.getClass().getSimpleName(), shareable.getId(), group); - if(!patch.getGroups().isEmpty()) { - // Resolve the provided groups - Set groups = patch.getGroups().stream().map(storage::getGroup).collect(Collectors.toSet()); + group.removePermission(shareable.createPermission(AbilitySets.SHAREHOLDER)); + } - for(Group group : groups) { - ConqueryPermission sharePermission = shareable.createPermission(AbilitySets.SHAREHOLDER); - group.addPermission(sharePermission); - log.trace("User {} shares instance {} ({}). Adding permission {} to owner {}.", subject, shareable.getClass().getSimpleName(), shareable.getId(), sharePermission, group); - } - } + // Resolve the provided groups + final Set groups = patch.getGroups().stream().map(storage::getGroup).collect(Collectors.toSet()); + + for(Group group : groups) { + final ConqueryPermission sharePermission = shareable.createPermission(AbilitySets.SHAREHOLDER); + group.addPermission(sharePermission); - this.setShared(!patch.getGroups().isEmpty()); + log.trace("User {} shares instance {} ({}). 
Adding permission {} to owner {}.", subject, shareable.getClass().getSimpleName(), shareable.getId(), sharePermission, group); } + + setShared(!patch.getGroups().isEmpty()); }; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java b/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java index b14e51b818..3cf8cb2bb8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/jobs/ImportJob.java @@ -11,6 +11,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import javax.ws.rs.BadRequestException; @@ -78,6 +79,8 @@ public class ImportJob extends Job { private final PreprocessedData container; private final ConqueryConfig config; + private final IdMutex sharedDictionaryLocks; + private static final int NUMBER_OF_STEPS = /* directly in execute = */4; @@ -148,7 +151,8 @@ else if (processedImport != null) { header, dictionaries, container, - config + config, + sharedDictionaryLocks ); } } @@ -197,63 +201,65 @@ private static Map createLocalIdReplacements(Map importDictionaries(DistributedNamespace namespace, Map dicts, Column[] columns, String importName, Table table) { + private static Map importDictionaries(DistributedNamespace namespace, Map dicts, Column[] columns, String importName, Table table, IdMutex sharedDictionaryLocks) { // Empty Maps are Coalesced to null by Jackson if (dicts == null) { return Collections.emptyMap(); } - final Map out = new HashMap<>(); + final Map out = new ConcurrentHashMap<>(); log.debug("BEGIN importing {} Dictionaries", dicts.size()); - for (Column column : columns) { + // Might not have an underlying Dictionary (eg Singleton, direct-Number) + // but could also be an error :/ Most likely the former + // It's a shared dictionary + // This should never fail, because the dictionary is pre-created in the replacement generation step - if (column.getType() != MajorTypeId.STRING) { - continue; - } + Arrays.stream(columns) + .parallel() + .filter(column -> column.getType() == MajorTypeId.STRING) + .filter(col -> col.getSharedDictionary() == null) + .map(col -> dicts.get(col.getName())) + .filter(Objects::nonNull) + .forEach(dictionary -> { + // Normal Dictionary -> no merge necessary, just distribute + distributeDictionary(namespace, dictionary); + }); - // Might not have an underlying Dictionary (eg Singleton, direct-Number) - // but could also be an error :/ Most likely the former - final Dictionary importDictionary = dicts.get(column.getName()); + Arrays.stream(columns) + .parallel() + .filter(column -> column.getType() == MajorTypeId.STRING) + .filter(col -> col.getSharedDictionary() != null) + .filter(col -> dicts.containsKey(col.getName())) + .forEach(column -> { + final Dictionary importDictionary = dicts.get(column.getName()); - if (importDictionary == null) { - log.trace("No Dictionary for {}", column); - continue; - } + final String sharedDictionaryName = column.getSharedDictionary(); + log.debug("Column[{}.{}.{}] part of shared Dictionary[{}]", table.getId(), importName, column.getName(), sharedDictionaryName); + final DictionaryId dictionaryId = new DictionaryId(namespace.getDataset().getId(), sharedDictionaryName); + final DictionaryMapping mapping; - if (column.getSharedDictionary() == null) { - // Normal Dictionary -> no merge necessary, just distribute - distributeDictionary(namespace, importDictionary); - } - else { - // 
It's a shared dictionary - - final String sharedDictionaryName = column.getSharedDictionary(); + // We have to lock here, as sibling columns might both use the same shared-dictionary + try (IdMutex.Locked lock = sharedDictionaryLocks.acquire(dictionaryId)) { + final Dictionary sharedDictionary = namespace.getStorage().getDictionary(dictionaryId); - log.debug("Column[{}.{}.{}] part of shared Dictionary[{}]", table.getId(), importName, column.getName(), sharedDictionaryName); + ResourceUtil.throwNotFoundIfNull(dictionaryId, sharedDictionary); + log.trace("Merging into shared Dictionary[{}]", sharedDictionary); - final DictionaryId dictionaryId = new DictionaryId(namespace.getDataset().getId(), sharedDictionaryName); - final Dictionary sharedDictionary = namespace.getStorage().getDictionary(dictionaryId); + mapping = DictionaryMapping.createAndImport(importDictionary, sharedDictionary); + } - // This should never fail, becaus the dictionary is pre-created in the replacement generation step - ResourceUtil.throwNotFoundIfNull(dictionaryId, sharedDictionary); + if (mapping.getNumberOfNewIds() != 0) { + distributeDictionary(namespace, mapping.getTargetDictionary()); + } + out.put(column.getName(), mapping); + }); - log.trace("Merging into shared Dictionary[{}]", sharedDictionary); - DictionaryMapping mapping = DictionaryMapping.createAndImport(importDictionary, sharedDictionary); - - if (mapping.getNumberOfNewIds() != 0) { - distributeDictionary(namespace, mapping.getTargetDictionary()); - } - - out.put(column.getName(), mapping); - } - } - return out; } @@ -285,7 +291,7 @@ public void execute() throws JSONException, InterruptedException, IOException { log.info("Importing Dictionaries"); Map sharedDictionaryMappings = - importDictionaries(namespace, dictionaries.getDictionaries(), table.getColumns(), header.getName(), table); + importDictionaries(namespace, dictionaries.getDictionaries(), table.getColumns(), header.getName(), table, sharedDictionaryLocks); log.info("Remapping Dictionaries {}", sharedDictionaryMappings.values()); @@ -482,33 +488,30 @@ private void remapToSharedDictionary(Map mappings, Ma final ProgressReporter subJob = getProgressReporter().subJob(mappings.size()); - for (Map.Entry entry : mappings.entrySet()) { - final String columnName = entry.getKey(); - final DictionaryMapping mapping = entry.getValue(); + // we need to find a new Type for the index-Column as it's going to be remapped and might change in size + mappings.entrySet().parallelStream() + .forEach(entry -> { + final String columnName = entry.getKey(); + final DictionaryMapping mapping = entry.getValue(); - final StringStore stringStore = (StringStore) values.get(columnName); + final StringStore stringStore = (StringStore) values.get(columnName); + log.debug("Remapping Column[{}] = {} with {}", columnName, stringStore, mapping); + final IntegerParser indexParser = new IntegerParser(config); + final IntSummaryStatistics statistics = mapping.target().intStream().summaryStatistics(); - log.debug("Remapping Column[{}] = {} with {}", columnName, stringStore, mapping); + indexParser.setLines(stringStore.getLines()); + indexParser.setMinValue(statistics.getMin()); + indexParser.setMaxValue(statistics.getMax()); - // we need to find a new Type for the index-Column as it's going to be remapped and might change in size - final IntegerParser indexParser = new IntegerParser(config); + final IntegerStore newType = indexParser.findBestType(); - final IntSummaryStatistics statistics = 
mapping.target().intStream().summaryStatistics(); + log.trace("Decided for {}", newType); - indexParser.setLines(stringStore.getLines()); + mapping.applyToStore(stringStore, newType); + stringStore.setIndexStore(newType); - indexParser.setMinValue(statistics.getMin()); - indexParser.setMaxValue(statistics.getMax()); - - final IntegerStore newType = indexParser.findBestType(); - - log.trace("Decided for {}", newType); - - mapping.applyToStore(stringStore, newType); - - stringStore.setIndexStore(newType); - subJob.report(1); - } + subJob.report(1); + }); } private Import createImport(PreprocessedHeader header, Map stores, Column[] columns, int size) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java index 5f8cb9845c..b32c0ca9e0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java @@ -1,10 +1,9 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import java.util.Collection; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.function.Predicate; @@ -70,21 +69,22 @@ public void execute() throws Exception { log.info("BEGIN update Matching stats for {} Concepts", concepts.size()); - // SubJobs collect into this Map. - // Just a guess-timate so we don't grow that often, this memory is very short lived so we can over commit. - final ConcurrentMap, MatchingStats.Entry> messages = new ConcurrentHashMap<>(concepts.size() * 5_000); + final Map, CompletableFuture> + subJobs = + concepts.stream() + .collect(Collectors.toMap(Functions.identity(), + concept -> CompletableFuture.runAsync(() -> { + final Map, MatchingStats.Entry> + matchingStats = + new HashMap<>(concept.countElements()); + calculateConceptMatches(concept, matchingStats, worker); - final Map, CompletableFuture> subJobs = - concepts - .stream() - .collect(Collectors.toMap( - Functions.identity(), - concept -> CompletableFuture.runAsync(() -> { - calculateConceptMatches(concept, messages, worker); - progressReporter.report(1); - }, worker.getJobsExecutorService()) - )); + worker.send(new UpdateElementMatchingStats(worker.getInfo().getId(), matchingStats)); + + progressReporter.report(1); + }, worker.getJobsExecutorService()) + )); log.debug("All jobs submitted. 
Waiting for completion."); @@ -117,14 +117,7 @@ public void execute() throws Exception { } } while (!all.isDone()); - log.debug("All threads are done."); - - if (messages.isEmpty()) { - log.warn("Results were empty."); - } - else { - worker.send(new UpdateElementMatchingStats(worker.getInfo().getId(), messages)); - } + log.debug("DONE collecting matching stats for {}", worker.getInfo().getDataset()); } @@ -153,8 +146,7 @@ private static void calculateConceptMatches(Concept concept, Map new MatchingStats.Entry()) - .addEvent(table, bucket, event, entity); + results.computeIfAbsent(concept, (ignored) -> new MatchingStats.Entry()).addEvent(table, bucket, event, entity); continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java index 37cc0e5e63..46da29ffe1 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java @@ -61,12 +61,12 @@ public class ManagedQuery extends ManagedExecution implements SingleTableResult, */ private Long lastResultCount; - //TODO this can actually be known ahead and reduced to speedup queries. @JsonIgnore private transient Set involvedWorkers; @JsonIgnore private transient List columnDescriptions; + protected ManagedQuery(@JacksonInject(useInput = OptBoolean.FALSE) MetaStorage storage) { super(storage); } @@ -78,9 +78,7 @@ public ManagedQuery(Query query, User owner, Dataset submittedDataset, MetaStora @Override protected void doInitExecutable() { - query.resolve(new QueryResolveContext(getNamespace(), getConfig(), getStorage(), null)); - } @Override @@ -154,15 +152,15 @@ protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject */ public List generateColumnDescriptions() { Preconditions.checkArgument(isInitialized(), "The execution must have been initialized first"); - List columnDescriptions = new ArrayList<>(); + final List columnDescriptions = new ArrayList<>(); final Locale locale = I18n.LOCALE.get(); - PrintSettings settings = new PrintSettings(true, locale, getNamespace(), getConfig(), null); + final PrintSettings settings = new PrintSettings(true, locale, getNamespace(), getConfig(), null); - UniqueNamer uniqNamer = new UniqueNamer(settings); + final UniqueNamer uniqNamer = new UniqueNamer(settings); - // First add the id columns to the descriptor list. The are the first columns + // First add the id columns to the descriptor list. 
These are always the first columns for (ResultInfo header : getConfig().getIdColumns().getIdResultInfos()) { columnDescriptions.add(ColumnDescriptor.builder() .label(uniqNamer.getUniqueName(header)) diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/PrintSettings.java b/backend/src/main/java/com/bakdata/conquery/models/query/PrintSettings.java index e9676ff95e..132d3b435e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/PrintSettings.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/PrintSettings.java @@ -16,11 +16,9 @@ import lombok.AllArgsConstructor; import lombok.Getter; import lombok.ToString; -import lombok.With; @Getter @ToString(onlyExplicitlyIncluded = true) -@With @AllArgsConstructor(access = AccessLevel.PRIVATE) public class PrintSettings { diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java index f0cb08f30d..999aad39e5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java @@ -2,21 +2,28 @@ import java.time.LocalDate; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.CDateSet; +import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; +import com.bakdata.conquery.models.events.stores.root.StringStore; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; +import groovy.lang.Tuple3; +import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NonNull; @@ -43,6 +50,28 @@ public class QueryExecutionContext { @NonNull private Optional> queryDateAggregator = Optional.empty(); + @Getter(AccessLevel.NONE) + private final Map, int[]> multiSelectValuesCache = new ConcurrentHashMap<>(); + + + private static int[] findIds(Column column, Bucket bucket, String[] values) { + final int[] selectedValues = new int[values.length]; + + final StringStore type = (StringStore) bucket.getStore(column); + + for (int index = 0; index < values.length; index++) { + final String select = values[index]; + final int parsed = type.getId(select); + + selectedValues[index] = parsed; + } + + return selectedValues; + } + + public int[] getIdsFor(Column column, Bucket bucket, String[] values) { + return multiSelectValuesCache.computeIfAbsent(new Tuple3<>(column, bucket.getImp(), values), (ignored) -> findIds(column, bucket, values)); + } /** * Only set when in {@link com.bakdata.conquery.models.query.queryplan.SecondaryIdQueryPlan}, to the selected {@link SecondaryIdDescriptionId}. 
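The hunk above centralizes the multi-select id lookup: instead of one cache per MultiSelectFilterNode, all filter nodes of a query share a single ConcurrentHashMap on QueryExecutionContext, keyed by (column, import, filter values). A sketch of the pattern under simplified assumptions — plain strings instead of Column/Import/StringStore, and a record key instead of groovy.lang.Tuple3 (a record also guarantees value-based equality, which a raw String[] component would not):

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class MultiSelectIdCache {

	// Value-based composite key; the production code keys on (Column, Import, String[]).
	record Key(String column, String importName, List<String> values) {}

	private final Map<Key, int[]> cache = new ConcurrentHashMap<>();

	// "dictionary" stands in for StringStore#getId; unknown values resolve to -1.
	public int[] getIdsFor(String column, String importName, List<String> values, Map<String, Integer> dictionary) {
		return cache.computeIfAbsent(new Key(column, importName, values),
									 ignored -> values.stream().mapToInt(v -> dictionary.getOrDefault(v, -1)).toArray());
	}
}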
@@ -58,4 +87,5 @@ public List getEntityBucketsForTable(Entity entity, Table table) { boolean isQueryCancelled() { return executor.isCancelled(executionId); } + } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/MultiSelectFilterNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/MultiSelectFilterNode.java index 42e2601419..cf3c671762 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/MultiSelectFilterNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/filter/event/MultiSelectFilterNode.java @@ -2,16 +2,15 @@ import java.util.Arrays; import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; import javax.validation.constraints.NotNull; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.stores.root.StringStore; +import com.bakdata.conquery.models.query.QueryExecutionContext; +import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.filter.EventFilterNode; import lombok.Getter; import lombok.Setter; @@ -32,46 +31,35 @@ public class MultiSelectFilterNode extends EventFilterNode { private final boolean empty; - /** - * Shared between all executing Threads to maximize utilization. - */ - private ConcurrentMap selectedValuesCache; private int[] selectedValues; + private QueryExecutionContext context; public MultiSelectFilterNode(Column column, String[] filterValue) { super(filterValue); this.column = column; - selectedValuesCache = new ConcurrentHashMap<>(); empty = Arrays.stream(filterValue).anyMatch(Strings::isEmpty); } + @Override + public void init(Entity entity, QueryExecutionContext context) { + super.init(entity, context); + this.context = context; + selectedValues = null; + } + @Override public void setFilterValue(String[] strings) { - selectedValuesCache = new ConcurrentHashMap<>(); selectedValues = null; super.setFilterValue(strings); } @Override public void nextBlock(Bucket bucket) { - selectedValues = selectedValuesCache.computeIfAbsent(bucket.getImp(),imp -> findIds(bucket, filterValue)); + selectedValues = context.getIdsFor(column, bucket, filterValue); } - private int[] findIds(Bucket bucket, String[] values) { - final int[] selectedValues = new int[values.length]; - - final StringStore type = (StringStore) bucket.getStore(getColumn()); - for (int index = 0; index < values.length; index++) { - final String select = values[index]; - final int parsed = type.getId(select); - - selectedValues[index] = parsed; - } - - return selectedValues; - } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java index 6010af221a..ee966f1d7e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java @@ -268,11 +268,11 @@ public FullExecutionStatus buildStatusFull(Subject subject) { setStatusFull(status, subject); status.setQuery(getValuesQuery().getQuery()); - final PrintSettings printSettings = new PrintSettings(false, I18n.LOCALE.get(), getNamespace(), getConfig(), null, 
previewConfig::resolveSelectLabel); - status.setInfos(transformQueryResultToInfos(getInfoCardExecution(), printSettings)); - status.setTimeStratifiedInfos(toChronoInfos(previewConfig, getSubQueries(), printSettings)); + status.setInfos(transformQueryResultToInfos(getInfoCardExecution(), new PrintSettings(true, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel))); + + status.setTimeStratifiedInfos(toChronoInfos(previewConfig, getSubQueries(), new PrintSettings(false, I18n.LOCALE.get(), getNamespace(), getConfig(), null, previewConfig::resolveSelectLabel))); return status; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/ClusterHealthCheck.java b/backend/src/main/java/com/bakdata/conquery/models/worker/ClusterHealthCheck.java new file mode 100644 index 0000000000..ffcf3e5e04 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/ClusterHealthCheck.java @@ -0,0 +1,30 @@ +package com.bakdata.conquery.models.worker; + +import java.util.List; +import java.util.function.Predicate; + +import com.bakdata.conquery.mode.cluster.ClusterState; +import com.codahale.metrics.health.HealthCheck; +import lombok.Data; + +@Data +public class ClusterHealthCheck extends HealthCheck { + + private final ClusterState clusterState; + + @Override + protected Result check() throws Exception { + + final List disconnectedWorkers = + clusterState.getShardNodes().values().stream() + .filter(Predicate.not(ShardNodeInformation::isConnected)) + .map(ShardNodeInformation::toString) + .toList(); + + if (disconnectedWorkers.isEmpty()){ + return Result.healthy("All known shards are connected."); + } + + return Result.unhealthy("The shard(s) %s are no longer connected.".formatted(String.join(",", disconnectedWorkers))); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java b/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java index bca5ca6f2e..f58de5e5b7 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/SqlContext.java @@ -1,11 +1,12 @@ package com.bakdata.conquery.sql; import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.sql.conversion.Context; import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import lombok.Value; @Value -public class SqlContext { +public class SqlContext implements Context { SqlConnectorConfig config; SqlDialect sqlDialect; } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java index 9ab60cacde..29d9d26f5c 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conquery/SqlExecutionManager.java @@ -11,6 +11,7 @@ import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.query.ExecutionManager; +import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.sql.SqlContext; @@ -34,6 +35,8 @@ public SqlExecutionManager(final SqlContext context, MetaStorage metaStorage) { @Override public SqlManagedQuery runQuery(Namespace namespace, QueryDescription query, User user, Dataset submittedDataset, ConqueryConfig config, boolean 
system) { + // required for properly setting date aggregation action in all nodes of the query graph + query.resolve(new QueryResolveContext(namespace, config, metaStorage, null)); SqlManagedQuery execution = createExecution(query, user, submittedDataset, system); execution.initExecutable(namespace, config); execution.start(); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/Context.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Context.java new file mode 100644 index 0000000000..8104e876a3 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Context.java @@ -0,0 +1,7 @@ +package com.bakdata.conquery.sql.conversion; + +/** + * Marker for a conversion context. + */ +public interface Context { +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/Conversions.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Conversions.java new file mode 100644 index 0000000000..d8c64532bb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Conversions.java @@ -0,0 +1,31 @@ +package com.bakdata.conquery.sql.conversion; + +import java.util.List; + +import com.google.common.collect.MoreCollectors; +import lombok.Getter; + +/** + * Converts an input to a result with an applicable converter. + * + * @param type that can be converted + * @param type of the result + * @param context of the convertible + * @see Converter + */ +public abstract class Conversions { + + @Getter + private final List> converters; + + protected Conversions(List> converters) { + this.converters = converters; + } + + public R convert(C node, X context) { + return converters.stream() + .flatMap(converter -> converter.tryConvert(node, context).stream()) + .collect(MoreCollectors.onlyElement()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java index 2557139ab9..5b93a18228 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/Converter.java @@ -1,26 +1,37 @@ package com.bakdata.conquery.sql.conversion; import java.util.Optional; +import java.util.Set; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; /** * A converter converts an input into a result object if the input matches the conversion class. * * @param type that can be converted * @param type of the result + * @param context of the convertible */ -public interface Converter { +public interface Converter { - default Optional tryConvert(I input, ConversionContext context) { + default Optional tryConvert(I input, X context) { if (getConversionClass().isInstance(input)) { return Optional.ofNullable(convert(getConversionClass().cast(input), context)); } return Optional.empty(); } - Class getConversionClass(); + /** + * All steps this {@link Converter} requires. + * + * @return PREPROCESSING, AGGREGATION_SELECT and FINAL {@link ConceptCteStep} as defaults. Override if more steps are required. 
+ */ + default Set requiredSteps() { + return ConceptCteStep.MANDATORY_STEPS; + } + + Class getConversionClass(); - R convert(final C convert, final ConversionContext context); + R convert(final C convert, final X context); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java deleted file mode 100644 index cddd02e0d1..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/ConverterService.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.bakdata.conquery.sql.conversion; - -import java.util.List; - -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.google.common.collect.MoreCollectors; - -/** - * Converts an input to a result with an applicable converter. - * - * @param type that can be converted - * @param type of the result - * @see Converter - */ -public abstract class ConverterService { - - private final List> converters; - - protected ConverterService(List> converters) { - this.converters = converters; - } - - public R convert(C selectNode, ConversionContext context) { - return converters.stream() - .flatMap(converter -> converter.tryConvert(selectNode, context).stream()) - .collect(MoreCollectors.onlyElement()); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConversions.java similarity index 74% rename from backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java rename to backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConversions.java index f174a48265..d2aacbfd0e 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverterService.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConversions.java @@ -3,18 +3,18 @@ import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.models.config.SqlConnectorConfig; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; /** * Entry point for converting {@link QueryDescription} to an SQL query. 
*/ -public class NodeConverterService extends ConverterService { +public class NodeConversions extends Conversions { private final SqlDialect dialect; private final SqlConnectorConfig config; - public NodeConverterService(SqlDialect dialect, SqlConnectorConfig config) { + public NodeConversions(SqlDialect dialect, SqlConnectorConfig config) { super(dialect.getNodeConverters()); this.dialect = dialect; this.config = config; @@ -23,7 +23,7 @@ public NodeConverterService(SqlDialect dialect, SqlConnectorConfig config) { public ConversionContext convert(QueryDescription queryDescription) { ConversionContext initialCtx = ConversionContext.builder() .config(config) - .nodeConverterService(this) + .nodeConversions(this) .sqlDialect(this.dialect) .build(); return convert(queryDescription, initialCtx); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java index 7eb6cb0c0c..8b49b696fb 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/NodeConverter.java @@ -1,7 +1,7 @@ package com.bakdata.conquery.sql.conversion; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; /** * Interface for converters that implement the translation of a ConQuery query to an SQL query. @@ -12,6 +12,6 @@ * * @param type of the node to convert */ -public interface NodeConverter extends Converter { +public interface NodeConverter extends Converter { } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java index 8ee38b2f56..00daf28cfe 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/SqlConverter.java @@ -3,20 +3,20 @@ import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.models.config.SqlConnectorConfig; import com.bakdata.conquery.sql.SqlQuery; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import org.jooq.conf.ParamType; public class SqlConverter { - private final NodeConverterService nodeConverterService; + private final NodeConversions nodeConversions; public SqlConverter(SqlDialect dialect, SqlConnectorConfig config) { - this.nodeConverterService = new NodeConverterService(dialect, config); + this.nodeConversions = new NodeConversions(dialect, config); } public SqlQuery convert(QueryDescription queryDescription) { - ConversionContext converted = nodeConverterService.convert(queryDescription); + ConversionContext converted = nodeConversions.convert(queryDescription); return new SqlQuery(converted.getFinalQuery().getSQL(ParamType.INLINED)); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java deleted file mode 100644 index 9e398edad3..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/ConceptSelects.java +++ /dev/null @@ -1,81 +0,0 @@ -package 
com.bakdata.conquery.sql.conversion.context.selects; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; - -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.sql.models.ColumnDateRange; -import lombok.Builder; -import lombok.Value; -import lombok.With; -import org.jooq.Field; - -/** - * {@link ConceptSelects} represent all select fields of a {@link CQConcept}. - */ -@Value -@With -@Builder(toBuilder = true) -public class ConceptSelects implements Selects { - - Field primaryColumn; - Optional dateRestrictionRange; - Optional validityDate; - List> eventSelect; - List> eventFilter; - List> groupSelect; - List> groupFilter; - - @Override - public Selects withValidityDate(ColumnDateRange validityDate) { - return this.toBuilder() - .validityDate(Optional.of(validityDate)) - .build(); - } - - @Override - public ConceptSelects qualifiedWith(String qualifier) { - return builder() - .primaryColumn(this.mapFieldToQualifier(qualifier, this.primaryColumn)) - .dateRestrictionRange(this.dateRestrictionRange.map(dateRestriction -> dateRestriction.qualify(qualifier))) - .validityDate(this.validityDate.map(validityDate -> validityDate.qualify(qualifier))) - .eventSelect(this.mapFieldStreamToQualifier(qualifier, this.eventSelect.stream()).toList()) - .eventFilter(this.mapFieldStreamToQualifier(qualifier, this.eventFilter.stream()).toList()) - .groupSelect(this.mapFieldStreamToQualifier(qualifier, this.groupSelect.stream()).toList()) - .groupFilter(this.mapFieldStreamToQualifier(qualifier, this.groupFilter.stream()).toList()) - .build(); - } - - @Override - public List> all() { - return Stream.concat( - this.primaryColumnAndValidityDate(), - this.explicitSelects().stream() - ).toList(); - } - - private Stream> primaryColumnAndValidityDate() { - return Stream.concat( - Stream.of(this.primaryColumn), - this.validityDate.map(ColumnDateRange::toFields).stream().flatMap(Collection::stream) - ); - } - - @Override - public List> explicitSelects() { - - List> explicitSelects = new ArrayList<>(); - - dateRestrictionRange.ifPresent(columnDateRange -> explicitSelects.addAll(columnDateRange.toFields())); - explicitSelects.addAll(eventSelect); - explicitSelects.addAll(eventFilter); - explicitSelects.addAll(groupSelect); - explicitSelects.addAll(groupFilter); - - return explicitSelects; - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java deleted file mode 100644 index 461b44a631..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/MergedSelects.java +++ /dev/null @@ -1,106 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.selects; - -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; - -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.models.ColumnDateRange; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; -import lombok.Value; -import org.jooq.Field; -import org.jooq.impl.DSL; - -/** - * {@link MergedSelects} represent the combination of multiple {@link Selects}. 
- * Default selects fields of multiple {@link Selects} will be merged and special select fields like the primary column - * or validity dates will be unified or aggregated due to defined policies. - */ -@Value -@AllArgsConstructor(access = AccessLevel.PRIVATE) -public class MergedSelects implements Selects { - - public static final String PRIMARY_COLUMN_NAME = "primary_column"; - Field primaryColumn; - - /** - * An aggregated validity date of all validity dates of each {@link QueryStep} passed to the {@link MergedSelects} constructor. - */ - Optional validityDate; - - /** - * A merged list of all select fields, except the primary column and validity date, - * of the {@link QueryStep}'s passed to the {@link MergedSelects} constructor. - * Each field name is qualified with its associated CTE name. - */ - List> mergedSelects; - - public MergedSelects(List querySteps) { - this.primaryColumn = this.coalescePrimaryColumns(querySteps); - this.validityDate = this.extractValidityDates(querySteps); - this.mergedSelects = this.mergeSelects(querySteps); - } - - @Override - public Selects withValidityDate(ColumnDateRange validityDate) { - return new MergedSelects( - this.primaryColumn, - Optional.of(validityDate), - this.mergedSelects - ); - } - - @Override - public MergedSelects qualifiedWith(String cteName) { - return new MergedSelects( - this.mapFieldToQualifier(cteName, this.primaryColumn), - this.validityDate.map(columnDateRange -> columnDateRange.qualify(cteName)), - this.mapFieldStreamToQualifier(cteName, this.mergedSelects.stream()).toList() - ); - } - - @Override - public List> all() { - return Stream.concat( - this.primaryColumnAndValidityDate(), - this.mergedSelects.stream() - ).toList(); - } - - @Override - public List> explicitSelects() { - return this.mergedSelects; - } - - private Field coalescePrimaryColumns(List querySteps) { - List> primaryColumns = querySteps.stream() - .map(queryStep -> queryStep.getQualifiedSelects().getPrimaryColumn()) - .toList(); - return DSL.coalesce((Object) primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray()) - .as(PRIMARY_COLUMN_NAME); - } - - private Optional extractValidityDates(List querySteps) { - // TODO: date aggregation... 
- return querySteps.stream() - .filter(queryStep -> queryStep.getSelects().getValidityDate().isPresent()) - .map(queryStep -> queryStep.getQualifiedSelects().getValidityDate().get()) - .findFirst(); - } - - private List> mergeSelects(List queriesToJoin) { - return queriesToJoin.stream() - .flatMap(queryStep -> queryStep.getQualifiedSelects().explicitSelects().stream()) - .toList(); - } - - private Stream> primaryColumnAndValidityDate() { - return Stream.concat( - Stream.of(this.primaryColumn), - this.validityDate.map(ColumnDateRange::toFields).stream().flatMap(Collection::stream) - ); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java deleted file mode 100644 index 30d15b17b7..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/selects/Selects.java +++ /dev/null @@ -1,63 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.selects; - -import java.util.List; -import java.util.Optional; -import java.util.stream.Stream; - -import com.bakdata.conquery.sql.models.ColumnDateRange; -import org.jooq.Field; -import org.jooq.impl.DSL; - -public interface Selects { - - Field getPrimaryColumn(); - - Optional getValidityDate(); - - Selects withValidityDate(ColumnDateRange validityDate); - - /** - * Returns the selected columns as fully qualified reference. - * - * @param qualifier the table name that creates these selects - * @return selects as fully qualified reference - * @see Selects#mapFieldToQualifier(String, Field) - */ - Selects qualifiedWith(String qualifier); - - /** - * @return A list of all select fields including the primary column and validity date. - */ - List> all(); - - /** - * List of columns that the user explicitly referenced, either via a filter or a select. - * - * @return A list of all select fields WITHOUT implicitly selected columns like the primary column and validity date. - */ - List> explicitSelects(); - - default Stream> mapFieldStreamToQualifier(String qualifier, Stream> objectField) { - return objectField.map(column -> this.mapFieldToQualifier(qualifier, column)); - } - - /** - * Converts a select to its fully qualified reference. - * - *

- * <p>
- * Example:
- * <pre>{@code
- * with a as (select c1 - c2 as c
- * from t1)
- * select t1.c
- * from a
- * }</pre>
- * This function maps the select {@code c1 - c2 as c} to {@code t1.c}. - * - */ - default Field mapFieldToQualifier(String qualifier, Field field) { - return DSL.field(DSL.name(qualifier, field.getName())); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java deleted file mode 100644 index 9f0351c854..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/LogicalOperation.java +++ /dev/null @@ -1,6 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.step; - -public enum LogicalOperation { - AND, - OR -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java deleted file mode 100644 index 83ed4298ff..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStep.java +++ /dev/null @@ -1,42 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.step; - -import java.util.Collections; -import java.util.List; - -import com.bakdata.conquery.sql.conversion.context.selects.Selects; -import lombok.Builder; -import lombok.Value; -import org.jooq.Condition; -import org.jooq.Record; -import org.jooq.TableLike; -import org.jooq.impl.DSL; - -/** - * Intermediate representation of an SQL query. - */ -@Value -@Builder -public class QueryStep { - - String cteName; - Selects selects; - TableLike fromTable; - @Builder.Default - List conditions = Collections.emptyList(); - /** - * The CTEs referenced by this QueryStep - */ - List predecessors; - - public static TableLike toTableLike(String fromTableName) { - return DSL.table(DSL.name(fromTableName)); - } - - /** - * @return All selects re-mapped to a qualifier, which is the cteName of this QueryStep. - */ - public Selects getQualifiedSelects() { - return this.selects.qualifiedWith(this.cteName); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java deleted file mode 100644 index e41c457235..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/QueryStepTransformer.java +++ /dev/null @@ -1,59 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.step; - -import java.util.List; -import java.util.stream.Stream; - -import org.jooq.CommonTableExpression; -import org.jooq.DSLContext; -import org.jooq.Record; -import org.jooq.Select; -import org.jooq.impl.DSL; - -/** - * Transformer for translating the intermediate representation of {@link QueryStep} into the final SQL query. - */ -public class QueryStepTransformer { - - private final DSLContext dslContext; - - public QueryStepTransformer(DSLContext dslContext) { - this.dslContext = dslContext; - } - - /** - * Converts a given {@link QueryStep} into an executable SELECT statement. 
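(Illustration: the rendered result has the shape with <cte_1> as (...), <cte_2> as (...) select ... from <final_cte> where .... A minimal, self-contained jOOQ sketch under assumed names:)

import org.jooq.CommonTableExpression;
import org.jooq.DSLContext;
import org.jooq.Record;
import org.jooq.SQLDialect;
import org.jooq.Select;
import org.jooq.impl.DSL;

class RenderSketch {

	static Select<Record> sketch() {
		DSLContext dslContext = DSL.using(SQLDialect.POSTGRES);
		// hypothetical predecessor CTE: concept_cte as (select * from source_table)
		CommonTableExpression<Record> predecessor = DSL.name("concept_cte").as(
				dslContext.select(DSL.asterisk()).from(DSL.table(DSL.name("source_table")))
		);
		// renders to: with concept_cte as (select * from source_table) select * from concept_cte
		return dslContext.with(predecessor)
						 .select(DSL.asterisk())
						 .from(DSL.table(DSL.name("concept_cte")));
	}
}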
- */ - public Select toSelectQuery(QueryStep queryStep) { - return this.dslContext.with(this.constructPredecessorCteList(queryStep)) - .select(queryStep.getSelects().all()) - .from(queryStep.getFromTable()) - .where(queryStep.getConditions()); - } - - private List> constructPredecessorCteList(QueryStep queryStep) { - return queryStep.getPredecessors().stream() - .flatMap(predecessor -> this.toCteList(predecessor).stream()) - .toList(); - } - - private List> toCteList(QueryStep queryStep) { - return Stream.concat( - this.predecessorCtes(queryStep), - Stream.of(this.toCte(queryStep)) - ).toList(); - } - - private Stream> predecessorCtes(QueryStep queryStep) { - return queryStep.getPredecessors().stream() - .flatMap(predecessor -> this.toCteList(predecessor).stream()); - } - - private CommonTableExpression toCte(QueryStep queryStep) { - return DSL.name(queryStep.getCteName()).as( - this.dslContext.select(queryStep.getSelects().all()) - .from(queryStep.getFromTable()) - .where(queryStep.getConditions()) - ); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java deleted file mode 100644 index 84b103f955..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/step/StepJoiner.java +++ /dev/null @@ -1,89 +0,0 @@ -package com.bakdata.conquery.sql.conversion.context.step; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -import com.bakdata.conquery.apiv1.query.CQElement; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.selects.MergedSelects; -import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; -import org.jooq.Field; -import org.jooq.Record; -import org.jooq.Table; -import org.jooq.TableLike; -import org.jooq.TableOnConditionStep; -import org.jooq.impl.DSL; - -public class StepJoiner { - - public static ConversionContext joinChildren(Iterable children, ConversionContext context, LogicalOperation logicalOperation) { - - ConversionContext childrenContext = context; - for (CQElement childNode : children) { - childrenContext = context.getNodeConverterService().convert(childNode, childrenContext); - } - - List queriesToJoin = childrenContext.getQuerySteps(); - QueryStep andQueryStep = QueryStep.builder() - .cteName(constructJoinedQueryStepLabel(queriesToJoin, logicalOperation)) - .selects(new MergedSelects(queriesToJoin)) - .fromTable(constructJoinedTable(queriesToJoin, logicalOperation, context)) - .conditions(Collections.emptyList()) - .predecessors(queriesToJoin) - .build(); - - return context.withQuerySteps(List.of(andQueryStep)); - } - - private static String constructJoinedQueryStepLabel(List queriesToJoin, LogicalOperation logicalOperation) { - - String labelConnector = switch (logicalOperation) { - case AND -> "_AND_"; - case OR -> "_OR_"; - }; - - return queriesToJoin.stream() - .map(QueryStep::getCteName) - .collect(Collectors.joining(labelConnector)); - } - - private static TableLike constructJoinedTable(List queriesToJoin, LogicalOperation logicalOperation, ConversionContext context) { - - Table joinedQuery = getIntitialJoinTable(queriesToJoin); - - SqlFunctionProvider functionProvider = context.getSqlDialect().getFunction(); - JoinType joinType = switch (logicalOperation) { - case AND -> functionProvider::innerJoin; - case OR -> functionProvider::fullOuterJoin; - }; - - for (int i = 
0; i < queriesToJoin.size() - 1; i++) { - - QueryStep leftPartQS = queriesToJoin.get(i); - QueryStep rightPartQS = queriesToJoin.get(i + 1); - - Field leftPartPrimaryColumn = leftPartQS.getQualifiedSelects().getPrimaryColumn(); - Field rightPartPrimaryColumn = rightPartQS.getQualifiedSelects().getPrimaryColumn(); - - joinedQuery = joinType.join(joinedQuery, rightPartQS, leftPartPrimaryColumn, rightPartPrimaryColumn); - } - - return joinedQuery; - } - - private static Table getIntitialJoinTable(List queriesToJoin) { - return DSL.table(DSL.name(queriesToJoin.get(0).getCteName())); - } - - @FunctionalInterface - private interface JoinType { - TableOnConditionStep join( - Table leftPartQueryBase, - QueryStep rightPartQS, - Field leftPartPrimaryColumn, - Field rightPartPrimaryColumn - ); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java index 31404da21d..61d236738d 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQAndConverter.java @@ -2,9 +2,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.step.LogicalOperation; -import com.bakdata.conquery.sql.conversion.context.step.StepJoiner; +import com.bakdata.conquery.sql.conversion.model.LogicalOperation; +import com.bakdata.conquery.sql.conversion.model.QueryStepJoiner; public class CQAndConverter implements NodeConverter { @@ -16,9 +15,14 @@ public Class getConversionClass() { @Override public ConversionContext convert(CQAnd andNode, ConversionContext context) { if (andNode.getChildren().size() == 1) { - return context.getNodeConverterService().convert(andNode.getChildren().get(0), context); + return context.getNodeConversions().convert(andNode.getChildren().get(0), context); } - return StepJoiner.joinChildren(andNode.getChildren(), context, LogicalOperation.AND); + return QueryStepJoiner.joinChildren( + andNode.getChildren(), + context, + LogicalOperation.AND, + andNode.getDateAction() + ); } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java index 37e3c1d56f..5ce01a3634 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQDateRestrictionConverter.java @@ -3,18 +3,18 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQDateRestriction; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; public class CQDateRestrictionConverter implements NodeConverter { @Override - public ConversionContext convert(CQDateRestriction node, ConversionContext context) { - ConversionContext childContext = context.withDateRestrictionRange(CDateRange.of(node.getDateRange())); - return context.getNodeConverterService().convert(node.getChild(), childContext).withDateRestrictionRange(null); + public ConversionContext convert(CQDateRestriction dateRestrictionNode, 
ConversionContext context) {
+		ConversionContext childContext = context.withDateRestrictionRange(CDateRange.of(dateRestrictionNode.getDateRange()));
+		return context.getNodeConversions().convert(dateRestrictionNode.getChild(), childContext).withDateRestrictionRange(null);
	}

	@Override
	public Class<CQDateRestriction> getConversionClass() {
		return CQDateRestriction.class;
	}
+
}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java
index ce1db05bd7..ae97e0e726 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQNegationConverter.java
@@ -1,29 +1,42 @@
 package com.bakdata.conquery.sql.conversion.cqelement;

-import com.bakdata.conquery.apiv1.query.CQElement;
 import com.bakdata.conquery.apiv1.query.concept.specific.CQNegation;
+import com.bakdata.conquery.models.query.queryplan.DateAggregationAction;
 import com.bakdata.conquery.sql.conversion.NodeConverter;
-import com.bakdata.conquery.sql.conversion.context.ConversionContext;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.google.common.base.Preconditions;

 public class CQNegationConverter implements NodeConverter<CQNegation> {

-	@Override
-	public Class<CQNegation> getConversionClass() {
-		return CQNegation.class;
-	}
-
-	@Override
-	public ConversionContext convert(CQNegation negationNode, ConversionContext context) {
-		return this.convertChildWithNegationActive(negationNode.getChild(), context);
-	}
-
-	private ConversionContext convertChildWithNegationActive(CQElement child, ConversionContext context) {
-		// TODO: handle negation properly after GroupSelect/GroupFilter has been implemented
-		//  - anti-join vs. negating conditions
-		//  - handle double negation
-		return context.getNodeConverterService()
-					  .convert(child, context.withNegation(true))
-					  .withNegation(false);
-	}
+	@Override
+	public Class<CQNegation> getConversionClass() {
+		return CQNegation.class;
+	}
+
+	@Override
+	public ConversionContext convert(CQNegation negationNode, ConversionContext context) {
+
+		ConversionContext converted = context.getNodeConversions()
+											 .convert(negationNode.getChild(), context.withNegation(true))
+											 .withNegation(false);
+
+		Preconditions.checkArgument(
+				converted.getQuerySteps().size() == 1,
+				"As we convert only 1 child CQElement, there should be only a single query step."
+ ); + QueryStep queryStep = converted.getQuerySteps().get(0); + + if (negationNode.getDateAction() != DateAggregationAction.NEGATE) { + QueryStep withBlockedValidityDate = queryStep.toBuilder() + .selects(queryStep.getSelects().blockValidityDate()) + .build(); + return context.withQueryStep(withBlockedValidityDate); + } + QueryStep withInvertedValidityDate = converted.getSqlDialect() + .getDateAggregator() + .invertAggregatedIntervals(queryStep); + return context.withQueryStep(withInvertedValidityDate); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java index 7df18dedaf..66f14a0a16 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/CQOrConverter.java @@ -2,9 +2,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQOr; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.step.LogicalOperation; -import com.bakdata.conquery.sql.conversion.context.step.StepJoiner; +import com.bakdata.conquery.sql.conversion.model.LogicalOperation; +import com.bakdata.conquery.sql.conversion.model.QueryStepJoiner; public class CQOrConverter implements NodeConverter { @@ -16,9 +15,15 @@ public Class getConversionClass() { @Override public ConversionContext convert(CQOr orNode, ConversionContext context) { if (orNode.getChildren().size() == 1) { - return context.getNodeConverterService().convert(orNode.getChildren().get(0), context); + return context.getNodeConversions().convert(orNode.getChildren().get(0), context); } - return StepJoiner.joinChildren(orNode.getChildren(), context, LogicalOperation.OR); + return QueryStepJoiner.joinChildren( + orNode.getChildren(), + context, + LogicalOperation.OR, + orNode.getDateAction() + ); } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConversionContext.java similarity index 76% rename from backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java rename to backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConversionContext.java index cdd73d7f8f..d5bd32f8f7 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/context/ConversionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/ConversionContext.java @@ -1,12 +1,13 @@ -package com.bakdata.conquery.sql.conversion.context; +package com.bakdata.conquery.sql.conversion.cqelement; import java.util.List; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.config.SqlConnectorConfig; -import com.bakdata.conquery.sql.conversion.NodeConverterService; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.NodeConversions; import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; +import com.bakdata.conquery.sql.conversion.model.QueryStep; import lombok.Builder; import lombok.Singular; import lombok.Value; @@ -17,10 +18,10 @@ @Value @With @Builder(toBuilder = true) -public class ConversionContext { +public class ConversionContext implements Context { 
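(Background for the And/Or converters above: QueryStepJoiner roughly combines the child steps by joining them on their primary columns, an inner join for AND and a full outer join for OR, and coalesces the primary columns afterwards. A minimal jOOQ sketch with hypothetical CTE names child_1/child_2:)

import org.jooq.Condition;
import org.jooq.Field;
import org.jooq.Record;
import org.jooq.Table;
import org.jooq.TableOnConditionStep;
import org.jooq.impl.DSL;

class JoinSketch {

	static TableOnConditionStep<Record> orJoin() {
		Table<Record> left = DSL.table(DSL.name("child_1"));
		Table<Record> right = DSL.table(DSL.name("child_2"));
		Field<Object> leftPid = DSL.field(DSL.name("child_1", "primary_column"));
		Field<Object> rightPid = DSL.field(DSL.name("child_2", "primary_column"));

		Condition onPrimaryColumns = leftPid.eq(rightPid);
		// OR keeps entities present in either child; AND would use left.join(right).on(...)
		return left.fullOuterJoin(right).on(onPrimaryColumns);
	}
}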
SqlConnectorConfig config; - NodeConverterService nodeConverterService; + NodeConversions nodeConversions; SqlDialect sqlDialect; @Singular List querySteps; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/AnsiSqlDateAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/AnsiSqlDateAggregator.java new file mode 100644 index 0000000000..1f2db0d6eb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/AnsiSqlDateAggregator.java @@ -0,0 +1,96 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.List; + +import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; +import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingContext; +import com.bakdata.conquery.sql.conversion.dialect.IntervalPacker; +import com.bakdata.conquery.sql.conversion.dialect.SqlDateAggregator; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; + +public class AnsiSqlDateAggregator implements SqlDateAggregator { + + private final SqlFunctionProvider functionProvider; + private final IntervalPacker intervalPacker; + + public AnsiSqlDateAggregator(SqlFunctionProvider functionProvider, IntervalPacker intervalPacker) { + this.functionProvider = functionProvider; + this.intervalPacker = intervalPacker; + } + + @Override + public QueryStep apply( + QueryStep joinedStep, + List carryThroughSelects, + DateAggregationDates dateAggregationDates, + DateAggregationAction dateAggregationAction + ) { + SqlAggregationAction aggregationAction = switch (dateAggregationAction) { + case MERGE -> new MergeAggregateAction(joinedStep); + case INTERSECT -> new IntersectAggregationAction(joinedStep); + default -> throw new IllegalStateException("Unexpected date aggregation action: %s".formatted(dateAggregationAction)); + }; + + DateAggregationContext context = DateAggregationContext.builder() + .sqlAggregationAction(aggregationAction) + .carryThroughSelects(carryThroughSelects) + .dateAggregationDates(dateAggregationDates) + .dateAggregationTables(aggregationAction.tableNames()) + .primaryColumn(joinedStep.getQualifiedSelects().getPrimaryColumn()) + .functionProvider(this.functionProvider) + .intervalPacker(this.intervalPacker) + .build(); + + QueryStep finalDateAggregationStep = convertSteps(joinedStep, aggregationAction.dateAggregationCtes(), context); + if (aggregationAction.requiresIntervalPackingAfterwards()) { + return withIntervalPackingApplied(joinedStep, carryThroughSelects, finalDateAggregationStep); + } + return finalDateAggregationStep; + } + + @Override + public QueryStep invertAggregatedIntervals(QueryStep baseStep) { + + DateAggregationDates dateAggregationDates = DateAggregationDates.forSingleStep(baseStep); + if (dateAggregationDates.dateAggregationImpossible()) { + return baseStep; + } + + Selects baseStepQualifiedSelects = baseStep.getQualifiedSelects(); + DateAggregationTables dateAggregationTables = InvertStep.createTableNames(baseStep); + + DateAggregationContext context = DateAggregationContext.builder() + .sqlAggregationAction(null) // when inverting, an aggregation has already been applied + .carryThroughSelects(baseStepQualifiedSelects.getSqlSelects()) + .dateAggregationDates(dateAggregationDates) + 
.dateAggregationTables(dateAggregationTables) + .primaryColumn(baseStepQualifiedSelects.getPrimaryColumn()) + .functionProvider(this.functionProvider) + .intervalPacker(this.intervalPacker) + .build(); + + return convertSteps(baseStep, InvertStep.requiredSteps(), context); + } + + private QueryStep convertSteps(QueryStep baseStep, List dateAggregationCTEs, DateAggregationContext context) { + QueryStep finalDateAggregationStep = baseStep; + for (DateAggregationCte step : dateAggregationCTEs) { + finalDateAggregationStep = step.convert(context, finalDateAggregationStep); + context = context.withStep(step.getCteStep(), finalDateAggregationStep); + } + return finalDateAggregationStep; + } + + private QueryStep withIntervalPackingApplied(QueryStep joinedStep, List carryThroughSelects, QueryStep finalDateAggregationStep) { + IntervalPackingContext intervalPackingContext = new IntervalPackingContext( + joinedStep.getCteName(), + finalDateAggregationStep, + carryThroughSelects + ); + return this.intervalPacker.createIntervalPackingSteps(intervalPackingContext); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationContext.java new file mode 100644 index 0000000000..5645d31605 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationContext.java @@ -0,0 +1,63 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.dialect.IntervalPacker; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.QualifyingUtil; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.Builder; +import lombok.Value; +import org.jooq.Field; + +@Value +@Builder(toBuilder = true) +class DateAggregationContext implements Context { + + Field primaryColumn; + List carryThroughSelects; + DateAggregationTables dateAggregationTables; + DateAggregationDates dateAggregationDates; + @Builder.Default + Map> intervalMergeSteps = new HashMap<>(); + SqlAggregationAction sqlAggregationAction; + SqlFunctionProvider functionProvider; + IntervalPacker intervalPacker; + + public DateAggregationContext withStep(DateAggregationStep dateAggregationStep, QueryStep queryStep) { + this.intervalMergeSteps.computeIfAbsent(dateAggregationStep, k -> new ArrayList<>()) + .add(queryStep); + return this; + } + + public QueryStep getStep(DateAggregationStep dateAggregationStep) { + List querySteps = intervalMergeSteps.get(dateAggregationStep); + if (querySteps != null && !querySteps.isEmpty()) { + return querySteps.get(0); + } + return null; + } + + public List getSteps(DateAggregationStep dateAggregationStep) { + if (dateAggregationStep != MergeStep.NODE_NO_OVERLAP) { + throw new UnsupportedOperationException( + "Only MergeStep.NODE_NO_OVERLAP has multiple steps. Use getStep() for all other DateAggregationSteps." 
+ ); + } + return this.intervalMergeSteps.get(dateAggregationStep); + } + + public DateAggregationContext qualify(String qualifier) { + return this.toBuilder() + .primaryColumn(QualifyingUtil.qualify(this.primaryColumn, qualifier)) + .carryThroughSelects(QualifyingUtil.qualify(this.carryThroughSelects, qualifier)) + .dateAggregationDates(this.dateAggregationDates.qualify(qualifier)) + .build(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCte.java new file mode 100644 index 0000000000..a74a907d5e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCte.java @@ -0,0 +1,40 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.List; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; + +/** + * Base class for a CTE that is part of the date aggregation process. + */ +abstract class DateAggregationCte { + + public static final String RANGE_START = "RANGE_START"; + public static final String RANGE_END = "RANGE_END"; + + public QueryStep convert(DateAggregationContext context, QueryStep previous) { + + DateAggregationStep cteStep = getCteStep(); + DateAggregationTables dateAggregationTables = context.getDateAggregationTables(); + + // this way all selects are already qualified, and we don't need to care for that in the respective steps + context = context.qualify(dateAggregationTables.getFromTableOf(cteStep)); + + QueryStep.QueryStepBuilder builder = this.convertStep(context); + + if (cteStep != MergeStep.NODE_NO_OVERLAP) { + builder = builder.cteName(dateAggregationTables.cteName(cteStep)) + .predecessors(List.of(previous)); + } + if (cteStep != InvertStep.INVERT) { + builder = builder.fromTable(QueryStep.toTableLike(dateAggregationTables.getFromTableOf(cteStep))); + } + + return builder.build(); + } + + protected abstract QueryStep.QueryStepBuilder convertStep(DateAggregationContext context); + + public abstract DateAggregationStep getCteStep(); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCteConstructor.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCteConstructor.java new file mode 100644 index 0000000000..c8b92e38fa --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationCteConstructor.java @@ -0,0 +1,6 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +@FunctionalInterface +interface DateAggregationCteConstructor { + DateAggregationCte create(DateAggregationStep cteStep); +}; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationDates.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationDates.java new file mode 100644 index 0000000000..6fba04fe73 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationDates.java @@ -0,0 +1,86 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import 
com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import org.jooq.Field;
+
+/**
+ * {@link DateAggregationDates} keeps track of all validity dates of a list of {@link QueryStep}s that need to be aggregated.
+ */
+@Getter
+@AllArgsConstructor(access = AccessLevel.PRIVATE)
+public class DateAggregationDates {
+
+	private static final String RANGE_START = "RANGE_START";
+	private static final String RANGE_END = "RANGE_END";
+	private static final AtomicInteger validityDateCounter = new AtomicInteger();
+	private final List<ColumnDateRange> validityDates;
+
+	public static DateAggregationDates forSingleStep(QueryStep queryStep) {
+		List<ColumnDateRange> validityDates = queryStep.getSelects()
+													   .getValidityDate()
+													   .map(List::of)
+													   .orElse(Collections.emptyList());
+		return new DateAggregationDates(validityDates);
+	}
+
+	public static DateAggregationDates forSteps(List<QueryStep> querySteps) {
+		List<ColumnDateRange> validityDates = querySteps.stream()
+														.filter(queryStep -> queryStep.getSelects().getValidityDate().isPresent())
+														.map(DateAggregationDates::numerateValidityDate)
+														.toList();
+		return new DateAggregationDates(validityDates);
+	}
+
+	public boolean dateAggregationImpossible() {
+		return this.validityDates.isEmpty();
+	}
+
+	public List<Field<Date>> allStarts() {
+		return this.validityDates.stream().map(ColumnDateRange::getStart).toList();
+	}
+
+	public List<Field<Date>> allEnds() {
+		return this.validityDates.stream().map(ColumnDateRange::getEnd).toList();
+	}
+
+	public List<SqlSelect> allStartsAndEnds() {
+		return this.validityDates.stream()
+								 .flatMap(validityDate -> validityDate.toFields().stream())
+								 .map(FieldWrapper::new)
+								 .collect(Collectors.toList());
+	}
+
+	public DateAggregationDates qualify(String qualifier) {
+		List<ColumnDateRange> qualified = this.validityDates.stream()
+															.map(validityDate -> validityDate.qualify(qualifier))
+															.toList();
+		// validity dates are already numerated, so we don't need to apply a counter again
+		return new DateAggregationDates(qualified);
+	}
+
+	private static ColumnDateRange numerateValidityDate(QueryStep queryStep) {
+		ColumnDateRange validityDate = queryStep.getQualifiedSelects().getValidityDate().get();
+
+		if (validityDate.isSingleColumnRange()) {
+			return validityDate;
+		}
+
+		Field<Date> rangeStart = validityDate.getStart().as("%s_%s".formatted(RANGE_START, validityDateCounter.get()));
+		Field<Date> rangeEnd = validityDate.getEnd().as("%s_%s".formatted(RANGE_END, validityDateCounter.getAndIncrement()));
+
+		return ColumnDateRange.of(rangeStart, rangeEnd);
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationStep.java
new file mode 100644
index 0000000000..38620bd8be
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationStep.java
@@ -0,0 +1,30 @@
+package com.bakdata.conquery.sql.conversion.cqelement.aggregation;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import com.bakdata.conquery.sql.conversion.model.CteStep;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+
+interface DateAggregationStep extends CteStep {
+
+	String suffix();
+
+	DateAggregationStep predecessor();
+
+	static Map<DateAggregationStep, String>
createCteNameMap(QueryStep joinedTable, DateAggregationStep[] dateAggregationSteps) { + return Arrays.stream(dateAggregationSteps) + .collect(Collectors.toMap( + Function.identity(), + dateAggregationStep -> dateAggregationStep.cteName(joinedTable.getCteName()) + )); + } + + @Override + default String cteName(String nodeLabel) { + return "%s%s".formatted(nodeLabel, suffix()); + }; + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationTables.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationTables.java new file mode 100644 index 0000000000..5d7a8b809c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/DateAggregationTables.java @@ -0,0 +1,30 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.Map; + +import lombok.Value; + +@Value +class DateAggregationTables { + + String joinedTable; + Map cteNameMap; + + public DateAggregationTables(String joinedTable, Map cteNameMap) { + this.joinedTable = joinedTable; + this.cteNameMap = cteNameMap; + } + + public String cteName(DateAggregationStep dateAggregationStep) { + return this.cteNameMap.get(dateAggregationStep); + } + + public String getFromTableOf(DateAggregationStep dateAggregationStep) { + DateAggregationStep predecessor = dateAggregationStep.predecessor(); + if (predecessor == null) { + return joinedTable; + } + return this.cteNameMap.get(predecessor); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntermediateTableCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntermediateTableCte.java new file mode 100644 index 0000000000..9a031392ab --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntermediateTableCte.java @@ -0,0 +1,55 @@ + +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.List; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.Getter; +import org.jooq.Condition; +import org.jooq.Field; + +@Getter +class IntermediateTableCte extends DateAggregationCte { + + private final DateAggregationStep cteStep; + + public IntermediateTableCte(DateAggregationStep cteStep) { + this.cteStep = cteStep; + } + + @Override + protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) { + + List selects = context.getSqlAggregationAction().getIntermediateTableSelects( + context.getDateAggregationDates(), + context.getCarryThroughSelects() + ); + Selects intermediateTableSelects = new Selects( + context.getPrimaryColumn(), + selects + ); + + DateAggregationDates dateAggregationDates = context.getDateAggregationDates(); + List> allStarts = dateAggregationDates.allStarts(); + List> allEnds = dateAggregationDates.allEnds(); + + SqlFunctionProvider functionProvider = context.getFunctionProvider(); + Condition startBeforeEnd = functionProvider.greatest(allStarts).lessThan(functionProvider.least(allEnds)); + + Condition startIsNull = allStarts.stream() + .map(Field::isNull) + .reduce(Condition::or) + .orElseThrow(); + + Condition intermediateTableCondition = startIsNull.orNot(startBeforeEnd); + + return 
QueryStep.builder() + .selects(intermediateTableSelects) + .conditions(List.of(intermediateTableCondition)); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectAggregationAction.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectAggregationAction.java new file mode 100644 index 0000000000..42bf6bfd3e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectAggregationAction.java @@ -0,0 +1,76 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +class IntersectAggregationAction implements SqlAggregationAction { + + private final QueryStep joinedStep; + + @Override + public DateAggregationTables tableNames() { + return IntersectStep.tableNames(this.joinedStep); + } + + @Override + public List dateAggregationCtes() { + return IntersectStep.requiredSteps(); + } + + @Override + public ColumnDateRange getOverlapValidityDate(DateAggregationDates dateAggregationDates, SqlFunctionProvider functionProvider) { + + Field rangeStart = functionProvider.greatest(dateAggregationDates.allStarts()); + Field rangeEnd = functionProvider.least(dateAggregationDates.allEnds()); + + return ColumnDateRange.of( + rangeStart.as(DateAggregationCte.RANGE_START), + rangeEnd.as(DateAggregationCte.RANGE_END) + ); + } + + @Override + public List getIntermediateTableSelects(DateAggregationDates dateAggregationDates, List carryThroughSelects) { + + List nulledRangeStartAndEnd = + Stream.of( + DSL.inline(null, Date.class).as(DateAggregationCte.RANGE_START), + DSL.inline(null, Date.class).as(DateAggregationCte.RANGE_END) + ) + .map(FieldWrapper::new) + .collect(Collectors.toList()); + + return Stream.of(nulledRangeStartAndEnd, carryThroughSelects) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + } + + @Override + public List getNoOverlapSelects(DateAggregationContext dateAggregationContext) { + return List.of(dateAggregationContext.getStep(IntersectStep.INTERMEDIATE_TABLE)); + } + + @Override + public QueryStep getOverlapStep(DateAggregationContext dateAggregationContext) { + return dateAggregationContext.getStep(IntersectStep.OVERLAP); + } + + @Override + public boolean requiresIntervalPackingAfterwards() { + return false; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectStep.java new file mode 100644 index 0000000000..d30d0456e3 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/IntersectStep.java @@ -0,0 +1,45 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import 
lombok.Getter; +import lombok.RequiredArgsConstructor; + +@Getter +@RequiredArgsConstructor +enum IntersectStep implements DateAggregationStep { + + OVERLAP("_overlap", OverlapCte::new, null), + INTERMEDIATE_TABLE("_no_overlap", IntermediateTableCte::new, null), + MERGE("_merge", MergeCte::new, OVERLAP); + + private final String suffix; + @Getter + private final DateAggregationCteConstructor stepConstructor; + private final IntersectStep predecessor; + + @Override + public String suffix() { + return this.suffix; + } + + @Override + public DateAggregationStep predecessor() { + return this.predecessor; + } + + static List requiredSteps() { + return Arrays.stream(values()) + .map(cteStep -> cteStep.getStepConstructor().create(cteStep)) + .toList(); + } + + static DateAggregationTables tableNames(QueryStep joinedTable) { + Map cteNameMap = DateAggregationStep.createCteNameMap(joinedTable, values()); + return new DateAggregationTables(joinedTable.getCteName(), cteNameMap); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertCte.java new file mode 100644 index 0000000000..e08e7a3c1c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertCte.java @@ -0,0 +1,96 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QualifyingUtil; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import lombok.Getter; +import org.jooq.Condition; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.TableLike; +import org.jooq.TableOnConditionStep; +import org.jooq.impl.DSL; + +/** + * Concept for date range inversion taken from Inverting date ranges. 
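(The idea behind the inversion, sketched: number each entity's date ranges, full-join the numbered rows against themselves shifted by one row, and emit the gap between one range's end and the next range's start, padding the edges with the dialect's min/max dates. A self-contained Java sketch of that logic on sorted, disjoint ranges; the min/max constants stand in for getMinDateExpression()/getMaxDateExpression():)

import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

class InvertSketch {

	record Range(LocalDate start, LocalDate end) {}

	static List<Range> invert(List<Range> sortedRanges) {
		LocalDate min = LocalDate.of(1, 1, 1);      // stand-in for the dialect's min date
		LocalDate max = LocalDate.of(9999, 12, 31); // stand-in for the dialect's max date

		List<Range> gaps = new ArrayList<>();
		LocalDate previousEnd = min;
		for (Range range : sortedRanges) {
			gaps.add(new Range(previousEnd, range.start())); // gap before the current range
			previousEnd = range.end();
		}
		gaps.add(new Range(previousEnd, max)); // trailing gap up to the max date
		return gaps;
	}
}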
+ */ +@Getter +class InvertCte extends DateAggregationCte { + + public static final String ROWS_LEFT_TABLE_NAME = "rows_left"; + public static final String ROWS_RIGHT_TABLE_NAME = "rows_right"; + public static final String PRIMARY_COLUMN_FIELD_NAME = "primary_column"; + + private final DateAggregationStep cteStep; + + public InvertCte(DateAggregationStep cteStep) { + this.cteStep = cteStep; + } + + @Override + protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) { + + QueryStep rowNumberStep = context.getStep(InvertStep.ROW_NUMBER); + + Field primaryColumn = context.getPrimaryColumn(); + Field leftPrimaryColumn = QualifyingUtil.qualify(primaryColumn, ROWS_LEFT_TABLE_NAME); + Field rightPrimaryColumn = QualifyingUtil.qualify(primaryColumn, ROWS_RIGHT_TABLE_NAME); + Field coalescedPrimaryColumn = DSL.coalesce(leftPrimaryColumn, rightPrimaryColumn) + .as(PRIMARY_COLUMN_FIELD_NAME); + + Selects invertSelects = getInvertSelects(rowNumberStep, coalescedPrimaryColumn, context); + TableOnConditionStep fromTable = selfJoinWithShiftedRows(leftPrimaryColumn, rightPrimaryColumn, rowNumberStep); + + return QueryStep.builder() + .selects(invertSelects) + .fromTable(fromTable); + } + + private Selects getInvertSelects( + QueryStep rowNumberStep, + Field coalescedPrimaryColumn, + DateAggregationContext context + ) { + + SqlFunctionProvider functionProvider = context.getFunctionProvider(); + ColumnDateRange validityDate = rowNumberStep.getSelects().getValidityDate().get(); + + Field rangeStart = DSL.coalesce( + QualifyingUtil.qualify(validityDate.getEnd(), ROWS_LEFT_TABLE_NAME), + functionProvider.toDateField(functionProvider.getMinDateExpression()) + ).as(DateAggregationCte.RANGE_START); + + Field rangeEnd = DSL.coalesce( + QualifyingUtil.qualify(validityDate.getStart(), ROWS_RIGHT_TABLE_NAME), + functionProvider.toDateField(functionProvider.getMaxDateExpression()) + ).as(DateAggregationCte.RANGE_END); + + return new Selects( + coalescedPrimaryColumn, + Optional.of(ColumnDateRange.of(rangeStart, rangeEnd)), + context.getCarryThroughSelects() + ); + } + + private TableOnConditionStep selfJoinWithShiftedRows(Field leftPrimaryColumn, Field rightPrimaryColumn, QueryStep rowNumberStep) { + + Field leftRowNumber = DSL.field(DSL.name(ROWS_LEFT_TABLE_NAME, RowNumberCte.ROW_NUMBER_FIELD_NAME), Integer.class) + .plus(1); + Field rightRowNumber = DSL.field(DSL.name(ROWS_RIGHT_TABLE_NAME, RowNumberCte.ROW_NUMBER_FIELD_NAME), Integer.class); + + Condition joinCondition = leftPrimaryColumn.eq(rightPrimaryColumn) + .and(leftRowNumber.eq(rightRowNumber)); + + TableLike rowNumberTable = QueryStep.toTableLike(rowNumberStep.getCteName()); + return rowNumberTable.asTable(ROWS_LEFT_TABLE_NAME) + .fullJoin(rowNumberTable.asTable(ROWS_RIGHT_TABLE_NAME)) + .on(joinCondition); + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertStep.java new file mode 100644 index 0000000000..082dbdb7ba --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/InvertStep.java @@ -0,0 +1,43 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +enum InvertStep implements 
DateAggregationStep { + + ROW_NUMBER("_row_numbers", RowNumberCte::new, null), + INVERT("_inverted_dates", InvertCte::new, InvertStep.ROW_NUMBER); + + private final String suffix; + @Getter + private final DateAggregationCteConstructor stepConstructor; + private final InvertStep predecessor; + + @Override + public String suffix() { + return this.suffix; + } + + @Override + public DateAggregationStep predecessor() { + return this.predecessor; + } + + static List requiredSteps() { + return Arrays.stream(values()) + .map(cteStep -> cteStep.getStepConstructor().create(cteStep)) + .toList(); + } + + static DateAggregationTables createTableNames(QueryStep joinedTable) { + Map cteNameMap = DateAggregationStep.createCteNameMap(joinedTable, values()); + return new DateAggregationTables(joinedTable.getCteName(), cteNameMap); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeAggregateAction.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeAggregateAction.java new file mode 100644 index 0000000000..472b60e3f4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeAggregateAction.java @@ -0,0 +1,65 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.Collection; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; + +@RequiredArgsConstructor +class MergeAggregateAction implements SqlAggregationAction { + + private final QueryStep joinedStep; + + @Override + public DateAggregationTables tableNames() { + return MergeStep.tableNames(this.joinedStep); + } + + @Override + public List dateAggregationCtes() { + return MergeStep.requiredSteps(); + } + + @Override + public ColumnDateRange getOverlapValidityDate(DateAggregationDates dateAggregationDates, SqlFunctionProvider functionProvider) { + + Field rangeStart = functionProvider.least(dateAggregationDates.allStarts()); + Field rangeEnd = functionProvider.greatest(dateAggregationDates.allEnds()); + + return ColumnDateRange.of( + rangeStart.as(DateAggregationCte.RANGE_START), + rangeEnd.as(DateAggregationCte.RANGE_END) + ); + } + + @Override + public List getIntermediateTableSelects(DateAggregationDates dateAggregationDates, List carryThroughSelects) { + return Stream.of(dateAggregationDates.allStartsAndEnds(), carryThroughSelects) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + } + + @Override + public List getNoOverlapSelects(DateAggregationContext dateAggregationContext) { + return dateAggregationContext.getSteps(MergeStep.NODE_NO_OVERLAP); + } + + @Override + public QueryStep getOverlapStep(DateAggregationContext dateAggregationContext) { + return dateAggregationContext.getStep(MergeStep.OVERLAP); + } + + @Override + public boolean requiresIntervalPackingAfterwards() { + return true; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeCte.java new file mode 100644 index 0000000000..3d761cd18a --- /dev/null +++ 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeCte.java @@ -0,0 +1,39 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import lombok.Getter; + +@Getter +class MergeCte extends DateAggregationCte { + + private final DateAggregationStep cteStep; + + public MergeCte(DateAggregationStep cteStep) { + this.cteStep = cteStep; + } + + @Override + protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) { + + SqlAggregationAction aggregationAction = context.getSqlAggregationAction(); + List noOverlapSteps = aggregationAction.getNoOverlapSelects(context); + QueryStep overlapStep = aggregationAction.getOverlapStep(context); + + List unionSteps = noOverlapSteps.stream().map(MergeCte::createUnionStep).collect(Collectors.toList()); + + return QueryStep.builder() + .selects(overlapStep.getQualifiedSelects()) + .union(unionSteps); + } + + private static QueryStep createUnionStep(QueryStep noOverlapStep) { + return QueryStep.builder() + .selects(noOverlapStep.getQualifiedSelects()) + .fromTable(QueryStep.toTableLike(noOverlapStep.getCteName())) + .build(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeStep.java new file mode 100644 index 0000000000..1e39a57a6d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/MergeStep.java @@ -0,0 +1,45 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import lombok.Getter; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +enum MergeStep implements DateAggregationStep { + + OVERLAP("_overlap", OverlapCte::new, null), + INTERMEDIATE_TABLE("_no_overlap", IntermediateTableCte::new, null), + NODE_NO_OVERLAP("_node_no_overlap", NodeNoOverlapCte::new, INTERMEDIATE_TABLE), + MERGE("_merge", MergeCte::new, OVERLAP); + + private final String suffix; + @Getter + private final DateAggregationCteConstructor stepConstructor; + private final MergeStep predecessor; + + @Override + public String suffix() { + return this.suffix; + } + + @Override + public DateAggregationStep predecessor() { + return this.predecessor; + } + + static List requiredSteps() { + return Arrays.stream(values()) + .map(cteStep -> cteStep.getStepConstructor().create(cteStep)) + .toList(); + } + + static DateAggregationTables tableNames(QueryStep joinedTable) { + Map cteNameMap = DateAggregationStep.createCteNameMap(joinedTable, values()); + return new DateAggregationTables(joinedTable.getCteName(), cteNameMap); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/NodeNoOverlapCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/NodeNoOverlapCte.java new file mode 100644 index 0000000000..0397071e05 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/NodeNoOverlapCte.java @@ -0,0 +1,77 @@ +package com.bakdata.conquery.sql.conversion.cqelement.aggregation; + +import java.sql.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Optional; + +import 
com.bakdata.conquery.sql.conversion.model.ColumnDateRange;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.Selects;
+import lombok.Getter;
+import org.jooq.Condition;
+import org.jooq.Field;
+
+class NodeNoOverlapCte extends DateAggregationCte {
+
+	@Getter
+	private final DateAggregationStep cteStep;
+	private int counter = 0; // used to make each no-overlap CTE name unique
+
+	public NodeNoOverlapCte(DateAggregationStep cteStep) {
+		this.cteStep = cteStep;
+	}
+
+	@Override
+	protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) {
+
+		// we create a no-overlap node for each query step we need to aggregate
+		DateAggregationDates dateAggregationDates = context.getDateAggregationDates();
+		Iterator<ColumnDateRange> validityDates = dateAggregationDates.getValidityDates().iterator();
+		QueryStep intermediateTableStep = context.getStep(MergeStep.INTERMEDIATE_TABLE);
+
+		// the first no-overlap step has the intermediate table as predecessor
+		QueryStep.QueryStepBuilder noOverlapStep = createNoOverlapStep(validityDates.next(), context, intermediateTableStep);
+
+		// each following step has its preceding no-overlap step as predecessor
+		while (validityDates.hasNext()) {
+			counter++;
+			QueryStep precedingNoOverlapStep = noOverlapStep.build();
+			context = context.withStep(getCteStep(), precedingNoOverlapStep);
+			noOverlapStep = createNoOverlapStep(validityDates.next(), context, precedingNoOverlapStep);
+		}
+
+		return noOverlapStep;
+	}
+
+	private QueryStep.QueryStepBuilder createNoOverlapStep(
+			ColumnDateRange validityDate,
+			DateAggregationContext context,
+			QueryStep predecessor
+	) {
+
+		DateAggregationTables dateAggregationTables = context.getDateAggregationTables();
+
+		Field<Date> start = validityDate.getStart();
+		Field<Date> end = validityDate.getEnd();
+
+		Field<Date> asRangeEnd = end.as(DateAggregationCte.RANGE_END);
+		Field<Date> asRangeStart = start.as(DateAggregationCte.RANGE_START);
+		String intermediateTableCteName = dateAggregationTables.getFromTableOf(getCteStep());
+		Selects nodeNoOverlapSelects = new Selects(
+				context.getPrimaryColumn(),
+				Optional.of(ColumnDateRange.of(asRangeStart, asRangeEnd)),
+				context.getCarryThroughSelects()
+		);
+
+		Condition startNotNull = start.isNotNull();
+
+		return QueryStep.builder()
+						.cteName("%s_%s".formatted(dateAggregationTables.cteName(MergeStep.NODE_NO_OVERLAP), counter))
+						.selects(nodeNoOverlapSelects)
+						.fromTable(QueryStep.toTableLike(intermediateTableCteName))
+						.conditions(List.of(startNotNull))
+						.predecessors(List.of(predecessor));
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/OverlapCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/OverlapCte.java
new file mode 100644
index 0000000000..b818340aaf
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/OverlapCte.java
@@ -0,0 +1,54 @@
+package com.bakdata.conquery.sql.conversion.cqelement.aggregation;
+
+import java.sql.Date;
+import java.util.List;
+import java.util.Optional;
+
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import com.bakdata.conquery.sql.conversion.model.ColumnDateRange;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.Selects;
+import lombok.Getter;
+import org.jooq.Condition;
+import org.jooq.Field;
+
+@Getter
+class OverlapCte extends DateAggregationCte {
+
+	private final
+
+	public OverlapCte(DateAggregationStep cteStep) {
+		this.cteStep = cteStep;
+	}
+
+	@Override
+	protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) {
+
+		DateAggregationDates dateAggregationDates = context.getDateAggregationDates();
+		List<Field<Date>> allStarts = dateAggregationDates.allStarts();
+		List<Field<Date>> allEnds = dateAggregationDates.allEnds();
+
+		ColumnDateRange overlapValidityDate = context.getSqlAggregationAction().getOverlapValidityDate(
+				context.getDateAggregationDates(),
+				context.getFunctionProvider()
+		);
+		Selects overlapSelects = new Selects(
+				context.getPrimaryColumn(),
+				Optional.of(overlapValidityDate),
+				context.getCarryThroughSelects()
+		);
+
+		SqlFunctionProvider functionProvider = context.getFunctionProvider();
+		Condition startBeforeEnd = functionProvider.greatest(allStarts).lessThan(functionProvider.least(allEnds));
+		Condition allStartsNotNull = allStarts.stream()
+											  .map(Field::isNotNull)
+											  .reduce(Condition::and)
+											  .orElseThrow();
+		Condition overlapConditions = allStartsNotNull.and(startBeforeEnd);
+
+		return QueryStep.builder()
+				.selects(overlapSelects)
+				.conditions(List.of(overlapConditions));
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/PostgreSqlDateAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/PostgreSqlDateAggregator.java
new file mode 100644
index 0000000000..2ca126870e
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/PostgreSqlDateAggregator.java
@@ -0,0 +1,113 @@
+package com.bakdata.conquery.sql.conversion.cqelement.aggregation;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import com.bakdata.conquery.models.query.queryplan.DateAggregationAction;
+import com.bakdata.conquery.sql.conversion.dialect.SqlDateAggregator;
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import com.bakdata.conquery.sql.conversion.model.ColumnDateRange;
+import com.bakdata.conquery.sql.conversion.model.QualifyingUtil;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.Selects;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+public class PostgreSqlDateAggregator implements SqlDateAggregator {
+
+	private static final String DATE_AGGREGATION_CTE_NAME = "_dates_aggregated";
+	private static final String INVERTED_DATE_AGGREGATION_CTE_NAME = "_dates_inverted";
+
+	private final SqlFunctionProvider functionProvider;
+
+	public PostgreSqlDateAggregator(SqlFunctionProvider functionProvider) {
+		this.functionProvider = functionProvider;
+	}
+
+	@Override
+	public QueryStep apply(
+			QueryStep joinedStep,
+			List<SqlSelect> carryThroughSelects,
+			DateAggregationDates dateAggregationDates,
+			DateAggregationAction dateAggregationAction
+	) {
+		String joinedStepCteName = joinedStep.getCteName();
+
+		ColumnDateRange aggregatedValidityDate = getAggregatedValidityDate(dateAggregationDates, dateAggregationAction, joinedStepCteName);
+
+		Selects dateAggregationSelects = new Selects(
+				joinedStep.getQualifiedSelects().getPrimaryColumn(),
+				Optional.ofNullable(aggregatedValidityDate),
+				QualifyingUtil.qualify(carryThroughSelects, joinedStepCteName)
+		);
+
+		return QueryStep.builder()
+				.cteName(joinedStepCteName + DATE_AGGREGATION_CTE_NAME)
+				.selects(dateAggregationSelects)
+				.fromTable(QueryStep.toTableLike(joinedStepCteName))
+				.predecessors(List.of(joinedStep))
+				.build();
+	}
+
+	@Override
+	public QueryStep invertAggregatedIntervals(QueryStep baseStep) {
+
+		Selects baseStepSelects = baseStep.getQualifiedSelects();
+		Optional<ColumnDateRange> validityDate = baseStepSelects.getValidityDate();
+		if (validityDate.isEmpty()) {
+			return baseStep;
+		}
+
+		Field<Object> maxDateRange = DSL.function(
+				"daterange",
+				Object.class,
+				this.functionProvider.toDateField(this.functionProvider.getMinDateExpression()),
+				this.functionProvider.toDateField(this.functionProvider.getMaxDateExpression()),
+				DSL.val("[]")
+		);
+
+		// see https://www.postgresql.org/docs/current/functions-range.html
+		// {[-infinity,infinity]} - {multirange} computes the inverse of a {multirange}
+		Field<Object> invertedValidityDate = DSL.field(
+				"{0}::datemultirange - {1}",
+				Object.class,
+				maxDateRange,
+				validityDate.get().getRange()
+		).as(DATE_AGGREGATION_CTE_NAME);
+
+		return QueryStep.builder()
+				.cteName(baseStep.getCteName() + INVERTED_DATE_AGGREGATION_CTE_NAME)
+				.selects(baseStepSelects.withValidityDate(ColumnDateRange.of(invertedValidityDate)))
+				.fromTable(QueryStep.toTableLike(baseStep.getCteName()))
+				.predecessors(List.of(baseStep))
+				.build();
+	}
+
+	private ColumnDateRange getAggregatedValidityDate(DateAggregationDates dateAggregationDates, DateAggregationAction dateAggregationAction, String joinedStepCteName) {
+
+		// see https://www.postgresql.org/docs/current/functions-range.html
+		String aggregatingOperator = switch (dateAggregationAction) {
+			case MERGE -> " + ";
+			case INTERSECT -> " * ";
+			default -> throw new IllegalStateException("Unexpected aggregation mode: " + dateAggregationAction);
+		};
+
+		String aggregatedExpression = dateAggregationDates.qualify(joinedStepCteName)
+														  .getValidityDates().stream()
+														  .flatMap(validityDate -> validityDate.toFields().stream())
+														  .map(PostgreSqlDateAggregator::createEmptyRangeForNullValues)
+														  .collect(Collectors.joining(aggregatingOperator));
+
+		return ColumnDateRange.of(DSL.field(aggregatedExpression))
+							  .asValidityDateRange(joinedStepCteName);
+	}
+
+	private static String createEmptyRangeForNullValues(Field<?> field) {
+		return DSL.when(field.isNull(), DSL.field("'{}'::datemultirange"))
+				  .otherwise(field)
+				  .toString();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/RowNumberCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/RowNumberCte.java
new file mode 100644
index 0000000000..5768cd3f0c
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/RowNumberCte.java
@@ -0,0 +1,50 @@
+package com.bakdata.conquery.sql.conversion.cqelement.aggregation;
+
+import java.util.ArrayList;
+import java.util.Optional;
+
+import com.bakdata.conquery.sql.conversion.model.ColumnDateRange;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.Selects;
+import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+import lombok.Getter;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+/**
+ * Concept for date range inversion taken from <a href="https://explainextended.com/2009/11/09/inverting-date-ranges/">Inverting date ranges</a>.
+ */
+@Getter
+class RowNumberCte extends DateAggregationCte {
+
+	public static final String ROW_NUMBER_FIELD_NAME = "row_number";
+	private final DateAggregationStep cteStep;
+
+	public RowNumberCte(DateAggregationStep cteStep) {
+		this.cteStep = cteStep;
+	}
+
+	@Override
+	protected QueryStep.QueryStepBuilder convertStep(DateAggregationContext context) {
+
+		Field<Object> primaryColumn = context.getPrimaryColumn();
+
+		ColumnDateRange aggregatedValidityDate = context.getDateAggregationDates().getValidityDates().get(0);
+		Field<Integer> rowNumber = DSL.rowNumber().over(DSL.partitionBy(primaryColumn).orderBy(aggregatedValidityDate.getStart()))
+									  .as(ROW_NUMBER_FIELD_NAME);
+
+		ArrayList<SqlSelect> selects = new ArrayList<>(context.getCarryThroughSelects());
+		selects.add(new FieldWrapper(rowNumber));
+
+		Selects rowNumberSelects = new Selects(
+				primaryColumn,
+				Optional.of(aggregatedValidityDate),
+				selects
+		);
+
+		return QueryStep.builder()
+				.selects(rowNumberSelects);
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/SqlAggregationAction.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/SqlAggregationAction.java
new file mode 100644
index 0000000000..3684a48b26
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/SqlAggregationAction.java
@@ -0,0 +1,30 @@
+package com.bakdata.conquery.sql.conversion.cqelement.aggregation;
+
+import java.util.List;
+
+import com.bakdata.conquery.models.query.queryplan.DateAggregationAction;
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import com.bakdata.conquery.sql.conversion.model.ColumnDateRange;
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+
+/**
+ * Represents a subset of {@link DateAggregationAction}.
+ */
+interface SqlAggregationAction {
+
+	DateAggregationTables tableNames();
+
+	List<DateAggregationCte> dateAggregationCtes();
+
+	ColumnDateRange getOverlapValidityDate(DateAggregationDates dateAggregationDates, SqlFunctionProvider functionProvider);
+
+	List<SqlSelect> getIntermediateTableSelects(DateAggregationDates dateAggregationDates, List<SqlSelect> carryThroughSelects);
+
+	List<QueryStep> getNoOverlapSelects(DateAggregationContext dateAggregationContext);
+
+	QueryStep getOverlapStep(DateAggregationContext dateAggregationContext);
+
+	boolean requiresIntervalPackingAfterwards();
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/sql_date_aggregation.md b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/sql_date_aggregation.md
new file mode 100644
index 0000000000..ec4a707087
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/aggregation/sql_date_aggregation.md
@@ -0,0 +1,498 @@
+# SQL date aggregation
+
+When joining multiple nodes with their respective validity date ranges, we need to aggregate these dates.
+There are two aggregation actions we can apply:
+
+- `MERGE`: unions the validity dates of all nodes
+- `INTERSECT`: intersects the validity dates of all nodes
+
+Besides that, there is also the possibility to `NEGATE` a date aggregation: this will invert the validity date ranges
+of a node. Finally, dates of a certain node can also be `BLOCKED`, which prevents the upwards aggregation of a node's
+validity dates.
+
+This document outlines the process the [AnsiSqlDateAggregator](./AnsiSqlDateAggregator.java) employs to realise the
+different aggregations.
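+
+For a quick intuition before walking through the CTE-based approach: PostgreSQL's range types can express all three
+actions directly, which is also the strategy the [PostgreSqlDateAggregator](./PostgreSqlDateAggregator.java) takes.
+The snippet below (PostgreSQL 14+ multiranges, made-up date values) merely illustrates the expected results and is not
+part of the generated queries:
+
+```sql
+-- MERGE: union of two overlapping ranges (end dates excluded)
+select datemultirange(daterange('2012-01-01', '2013-01-01'))
+           + datemultirange(daterange('2011-07-01', '2012-07-02')); -- {[2011-07-01,2013-01-01)}
+
+-- INTERSECT: intersection of the same two ranges
+select datemultirange(daterange('2012-01-01', '2013-01-01'))
+           * datemultirange(daterange('2011-07-01', '2012-07-02')); -- {[2012-01-01,2012-07-02)}
+
+-- NEGATE: subtract a range from the maximum possible range
+select datemultirange(daterange('0001-01-01', '9999-12-31'))
+           - datemultirange(daterange('2012-01-01', '2013-01-01')); -- {[0001-01-01,2012-01-01),[2013-01-01,9999-12-31)}
+```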
+
+Suppose we have two concepts, each containing a date range set with only unique date range values.
+The starting point of the date aggregation process is the joined node of the two nodes. As with interval packing,
+we treat the end date of a date range as excluded. Thus, in the following example tables, the `date_end` is considered
+excluded.
+
+**Node 1**:
+
+| id | date\_start | date\_end |
+|:---|:------------|:-----------|
+| 1 | 2012-01-01 | 2013-01-01 |
+| 2 | 2013-01-02 | 2013-01-03 |
+| 3 | 2014-01-01 | 2014-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 |
+
+**Node 2**:
+
+| id | date\_start | date\_end |
+|:---|:------------|:-----------|
+| 1 | 2011-07-01 | 2012-07-02 |
+| 2 | 2013-01-03 | 2013-01-04 |
+| 3 | 2017-01-01 | 2017-12-31 |
+
+The following join query is our starting point for the date aggregation. It contains the cross-product of the validity
+date ranges of both tables.
+
+```sql
+select coalesce("node_1"."id", "node_2"."id") as "primary_column",
+       "node_1"."date_start"                  as "date_start_1",
+       "node_1"."date_end"                    as "date_end_1",
+       "node_2"."date_start"                  as "date_start_2",
+       "node_2"."date_end"                    as "date_end_2"
+from "node_1"
+         join "node_2"
+              on "node_1"."id" = "node_2"."id";
+```
+
+| primary\_column | date\_start\_1 | date\_end\_1 | date\_start\_2 | date\_end\_2 |
+|:----------------|:---------------|:-------------|:---------------|:-------------|
+| 1 | 2012-01-01 | 2013-01-01 | 2011-07-01 | 2012-07-02 |
+| 2 | 2013-01-02 | 2013-01-03 | 2013-01-03 | 2013-01-04 |
+| 3 | 2014-01-01 | 2014-12-31 | 2017-01-01 | 2017-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 | 2017-01-01 | 2017-12-31 |
+
+## `MERGE`
+
+To `MERGE` the dates, we have to create five CTEs:
+
+- `overlap`: entries where the date ranges of both nodes overlap.
+- `no_overlap`: entries where the date ranges of both nodes do not overlap.
+- `left_node_no_overlap`: entries with all unmatched date ranges from the left join node.
+- `right_node_no_overlap`: entries with all unmatched date ranges from the right join node.
+- `merge`: the union of all entries from the `overlap`, `left_node_no_overlap` and `right_node_no_overlap` CTEs.
+
+The result of the `merge` CTE will contain the newly created ranges.
+
+### `overlap`
+
+The `overlap` CTE shows the overlapping ranges by selecting the least of all start dates and the greatest of all
+end dates where all starts are non-null and there is an overlap between the date ranges. The overlap check is done by
+comparing the greatest of the start dates with the least of the end dates.
+
+```sql
+select "primary_column",
+       least("date_start_1", "date_start_2") as "range_start",
+       greatest("date_end_1", "date_end_2")  as "range_end"
+from "joined_node"
+where (
+            "date_start_1" is not null
+        and "date_start_2" is not null
+        and greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+    )
+```
+
+Looking at our base table, we can see that the date range `2012-01-01` to `2013-01-01` from the left node and the
+date range `2011-07-01` to `2012-07-02` from the right node clearly overlap. The `overlap` CTE computes the new
+maximum overlapping range from `2011-07-01` to `2013-01-01`.
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 2011-07-01 | 2013-01-01 |
+
+### `no_overlap` - intermediate table
+
+This CTE includes every row that isn’t part of the previous `overlap` CTE by inverting its conditions. It’s then used
+as an intermediate table for the following two steps.
+
+```sql
+select "primary_column",
+       "date_start_1",
+       "date_end_1",
+       "date_start_2",
+       "date_end_2"
+from "joined_node"
+where (
+           "date_start_1" is null
+        or "date_start_2" is null
+        or not greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+    )
+```
+
+| primary\_column | date\_start\_1 | date\_end\_1 | date\_start\_2 | date\_end\_2 |
+|:----------------|:---------------|:-------------|:---------------|:-------------|
+| 2 | 2013-01-02 | 2013-01-03 | 2013-01-03 | 2013-01-04 |
+| 3 | 2014-01-01 | 2014-12-31 | 2017-01-01 | 2017-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 | 2017-01-01 | 2017-12-31 |
+
+### `left_node_no_overlap`
+
+This query selects all unmatched date ranges from the left join partner.
+
+```sql
+select "primary_column",
+       "date_start_1" "range_start",
+       "date_end_1"   "range_end"
+from "no_overlap"
+where "date_start_1" is not null
+```
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 2 | 2013-01-02 | 2013-01-03 |
+| 3 | 2014-01-01 | 2014-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 |
+
+### `right_node_no_overlap`
+
+This query selects all unmatched date ranges from the right join partner.
+
+```sql
+select "primary_column",
+       "date_start_2" "range_start",
+       "date_end_2"   "range_end"
+from "no_overlap"
+where "date_start_2" is not null
+```
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 2 | 2013-01-03 | 2013-01-04 |
+| 3 | 2017-01-01 | 2017-12-31 |
+| 3 | 2017-01-01 | 2017-12-31 |
+
+### `merge`
+
+In the final step, we merge the `overlap`, `left_node_no_overlap` and `right_node_no_overlap` CTEs together.
+The result will then contain all overlapping ranges as well as all non-overlapping ranges from the left and
+right join partners.
+
+```sql
+select *
+from "overlap"
+union all
+select *
+from "left_node_no_overlap"
+union all
+select *
+from "right_node_no_overlap"
+```
+
+We can use a `union all` instead of a `union distinct`: it is faster, and we have to apply interval packing again
+anyway, which ensures a unique set of date ranges in the final result of the `MERGE` aggregation.
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 2011-07-01 | 2013-01-01 |
+| 2 | 2013-01-02 | 2013-01-03 |
+| 3 | 2014-01-01 | 2014-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 |
+| 2 | 2013-01-03 | 2013-01-04 |
+| 3 | 2017-01-01 | 2017-12-31 |
+| 3 | 2017-01-01 | 2017-12-31 |
+
+The result contains the newly created ranges, with some still overlapping ranges for the subject with id 3. We
+therefore apply the steps from interval packing again, which gives us the final result of the `MERGE` aggregation.
+
+| primary\_column | range\_start\_min | range\_end\_max |
+|:----------------|:------------------|:----------------|
+| 1 | 2011-07-01 | 2013-01-01 |
+| 2 | 2013-01-02 | 2013-01-04 |
+| 3 | 2014-01-01 | 2014-12-31 |
+| 3 | 2015-06-01 | 2015-12-31 |
+| 3 | 2017-01-01 | 2017-12-31 |
+
+## `INTERSECT`
+
+Suppose again that we have two concepts, each containing a date range set with only unique date range values.
+The starting point of the date aggregation process remains the joined node of the two nodes.
+
+To `INTERSECT` the dates, we have to create three CTEs:
+
+- `overlap`: entries where the date ranges of both nodes overlap.
+- `no_overlap`: entries where the date ranges of both nodes do not overlap. In contrast to merging, non-overlapping
+  and missing fields are handled as nulls.
+- `merge`: the union of all entries from the `overlap` and `no_overlap` CTEs.
+
+### `overlap`
+
+The `overlap` CTE shows the intersecting ranges: in contrast to the `MERGE` aggregation, we use the `greatest` start
+and the `least` end to get only the intersection of both ranges. Note that the `where` condition is the same as for
+the `MERGE` overlap: we still filter for entries that overlap, but make sure that only the intersection of the ranges
+is selected.
+
+```sql
+select "primary_column",
+       greatest("date_start_1", "date_start_2") as "range_start",
+       least("date_end_1", "date_end_2")        as "range_end"
+from "joined_node"
+where (
+            "date_start_1" is not null
+        and "date_start_2" is not null
+        and greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+    )
+```
+
+Looking at our base table, we can see that the date range `2012-01-01` to `2013-01-01` from the left node and the
+date range `2011-07-01` to `2012-07-02` from the right node clearly overlap. The `overlap` CTE computes the
+intersection of both ranges: `2012-01-01` to `2012-07-02`.
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 2012-01-01 | 2012-07-02 |
+
+### `no_overlap` - intermediate table
+
+This CTE includes every row that isn’t part of the previous `overlap` CTE by inverting its conditions. Because we know
+that the entries of this table have no intersecting validity dates, we null their validity date entries.
+
+```sql
+select "primary_column",
+       null as "range_start",
+       null as "range_end"
+from "joined_node"
+where (
+           "date_start_1" is null
+        or "date_start_2" is null
+        or not greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+    )
+```
+
+We need this table because we don't want to filter out subjects that have no overlapping range; we just assign them an
+empty validity date range.
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 2 | null | null |
+| 3 | null | null |
+| 3 | null | null |
+
+### `merge`
+
+In the final step, we merge the `overlap` and `no_overlap` tables together.
+The result will then contain all overlapping ranges as well as all non-overlapping ranges.
+
+```sql
+select *
+from "overlap"
+union all
+select *
+from "no_overlap"
+```
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 2012-01-01 | 2012-07-02 |
+| 2 | null | null |
+| 3 | null | null |
+| 3 | null | null |
+
+## `NEGATE`
+
+The idea of how to negate, or rather invert, a set of date ranges is taken from this
+[blog post](https://explainextended.com/2009/11/09/inverting-date-ranges).
+
+Take the following table, containing a date set of unique date ranges sorted by `range_start`, as the starting point.
+As always, the `range_end` date is considered excluded.
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 2012-01-01 | 2013-01-01 |
+| 1 | 2015-01-01 | 2016-01-01 |
+| 1 | 2018-01-01 | 2019-01-01 |
+
+To invert these ranges, we need two CTEs:
+
+- `row_numbers`: assigns row numbers to validity date rows.
+- `inverted_dates`: performs a self-join using the row number shifted by one as the join condition. This way, we get
+  the gaps between the respective validity date intervals.
+
+### `row_numbers`
+
+We assign row numbers to the ordered validity dates while partitioning by primary column.
+
+```sql
+select "primary_column",
+       "range_start",
+       "range_end",
+       row_number() over (
+           partition by "primary_column"
+           order by "range_start"
+           ) as "row_number"
+from "table"
+```
+
+This gives us the following result:
+
+| primary\_column | range\_start | range\_end | row\_number |
+|:----------------|:-------------|:-----------|:------------|
+| 1 | 2012-01-01 | 2013-01-01 | 1 |
+| 1 | 2015-01-01 | 2016-01-01 | 2 |
+| 1 | 2018-01-01 | 2019-01-01 | 3 |
+
+### `inverted_dates`
+
+Now, we do a self-join, selecting the end dates of the left join partner and the start dates of the right join
+partner, using the row number shifted by one as the join condition. This leaves the first start date and the last end
+date of each partition `null`. In these cases, we replace the `null` start date value with the minimum possible date
+and the `null` end date value with the maximum possible date.
+
+```sql
+select coalesce("rows_left"."primary_column", "rows_right"."primary_column") as "primary_column",
+       coalesce(
+               "rows_left"."range_end",
+               TO_DATE('0001-01-01', 'yyyy-mm-dd')
+           ) as "range_start",
+       coalesce(
+               "rows_right"."range_start",
+               TO_DATE('9999-12-31', 'yyyy-mm-dd')
+           ) as "range_end"
+from "row_numbers" "rows_left"
+         full outer join "row_numbers" "rows_right"
+                         on (
+                                     "rows_left"."primary_column" = "rows_right"."primary_column"
+                                 and ("rows_left"."row_number" + 1) = "rows_right"."row_number"
+                             )
+```
+
+This gives us the following result:
+
+| primary\_column | range\_start | range\_end |
+|:----------------|:-------------|:-----------|
+| 1 | 0001-01-01 | 2012-01-01 |
+| 1 | 2013-01-01 | 2015-01-01 |
+| 1 | 2016-01-01 | 2018-01-01 |
+| 1 | 2019-01-01 | 9999-12-31 |
+
+First, we get the interval from the minimum possible date to the start of the first validity date: `0001-01-01` to
+`2012-01-01`. Then we get the gaps between the original intervals: `2013-01-01` to `2015-01-01` and `2016-01-01` to
+`2018-01-01`. Last, we get the interval from the last end date to the maximum possible date: `2019-01-01` to
+`9999-12-31`.
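+
+For comparison, the [PostgreSqlDateAggregator](./PostgreSqlDateAggregator.java) needs neither of these CTEs: its
+`invertAggregatedIntervals` step subtracts the already aggregated date multirange from the maximum possible date
+range. A minimal sketch of the idea, assuming a preceding CTE `"base_step"` with an aggregated `"validity_date"`
+multirange column (both names are only placeholders; PostgreSQL 14+):
+
+```sql
+select "primary_column",
+       daterange('0001-01-01', '9999-12-31', '[]')::datemultirange - "validity_date" as "validity_date_inverted"
+from "base_step";
+```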
+
+# Appendix
+
+### Whole `MERGE` query
+
+```sql
+with "joined_node" as (select coalesce("node_1"."id", "node_2"."id") as "primary_column",
+                              "node_1"."date_start"                  as "date_start_1",
+                              "node_1"."date_end"                    as "date_end_1",
+                              "node_2"."date_start"                  as "date_start_2",
+                              "node_2"."date_end"                    as "date_end_2"
+                       from "node_1"
+                                join "node_2"
+                                     on "node_1"."id" = "node_2"."id"),
+     "overlap" as (select "primary_column",
+                          least("date_start_1", "date_start_2") as "range_start",
+                          greatest("date_end_1", "date_end_2")  as "range_end"
+                   from "joined_node"
+                   where (
+                               "date_start_1" is not null
+                           and "date_start_2" is not null
+                           and greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+                       )),
+     "no_overlap" as (select "primary_column",
+                             "date_start_1",
+                             "date_end_1",
+                             "date_start_2",
+                             "date_end_2"
+                      from "joined_node"
+                      where (
+                                  "date_start_1" is null
+                               or "date_start_2" is null
+                               or not greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+                          )),
+     "left_node_no_overlap" as (select "primary_column",
+                                       "date_start_1" "range_start",
+                                       "date_end_1"   "range_end"
+                                from "no_overlap"
+                                where "date_start_1" is not null),
+     "right_node_no_overlap" as (select "primary_column",
+                                        "date_start_2" "range_start",
+                                        "date_end_2"   "range_end"
+                                 from "no_overlap"
+                                 where "date_start_2" is not null),
+     "merge" as (select *
+                 from "overlap"
+                 union all
+                 select *
+                 from "left_node_no_overlap"
+                 union all
+                 select *
+                 from "right_node_no_overlap"),
+     "previous_end" as
+         (select "primary_column",
+                 "range_start",
+                 "range_end",
+                 max("range_end") over (
+                     partition by "primary_column"
+                     order by "range_start", "range_end"
+                     rows between unbounded preceding and 1 preceding
+                     ) as "previous_end"
+          from "merge"),
+     "range_index" as
+         (select "primary_column",
+                 "range_start",
+                 "range_end",
+                 sum(case
+                         when "range_start" > "previous_end" then 1
+                         else null
+                     end) over (
+                     partition by "primary_column"
+                     order by "range_start", "range_end"
+                     rows unbounded preceding
+                     ) "range_index"
+          from "previous_end")
+select "primary_column",
+       min("range_start") "range_start_min",
+       max("range_end")   "range_end_max"
+from "range_index"
+group by "primary_column", "range_index"
+```
+
+### Whole `INTERSECT` query
+
+```sql
+with "joined_node" as (select coalesce("node_1"."id", "node_2"."id") as "primary_column",
+                              "node_1"."date_start"                  as "date_start_1",
+                              "node_1"."date_end"                    as "date_end_1",
+                              "node_2"."date_start"                  as "date_start_2",
+                              "node_2"."date_end"                    as "date_end_2"
+                       from "node_1"
+                                join "node_2"
+                                     on "node_1"."id" = "node_2"."id"),
+     "overlap" as (select "primary_column",
+                          greatest("date_start_1", "date_start_2") as "range_start",
+                          least("date_end_1", "date_end_2")        as "range_end"
+                   from "joined_node"
+                   where (
+                               "date_start_1" is not null
+                           and "date_start_2" is not null
+                           and greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+                       )),
+     "no_overlap" as (select "primary_column",
+                             null::date as "range_start",
+                             null::date as "range_end"
+                      from "joined_node"
+                      where (
+                                  "date_start_1" is null
+                               or "date_start_2" is null
+                               or not greatest("date_start_1", "date_start_2") < least("date_end_1", "date_end_2")
+                          ))
+select *
+from "overlap"
+union all
+select *
+from "no_overlap";
+```
+
+### Whole `NEGATE` query
+
+```sql
+with "row_numbers" as (select "primary_column",
+                              "range_start",
+                              "range_end",
+                              row_number() over (
+                                  partition by "primary_column"
+                                  order by "range_start"
+                                  ) as "row_number"
+                       from "table")
+select coalesce("rows_left"."primary_column", "rows_right"."primary_column") as "primary_column",
"rows_right"."pid") as "primary_column", + coalesce( + "rows_left"."range_end", + TO_DATE('0001-01-01', 'yyyy-mm-dd') + ) as "range_start", + coalesce( + "rows_right"."range_start", + TO_DATE('9999-12-31', 'yyyy-mm-dd') + ) as "range_end" +from "row_numbers" "rows_left" + full outer join "row_numbers" "rows_right" + on ( + "rows_left"."pid" = "rows_right"."pid" + and ("rows_left"."row_number" + 1) = "rows_right"."row_number" + ) +``` diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationFilterCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationFilterCte.java new file mode 100644 index 0000000000..5e44e59d27 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationFilterCte.java @@ -0,0 +1,48 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.filter.FilterCondition; +import com.bakdata.conquery.sql.conversion.model.select.ExistsSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import org.jooq.Condition; + +class AggregationFilterCte extends ConceptCte { + + @Override + public QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext) { + + Selects aggregationFilterSelects = Selects.qualified( + conceptCteContext.getConceptTables().getPredecessorTableName(ConceptCteStep.AGGREGATION_FILTER), + conceptCteContext.getPrimaryColumn(), + getForAggregationFilterSelects(conceptCteContext) + ); + + List aggregationFilterConditions = conceptCteContext.getFilters().stream() + .flatMap(conceptFilter -> conceptFilter.getFilters().getGroup().stream()) + .map(FilterCondition::filterCondition) + .toList(); + + return QueryStep.builder() + .selects(aggregationFilterSelects) + .conditions(aggregationFilterConditions); + } + + private List getForAggregationFilterSelects(ConceptCteContext conceptCteContext) { + return conceptCteContext.getSelects().stream() + .flatMap(sqlSelects -> sqlSelects.getForFinalStep().stream()) + // TODO: EXISTS edge case is only in a concepts final select statement and has no predecessor selects + .filter(conquerySelect -> !(conquerySelect instanceof ExistsSqlSelect)) + .distinct() + .collect(Collectors.toList()); + } + + @Override + public ConceptCteStep cteStep() { + return ConceptCteStep.AGGREGATION_FILTER; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationSelectCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationSelectCte.java new file mode 100644 index 0000000000..9800862396 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/AggregationSelectCte.java @@ -0,0 +1,32 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; + +class AggregationSelectCte extends ConceptCte { + + @Override + public QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext) { + + List requiredInAggregationFilterStep = 
conceptCteContext.allConceptSelects() + .flatMap(sqlSelects -> sqlSelects.getForAggregationSelectStep().stream()) + .distinct() + .collect(Collectors.toList()); + + Selects aggregationSelectSelects = new Selects(conceptCteContext.getPrimaryColumn(), requiredInAggregationFilterStep); + + return QueryStep.builder() + .selects(aggregationSelectSelects) + .groupBy(List.of(conceptCteContext.getPrimaryColumn())); + } + + @Override + public ConceptCteStep cteStep() { + return ConceptCteStep.AGGREGATION_SELECT; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java index 64e05c8cfd..c10ecefb34 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java @@ -1,28 +1,54 @@ package com.bakdata.conquery.sql.conversion.cqelement.concept; +import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.Optional; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; -import com.bakdata.conquery.sql.conversion.select.SelectConverterService; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterConversions; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterValueConversions; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectContext; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConversions; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.filter.ConditionUtil; +import com.bakdata.conquery.sql.conversion.model.filter.FilterType; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Condition; +import org.jooq.impl.DSL; public class CQConceptConverter implements NodeConverter { - private final List querySteps; + private static final Pattern WHITESPACE = Pattern.compile("\\s+"); + private final List conceptCTEs; + private final FilterValueConversions filterValueConversions; + private final SelectConversions selectConversions; + private final SqlFunctionProvider functionProvider; - public CQConceptConverter(FilterConverterService filterConverterService, SelectConverterService selectConverterService) { - this.querySteps = 
List.of( - new PreprocessingQueryStep(), - new DateRestrictionQueryStep(), - new EventSelectStep(selectConverterService), - new EventFilterQueryStep(filterConverterService), - new FinalConceptQueryStep() + public CQConceptConverter(FilterConversions filterConversions, SelectConversions selectConversions, SqlFunctionProvider functionProvider) { + this.filterValueConversions = new FilterValueConversions(filterConversions); + this.selectConversions = selectConversions; + this.functionProvider = functionProvider; + this.conceptCTEs = List.of( + new PreprocessingCte(), + new EventFilterCte(), + new AggregationSelectCte(), + new AggregationFilterCte(), + new FinalConceptCte() ); } @@ -38,39 +64,120 @@ public ConversionContext convert(CQConcept node, ConversionContext context) { throw new UnsupportedOperationException("Can't handle concepts with multiple tables for now."); } - StepContext stepContext = StepContext.builder() - .context(context) - .node(node) - .table(node.getTables().get(0)) - .conceptLabel(this.getConceptLabel(node, context)) - .sqlFunctions(context.getSqlDialect().getFunction()) - .build(); - - for (ConceptQueryStep queryStep : this.querySteps) { - Optional convert = queryStep.convert(stepContext); - if (convert.isEmpty()) { + ConceptCteContext conceptCteContext = createConceptCteContext(node, context); + + Optional lastQueryStep = Optional.empty(); + for (ConceptCte queryStep : this.conceptCTEs) { + Optional convertedStep = queryStep.convert(conceptCteContext, lastQueryStep); + if (convertedStep.isEmpty()) { continue; } - stepContext = stepContext.toBuilder() - .previous(convert.get()) - .previousSelects((ConceptSelects) convert.get().getQualifiedSelects()) - .build(); + lastQueryStep = convertedStep; + conceptCteContext = conceptCteContext.withPrevious(lastQueryStep.get()); + } + + return context.withQueryStep(lastQueryStep.orElseThrow(() -> new RuntimeException("No conversion for concept possible. 
Required steps: %s".formatted(requiredSteps())))); + } + + private ConceptCteContext createConceptCteContext(CQConcept node, ConversionContext context) { + + CQTable table = node.getTables().get(0); + String tableName = table.getConnector().getTable().getName(); + String conceptLabel = createConceptLabel(node, context); + Optional validityDateSelect = convertValidityDate(table, tableName, conceptLabel); + + Set requiredSteps = getRequiredSteps(table, context.dateRestrictionActive(), validityDateSelect); + ConceptTables conceptTables = new ConceptTables(conceptLabel, requiredSteps, tableName); + + // convert filters + Stream conceptFilters = table.getFilters().stream() + .map(filterValue -> this.filterValueConversions.convert(filterValue, context, conceptTables)); + Stream dateRestrictionFilter = getDateRestriction(context, validityDateSelect).stream(); + List allFilters = Stream.concat(conceptFilters, dateRestrictionFilter).toList(); + + // convert selects + SelectContext selectContext = new SelectContext(context, node, conceptLabel, validityDateSelect, conceptTables); + List conceptSelects = Stream.concat(node.getSelects().stream(), table.getSelects().stream()) + .map(select -> this.selectConversions.convert(select, selectContext)) + .toList(); + + return ConceptCteContext.builder() + .conversionContext(context) + .filters(allFilters) + .selects(conceptSelects) + .primaryColumn(DSL.field(DSL.name(context.getConfig().getPrimaryColumn()))) + .validityDate(validityDateSelect) + .isExcludedFromDateAggregation(node.isExcludeFromTimeAggregation()) + .conceptTables(conceptTables) + .conceptLabel(conceptLabel) + .build(); + } + + /** + * Determines if event/aggregation filter steps are required. + * + *

+ * {@link ConceptCteStep#MANDATORY_STEPS} are always part of any concept conversion.
+ */
+	private Set<ConceptCteStep> getRequiredSteps(CQTable table, boolean dateRestrictionRequired, Optional<ColumnDateRange> validityDateSelect) {
+		Set<ConceptCteStep> requiredSteps = new HashSet<>(ConceptCteStep.MANDATORY_STEPS);
+
+		if (dateRestrictionApplicable(dateRestrictionRequired, validityDateSelect)) {
+			requiredSteps.add(ConceptCteStep.EVENT_FILTER);
 		}
-		return context.withQueryStep(stepContext.getPrevious());
+		table.getFilters().stream()
+			 .flatMap(filterValue -> this.filterValueConversions.requiredSteps(filterValue).stream())
+			 .forEach(requiredSteps::add);
+
+		return requiredSteps;
 	}

-	private String getConceptLabel(CQConcept node, ConversionContext context) {
+	private static String createConceptLabel(CQConcept node, ConversionContext context) {
 		// only relevant for debugging purposes as it will be part of the generated SQL query
-		// we prefix each cte name of a concept with an incrementing counter to prevent naming collisions if the same concept is selected multiple times
-		return "%s_%s".formatted(
-				context.getQueryStepCounter(),
-				node.getUserOrDefaultLabel(Locale.ENGLISH)
-					.toLowerCase()
-					.replace(' ', '_')
-					.replaceAll("\\s", "_")
-		);
+		// we prefix each cte name of a concept with an incrementing counter to prevent naming collisions if the same concept is selected multiple times
+		return "%s_%s".formatted(
+				context.getQueryStepCounter(),
+				WHITESPACE.matcher(node.getUserOrDefaultLabel(Locale.ENGLISH).toLowerCase()).replaceAll("_")
+		);
+	}
+
+	private Optional<ColumnDateRange> convertValidityDate(
+			CQTable table,
+			String tableName,
+			String conceptLabel
+	) {
+		if (Objects.isNull(table.findValidityDate())) {
+			return Optional.empty();
+		}
+		ColumnDateRange validityDate = functionProvider.daterange(table.findValidityDate(), tableName, conceptLabel);
+		return Optional.of(validityDate);
 	}

+	private Optional<ConceptFilter> getDateRestriction(ConversionContext context, Optional<ColumnDateRange> validityDate) {
+
+		if (!dateRestrictionApplicable(context.dateRestrictionActive(), validityDate)) {
+			return Optional.empty();
+		}
+
+		ColumnDateRange dateRestriction = this.functionProvider
+				.daterange(context.getDateRestrictionRange())
+				.asDateRestrictionRange();
+
+		List<SqlSelect> dateRestrictionSelects = dateRestriction.toFields().stream()
+																.map(FieldWrapper::new)
+																.collect(Collectors.toList());
+
+		Condition dateRestrictionCondition = this.functionProvider.dateRestriction(dateRestriction, validityDate.get());
+
+		return Optional.of(new ConceptFilter(
+				SqlSelects.builder().forPreprocessingStep(dateRestrictionSelects).build(),
+				Filters.builder().event(List.of(ConditionUtil.wrap(dateRestrictionCondition, FilterType.EVENT))).build()
+		));
+	}
+
+	private static boolean dateRestrictionApplicable(boolean dateRestrictionRequired, Optional<ColumnDateRange> validityDateSelect) {
+		return dateRestrictionRequired && validityDateSelect.isPresent();
+	}
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCte.java
new file mode 100644
index 0000000000..16bd05e753
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCte.java
@@ -0,0 +1,43 @@
+package com.bakdata.conquery.sql.conversion.cqelement.concept;
+
+import java.util.List;
+import java.util.Optional;
+
+import com.bakdata.conquery.sql.conversion.model.QueryStep;
+
+abstract class ConceptCte {
+
+	protected Optional<QueryStep> convert(ConceptCteContext context, Optional<QueryStep> previous)
{ + + if (!isRequired(context.getConceptTables())) { + return Optional.empty(); + } + + String cteName = context.getConceptTables().cteName(cteStep()); + QueryStep.QueryStepBuilder queryStepBuilder = this.convertStep(context).cteName(cteName); + + // only preprocessing has no previously converted step + if (previous.isEmpty()) { + queryStepBuilder.predecessors(List.of()); + } + // if interval packing takes place, fromTable and predecessors of the final concept step are already set + else if (queryStepBuilder.build().getFromTable() == null && queryStepBuilder.build().getPredecessors().isEmpty()) { + queryStepBuilder.fromTable(QueryStep.toTableLike(previous.get().getCteName())) + .predecessors(List.of(previous.get())); + } + return Optional.of(queryStepBuilder.build()); + } + + /** + * @return The {@link ConceptCteStep} this instance belongs to. + */ + protected abstract ConceptCteStep cteStep(); + + protected abstract QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext); + + private boolean isRequired(ConceptTables conceptTables) { + return conceptTables.isRequiredStep(cteStep()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteContext.java new file mode 100644 index 0000000000..4661bf4271 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteContext.java @@ -0,0 +1,45 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import lombok.Builder; +import lombok.Value; +import lombok.With; +import org.jooq.Field; + +@Value +@Builder(toBuilder = true) +public class ConceptCteContext implements Context { + + ConversionContext conversionContext; + String conceptLabel; + Field primaryColumn; + Optional validityDate; + boolean isExcludedFromDateAggregation; + List selects; + List filters; + ConceptTables conceptTables; + @With + QueryStep previous; + + /** + * @return All concepts {@link SqlSelects} that are either required for {@link Filter}'s or {@link Select}'s. 
+ */ + public Stream allConceptSelects() { + return Stream.concat( + getFilters().stream().map(ConceptFilter::getSelects), + getSelects().stream() + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteStep.java new file mode 100644 index 0000000000..bc47aebc37 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptCteStep.java @@ -0,0 +1,38 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.HashSet; +import java.util.Set; + +import com.bakdata.conquery.sql.conversion.model.CteStep; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public enum ConceptCteStep implements CteStep { + + PREPROCESSING("_preprocessing", null), + EVENT_FILTER("_event_filter", PREPROCESSING), + AGGREGATION_SELECT("_group_select", EVENT_FILTER), + AGGREGATION_FILTER("_group_filter", AGGREGATION_SELECT), + FINAL("", AGGREGATION_FILTER); + + public static final Set MANDATORY_STEPS = Set.of(ConceptCteStep.PREPROCESSING, ConceptCteStep.AGGREGATION_SELECT, ConceptCteStep.FINAL); + + private final String suffix; + private final ConceptCteStep predecessor; + + public static Set withOptionalSteps(ConceptCteStep... conceptCteStep) { + HashSet steps = new HashSet<>(MANDATORY_STEPS); + steps.addAll(Set.of(conceptCteStep)); + return steps; + } + + @Override + public String cteName(String conceptLabel) { + return "concept_%s%s".formatted(conceptLabel, this.suffix); + } + + public ConceptCteStep predecessor() { + return this.predecessor; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java deleted file mode 100644 index d0175c8523..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptQueryStep.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import java.util.Collections; -import java.util.List; -import java.util.Optional; - -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; - -abstract class ConceptQueryStep { - - public Optional convert(StepContext context) { - if (!canConvert(context)) { - return Optional.empty(); - } - - QueryStep.QueryStepBuilder queryStepBuilder = this.convertStep(context).cteName(createCteName(context)); - - if (context.getPrevious() != null) { - queryStepBuilder.predecessors(List.of(context.getPrevious())) - .fromTable(QueryStep.toTableLike(context.getPrevious().getCteName())); - } - else { - queryStepBuilder.predecessors(Collections.emptyList()) - .fromTable(QueryStep.toTableLike(context.getTable().getConnector().getTable().getName())); - } - return Optional.of(queryStepBuilder.build()); - - } - - abstract boolean canConvert(StepContext stepContext); - - abstract QueryStep.QueryStepBuilder convertStep(StepContext stepContext); - - abstract String nameSuffix(); - - private String createCteName(StepContext stepContext) { - return "concept_%s%s".formatted(stepContext.getConceptLabel(), nameSuffix()); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptTables.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptTables.java new file mode 100644 index 0000000000..eac3683229 --- /dev/null 
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/ConceptTables.java @@ -0,0 +1,63 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.jooq.DataType; +import org.jooq.Field; +import org.jooq.impl.DSL; + +public class ConceptTables { + + private final Map cteNames; + private final String rootTable; + + public ConceptTables(String conceptLabel, Set requiredSteps, String rootTableName) { + this.cteNames = requiredSteps.stream() + .collect(Collectors.toMap( + Function.identity(), + step -> step.cteName(conceptLabel) + )); + this.rootTable = rootTableName; + } + + public boolean isRequiredStep(ConceptCteStep conceptCteStep) { + return this.cteNames.containsKey(conceptCteStep); + } + + /** + * @return The CTE name for a {@link ConceptCteStep}. + */ + public String cteName(ConceptCteStep conceptCteStep) { + return this.cteNames.get(conceptCteStep); + } + + /** + * @return The name of the table this {@link ConceptCteStep} will select from. + */ + public String getPredecessorTableName(ConceptCteStep conceptCteStep) { + ConceptCteStep predecessor = conceptCteStep.predecessor(); + if (predecessor == null) { + return rootTable; + } + return this.cteNames.get(predecessor); + } + + /** + * Qualify a field for a {@link ConceptCteStep}. + *

+ * For example, if you want to qualify a {@link Field} for the AGGREGATION_SELECT step, + * it's qualified on the EVENT_FILTER or PREPROCESSING_STEP depending on the presence of the respective step. + * See {@link ConceptTables#getPredecessorTableName(ConceptCteStep)} + * + * @param conceptCteStep The {@link ConceptCteStep} you want to qualify the given field for. + * @param field The field you want to qualify. + */ + @SuppressWarnings("unchecked") + public Field qualifyOnPredecessorTableName(ConceptCteStep conceptCteStep, Field field) { + return DSL.field(DSL.name(getPredecessorTableName(conceptCteStep), field.getName()), (DataType) field.getDataType()); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java deleted file mode 100644 index c41c4bea7c..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/DateRestrictionQueryStep.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import java.util.List; -import java.util.Optional; - -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import org.jooq.Condition; - -class DateRestrictionQueryStep extends ConceptQueryStep { - - @Override - public boolean canConvert(StepContext stepContext) { - return stepContext.getPreviousSelects().getDateRestrictionRange().isPresent(); - } - - @Override - public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { - ConceptSelects dateRestrictionSelects = this.prepareDateRestrictionSelects(stepContext); - Condition dateRestriction = stepContext.getSqlFunctions().dateRestriction( - stepContext.getPreviousSelects().getDateRestrictionRange().get(), - stepContext.getPreviousSelects().getValidityDate().get() - ); - - return QueryStep.builder() - .selects(dateRestrictionSelects) - .conditions(List.of(dateRestriction)); - } - - @Override - public String nameSuffix() { - return "_date_restriction"; - } - - private ConceptSelects prepareDateRestrictionSelects(final StepContext stepContext) { - ConceptSelects.ConceptSelectsBuilder selectsBuilder = stepContext.getPreviousSelects().toBuilder(); - selectsBuilder.dateRestrictionRange(Optional.empty()); - if (stepContext.getNode().isExcludeFromTimeAggregation()) { - selectsBuilder.validityDate(Optional.empty()); - } - return selectsBuilder.build(); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterCte.java new file mode 100644 index 0000000000..a6966bf94e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterCte.java @@ -0,0 +1,46 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.filter.FilterCondition; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import org.jooq.Condition; + +class EventFilterCte extends ConceptCte { + + @Override + public QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext) { + + 
Selects eventFilterSelects = Selects.qualified( + conceptCteContext.getConceptTables().getPredecessorTableName(ConceptCteStep.EVENT_FILTER), + conceptCteContext.getPrimaryColumn(), + conceptCteContext.getValidityDate(), + getForAggregationSelectStep(conceptCteContext) + ); + + List eventFilterConditions = conceptCteContext.getFilters().stream() + .flatMap(conceptFilter -> conceptFilter.getFilters().getEvent().stream()) + .map(FilterCondition::filterCondition) + .toList(); + + return QueryStep.builder() + .selects(eventFilterSelects) + .conditions(eventFilterConditions); + } + + private static List getForAggregationSelectStep(ConceptCteContext conceptCteContext) { + return conceptCteContext.allConceptSelects() + .flatMap(sqlSelects -> sqlSelects.getForAggregationSelectStep().stream()) + .distinct() + .collect(Collectors.toList()); + } + + @Override + public ConceptCteStep cteStep() { + return ConceptCteStep.EVENT_FILTER; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java deleted file mode 100644 index 71b0b80a01..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventFilterQueryStep.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import java.util.Collections; -import java.util.List; - -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; -import org.jooq.Condition; - -public class EventFilterQueryStep extends ConceptQueryStep { - - private final FilterConverterService filterConverterService; - - public EventFilterQueryStep(FilterConverterService filterConverterService) { - this.filterConverterService = filterConverterService; - } - - @Override - public boolean canConvert(StepContext stepContext) { - return !stepContext.getTable().getFilters().isEmpty(); - } - - @Override - public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { - - ConceptSelects eventFilterSelects = stepContext.getPreviousSelects().withEventFilter(Collections.emptyList()); - List eventFilterConditions = stepContext.getTable().getFilters().stream() - .map(filterValue -> this.filterConverterService.convert(filterValue, stepContext.getContext())) - .toList(); - return QueryStep.builder().selects(eventFilterSelects).conditions(eventFilterConditions); - } - - @Override - public String nameSuffix() { - return "_event_filter"; - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java deleted file mode 100644 index c0ceb15299..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/EventSelectStep.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import java.util.List; -import java.util.stream.Stream; - -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.select.SelectConverterService; -import org.jooq.Field; - -class EventSelectStep extends ConceptQueryStep { - - private final SelectConverterService selectConverterService; - - EventSelectStep(SelectConverterService 
selectConverterService) { - this.selectConverterService = selectConverterService; - } - - @Override - public boolean canConvert(StepContext stepContext) { - return !stepContext.getTable().getSelects().isEmpty() || !stepContext.getNode().getSelects().isEmpty(); - } - - @Override - public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { - return QueryStep.builder().selects(stepContext.getPreviousSelects().withEventSelect(this.getEventSelects(stepContext))); - } - - @Override - public String nameSuffix() { - return "_event_select"; - } - - @SuppressWarnings("unchecked") - private List> getEventSelects(StepContext stepContext) { - return Stream.concat(stepContext.getTable().getSelects().stream(), stepContext.getNode().getSelects().stream()) - .map(select -> (Field) this.selectConverterService.convert(select, stepContext.getContext())) - .toList(); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptCte.java new file mode 100644 index 0000000000..e50c52ed89 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptCte.java @@ -0,0 +1,81 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingContext; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.LogicalOperation; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.QueryStepJoiner; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.TableLike; + +class FinalConceptCte extends ConceptCte { + + @Override + protected QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext) { + + List forFinalStep = conceptCteContext.getSelects().stream() + .flatMap(sqlSelects -> sqlSelects.getForFinalStep().stream()) + .distinct() + .toList(); + + if (conceptCteContext.getValidityDate().isEmpty() || conceptCteContext.isExcludedFromDateAggregation()) { + Selects finalConceptSelects = new Selects(conceptCteContext.getPrimaryColumn(), Optional.empty(), forFinalStep); + return QueryStep.builder() + .selects(finalConceptSelects); + } + + return applyIntervalPacking(forFinalStep, conceptCteContext); + } + + @Override + protected ConceptCteStep cteStep() { + return ConceptCteStep.FINAL; + } + + private QueryStep.QueryStepBuilder applyIntervalPacking(List forFinalStep, ConceptCteContext conceptCteContext) { + + IntervalPackingContext intervalPackingContext = new IntervalPackingContext( + conceptCteContext.getConceptLabel(), + conceptCteContext.getPrimaryColumn(), + conceptCteContext.getValidityDate().get(), + conceptCteContext.getConceptTables() + ); + + QueryStep finalIntervalPackingStep = conceptCteContext.getConversionContext() + .getSqlDialect() + .getIntervalPacker() + .createIntervalPackingSteps(intervalPackingContext); + + return joinSelectsAndFiltersWithIntervalPackingStep(forFinalStep, finalIntervalPackingStep, conceptCteContext); + } + + private QueryStep.QueryStepBuilder joinSelectsAndFiltersWithIntervalPackingStep( + List forFinalStep, + QueryStep finalIntervalPackingStep, + ConceptCteContext conceptCteContext 
+ ) { + QueryStep finalSelectsAndFilterStep = conceptCteContext.getPrevious(); + Field primaryColumn = finalSelectsAndFilterStep.getQualifiedSelects().getPrimaryColumn(); + Optional validityDate = Optional.of(finalIntervalPackingStep.getQualifiedSelects().getValidityDate().get()); + + TableLike joinedTable = QueryStepJoiner.constructJoinedTable( + List.of(finalSelectsAndFilterStep, finalIntervalPackingStep), + LogicalOperation.AND, + conceptCteContext.getConversionContext() + ); + + Selects finalConceptSelects = new Selects(primaryColumn, validityDate, forFinalStep); + + return QueryStep.builder() + .selects(finalConceptSelects) + .fromTable(joinedTable) + .predecessors(List.of(finalSelectsAndFilterStep, finalIntervalPackingStep)); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java deleted file mode 100644 index a58e7e1c91..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/FinalConceptQueryStep.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import com.bakdata.conquery.sql.conversion.context.selects.Selects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; - -class FinalConceptQueryStep extends ConceptQueryStep { - - @Override - public boolean canConvert(StepContext stepContext) { - return true; - } - - @Override - public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { - Selects finalSelects = stepContext.getPrevious().getQualifiedSelects(); - return QueryStep.builder().selects(finalSelects); - } - - @Override - public String nameSuffix() { - return ""; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java new file mode 100644 index 0000000000..2c716d4546 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingCte.java @@ -0,0 +1,30 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept; + +import java.util.List; + +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; + +class PreprocessingCte extends ConceptCte { + + public QueryStep.QueryStepBuilder convertStep(ConceptCteContext conceptCteContext) { + + List forPreprocessing = conceptCteContext.allConceptSelects() + .flatMap(sqlSelects -> sqlSelects.getForPreprocessingStep().stream()) + .distinct() + .toList(); + + Selects preprocessingSelects = new Selects(conceptCteContext.getPrimaryColumn(), conceptCteContext.getValidityDate(), forPreprocessing); + + return QueryStep.builder() + .selects(preprocessingSelects) + .fromTable(QueryStep.toTableLike(conceptCteContext.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING))); + } + + @Override + public ConceptCteStep cteStep() { + return ConceptCteStep.PREPROCESSING; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java deleted file mode 100644 index ce6b9875e4..0000000000 --- 
a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/PreprocessingQueryStep.java +++ /dev/null @@ -1,107 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import java.util.Collections; -import java.util.List; -import java.util.Optional; - -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.models.ColumnDateRange; -import org.jooq.Field; -import org.jooq.impl.DSL; - -class PreprocessingQueryStep extends ConceptQueryStep { - - public boolean canConvert(StepContext stepContext) { - // We always apply preprocessing to select the required columns - return true; - } - - public QueryStep.QueryStepBuilder convertStep(StepContext stepContext) { - - CQTable table = stepContext.getTable(); - ConceptSelects.ConceptSelectsBuilder selectsBuilder = ConceptSelects.builder(); - - selectsBuilder.primaryColumn(DSL.field(DSL.name(stepContext.getContext().getConfig().getPrimaryColumn()))) - .dateRestrictionRange(this.getDateRestrictionSelect(stepContext)) - .validityDate(this.getValidityDateSelect(stepContext)); - - List> conceptSelectFields = this.getColumnSelectReferences(table); - List> conceptFilterFields = this.getColumnFilterReferences(table); - - // deduplicate because a concepts selects and filters can require the same columns - // and selecting the same columns several times will cause SQL errors - List> deduplicatedFilterFields = conceptFilterFields.stream() - .filter(field -> !conceptSelectFields.contains(field)) - .toList(); - - selectsBuilder.eventSelect(conceptSelectFields). - eventFilter(deduplicatedFilterFields); - - // not part of preprocessing yet - selectsBuilder.groupSelect(Collections.emptyList()) - .groupFilter(Collections.emptyList()); - - return QueryStep.builder() - .selects(selectsBuilder.build()) - .conditions(Collections.emptyList()) - .predecessors(Collections.emptyList()); - } - - @Override - public String nameSuffix() { - return "_preprocessing"; - } - - private Optional getDateRestrictionSelect(final StepContext stepContext) { - if (!stepContext.getContext().dateRestrictionActive() || !this.tableHasValidityDates(stepContext.getTable())) { - return Optional.empty(); - } - ColumnDateRange dateRestriction = stepContext.getContext().getSqlDialect().getFunction().daterange(stepContext.getContext().getDateRestrictionRange()); - return Optional.of(dateRestriction); - } - - private Optional getValidityDateSelect(final StepContext stepContext) { - if (!this.validityDateIsRequired(stepContext)) { - return Optional.empty(); - } - return Optional.of(stepContext.getSqlFunctions().daterange(stepContext.getTable().findValidityDate(), stepContext.getConceptLabel())); - } - - /** - * @return True, if a date restriction is active and the node is not excluded from time aggregation - * OR there is no date restriction, but still existing validity dates which are included in time aggregation. 
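The new PreprocessingCte added above replaces this deleted step: it extracts only the columns that later CTEs need. As a rough orientation, here is a minimal, self-contained jOOQ sketch of the SQL shape such a preprocessing CTE takes; the table and column names ("concept_table", "pid", "date_start", "date_end", "value") are invented for illustration and this is not code from the PR:

import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;

class PreprocessingCteSketch {
	public static void main(String[] args) {
		DSLContext create = DSL.using(SQLDialect.POSTGRES);
		// Renders roughly: WITH concept_preprocessing AS (SELECT pid, date_start, date_end, value FROM concept_table) SELECT * FROM concept_preprocessing
		String sql = create.with(DSL.name("concept_preprocessing").as(
								 DSL.select(DSL.field(DSL.name("concept_table", "pid")),
											DSL.field(DSL.name("concept_table", "date_start")),
											DSL.field(DSL.name("concept_table", "date_end")),
											DSL.field(DSL.name("concept_table", "value")))
									.from(DSL.table(DSL.name("concept_table")))))
						  .select(DSL.asterisk())
						  .from(DSL.table(DSL.name("concept_preprocessing")))
						  .getSQL();
		System.out.println(sql);
	}
}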
- */ - private boolean validityDateIsRequired(final StepContext stepContext) { - return this.tableHasValidityDates(stepContext.getTable()) - && !stepContext.getNode().isExcludeFromTimeAggregation(); - } - - private boolean tableHasValidityDates(CQTable table) { - return !table.getConnector() - .getValidityDates() - .isEmpty(); - } - - private List> getColumnSelectReferences(CQTable table) { - return table.getSelects().stream() - .flatMap(select -> select.getRequiredColumns().stream().map(column -> this.mapColumnOntoTable(column, table))) - .toList(); - } - - private List> getColumnFilterReferences(CQTable table) { - return table.getFilters().stream() - .map(FilterValue::getFilter) - .flatMap(filter -> filter.getRequiredColumns().stream().map(column -> this.mapColumnOntoTable(column, table))) - .toList(); - } - - - private Field mapColumnOntoTable(Column column, CQTable table) { - return DSL.field(DSL.name(table.getConnector().getTable().getName(), column.getName()));} - - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java deleted file mode 100644 index 92f242c5bc..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/StepContext.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.bakdata.conquery.sql.conversion.cqelement.concept; - -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.selects.ConceptSelects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; -import lombok.Builder; -import lombok.Value; - -@Value -@Builder(toBuilder = true) -class StepContext { - ConversionContext context; - SqlFunctionProvider sqlFunctions; - CQConcept node; - CQTable table; - String conceptLabel; - QueryStep previous; - ConceptSelects previousSelects; -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/BigMultiSelectFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/BigMultiSelectFilterConverter.java new file mode 100644 index 0000000000..c51d0f8ee3 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/BigMultiSelectFilterConverter.java @@ -0,0 +1,26 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.Set; + +import com.bakdata.conquery.models.datasets.concepts.filters.specific.BigMultiSelectFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public class BigMultiSelectFilterConverter implements FilterConverter { + + @Override + public ConceptFilter convert(BigMultiSelectFilter bigMultiSelectFilter, FilterContext context) { + return SelectFilterUtil.convert(bigMultiSelectFilter, context, context.getValue()); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.EVENT_FILTER); + } + + @Override + public Class getConversionClass() { + return BigMultiSelectFilter.class; + } + +} diff --git 
a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/CountFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/CountFilterConverter.java new file mode 100644 index 0000000000..ff66ee5073 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/CountFilterConverter.java @@ -0,0 +1,56 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.datasets.concepts.filters.specific.CountFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.filter.CountCondition; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.select.CountSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Field; + +public class CountFilterConverter implements FilterConverter { + + @Override + public ConceptFilter convert(CountFilter countFilter, FilterContext context) { + + SqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + countFilter.getColumn().getName(), + Object.class + ); + + Field qualifiedRootSelect = context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + CountSqlSelect countSqlSelect = new CountSqlSelect(qualifiedRootSelect, countFilter.getName(), CountSqlSelect.CountType.fromBoolean(countFilter.isDistinct())); + + Field qualifiedCountGroupBy = context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_FILTER, countSqlSelect.aliased()); + CountCondition countFilterCondition = new CountCondition(qualifiedCountGroupBy, context.getValue()); + + return new ConceptFilter( + SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(countSqlSelect)) + .build(), + Filters.builder() + .group(List.of(countFilterCondition)) + .build() + ); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.AGGREGATION_FILTER); + } + + @Override + public Class getConversionClass() { + return CountFilter.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/DateDistanceFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/DateDistanceFilterConverter.java new file mode 100644 index 0000000000..226ece0185 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/DateDistanceFilterConverter.java @@ -0,0 +1,63 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.models.common.Range; +import com.bakdata.conquery.models.datasets.concepts.filters.specific.DateDistanceFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import 
com.bakdata.conquery.sql.conversion.model.filter.DateDistanceCondition; +import com.bakdata.conquery.sql.conversion.model.filter.FilterCondition; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.select.DateDistanceSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; + +public class DateDistanceFilterConverter implements FilterConverter { + + private final DateNowSupplier dateNowSupplier; + + public DateDistanceFilterConverter(DateNowSupplier dateNowSupplier) { + this.dateNowSupplier = dateNowSupplier; + } + + @Override + public ConceptFilter convert(DateDistanceFilter dateDistanceFilter, FilterContext context) { + + DateDistanceSqlSelect dateDistanceSqlSelect = new DateDistanceSqlSelect( + dateNowSupplier, + dateDistanceFilter.getTimeUnit(), + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + dateDistanceFilter.getColumn(), + dateDistanceFilter.getName(), + context.getParentContext().getDateRestrictionRange(), + context.getParentContext().getSqlDialect().getFunctionProvider() + ); + + FilterCondition dateDistanceCondition = new DateDistanceCondition( + context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.EVENT_FILTER, dateDistanceSqlSelect.aliased()), + context.getValue() + ); + + return new ConceptFilter( + SqlSelects.builder() + .forPreprocessingStep(List.of(dateDistanceSqlSelect)) + .build(), + Filters.builder() + .event(List.of(dateDistanceCondition)) + .build() + ); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.EVENT_FILTER); + } + + @Override + public Class getConversionClass() { + return DateDistanceFilter.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterContext.java new file mode 100644 index 0000000000..3284bd2f50 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterContext.java @@ -0,0 +1,16 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptTables; +import lombok.Value; + +@Value +public class FilterContext implements Context { + /** + * Filter Value + */ + V value; + ConversionContext parentContext; + ConceptTables conceptTables; +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConversions.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConversions.java new file mode 100644 index 0000000000..9ab2bbe9e0 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConversions.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.sql.conversion.Conversions; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public class FilterConversions extends Conversions, ConceptFilter, FilterContext> { + + public FilterConversions(List> 
converters) { + super(converters); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConverter.java new file mode 100644 index 0000000000..da66ce9035 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterConverter.java @@ -0,0 +1,9 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.sql.conversion.Converter; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public interface FilterConverter> extends Converter> { + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterValueConversions.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterValueConversions.java new file mode 100644 index 0000000000..3733a3e31c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/FilterValueConversions.java @@ -0,0 +1,38 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.Set; + +import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptTables; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public class FilterValueConversions { + private final FilterConversions filterConversions; + + public FilterValueConversions(FilterConversions filterConversions) { + this.filterConversions = filterConversions; + } + + public ConceptFilter convert(FilterValue filterValue, ConversionContext context, ConceptTables conceptTables) { + ConceptFilter + convert = + this.filterConversions.convert(filterValue.getFilter(), new FilterContext<>(filterValue.getValue(), context, conceptTables)); + if (context.isNegation()) { + return new ConceptFilter(convert.getSelects(), convert.getFilters().negated()); + } + return convert; + } + + public Set requiredSteps(FilterValue filterValue) { + return this.filterConversions.getConverters().stream() + .filter(converter -> converter.getConversionClass().isInstance(filterValue.getFilter())) + .findFirst() + .orElseThrow(() -> new RuntimeException( + "Could not find a matching converter for filter %s. 
Converters: %s".formatted(filterValue.getFilter(), this.filterConversions.getConverters())) + ) + .requiredSteps(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/MultiSelectFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/MultiSelectFilterConverter.java new file mode 100644 index 0000000000..7cca2ba098 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/MultiSelectFilterConverter.java @@ -0,0 +1,26 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.Set; + +import com.bakdata.conquery.models.datasets.concepts.filters.specific.MultiSelectFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public class MultiSelectFilterConverter implements FilterConverter { + + @Override + public ConceptFilter convert(MultiSelectFilter multiSelectFilter, FilterContext context) { + return SelectFilterUtil.convert(multiSelectFilter, context, context.getValue()); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.EVENT_FILTER); + } + + @Override + public Class getConversionClass() { + return MultiSelectFilter.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberFilterConverter.java new file mode 100644 index 0000000000..278370caa2 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberFilterConverter.java @@ -0,0 +1,56 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.models.common.IRange; +import com.bakdata.conquery.models.datasets.concepts.filters.specific.NumberFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.filter.NumberCondition; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; + +public class NumberFilterConverter implements FilterConverter, NumberFilter>> { + + private static final Class CLASS = NumberFilter.class; + + @Override + public ConceptFilter convert(NumberFilter> numberFilter, FilterContext> context) { + + Class numberClass = NumberMapUtil.NUMBER_MAP.get(numberFilter.getColumn().getType()); + + ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + numberFilter.getColumn().getName(), + numberClass + ); + + NumberCondition condition = new NumberCondition( + context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.EVENT_FILTER, rootSelect.aliased()), + context.getValue() + ); + + return new ConceptFilter( + SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .build(), + Filters.builder() + .event(List.of(condition)) + .build() + ); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.EVENT_FILTER); + } + + 
@Override + @SuppressWarnings("unchecked") + public Class>> getConversionClass() { + return (Class>>) CLASS; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberMapUtil.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberMapUtil.java new file mode 100644 index 0000000000..5d15745432 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/NumberMapUtil.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.Map; + +import com.bakdata.conquery.models.events.MajorTypeId; + +public class NumberMapUtil { + + public static final Map> NUMBER_MAP = Map.of( + MajorTypeId.DECIMAL, Double.class, + MajorTypeId.REAL, Double.class, + MajorTypeId.INTEGER, Integer.class + ); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SelectFilterUtil.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SelectFilterUtil.java new file mode 100644 index 0000000000..b0e430890c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SelectFilterUtil.java @@ -0,0 +1,41 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.filter.FilterCondition; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.filter.MultiSelectCondition; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; + +class SelectFilterUtil { + + public static ConceptFilter convert(SelectFilter selectFilter, FilterContext context, String[] values) { + + SqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + selectFilter.getColumn().getName(), + String.class + ); + + FilterCondition condition = new MultiSelectCondition( + context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.EVENT_FILTER, rootSelect.aliased()), + values, + context.getParentContext().getSqlDialect().getFunctionProvider() + ); + + return new ConceptFilter( + SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .build(), + Filters.builder() + .event(List.of(condition)) + .build() + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SingleSelectFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SingleSelectFilterConverter.java new file mode 100644 index 0000000000..451aa33944 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SingleSelectFilterConverter.java @@ -0,0 +1,26 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.Set; + +import com.bakdata.conquery.models.datasets.concepts.filters.specific.SingleSelectFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; 
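// Illustrative note (not code from this PR): SelectFilterUtil above funnels select filters into a
// MultiSelectCondition, which amounts to an IN predicate on the preprocessed column. Roughly, with
// jOOQ and an invented column name:
//
//   Field<String> color = DSL.field(DSL.name("concept_event_filter", "color"), String.class);
//   Condition in = color.in("red", "blue");
//   Condition negated = in.not(); // FilterValueConversions negates the whole Filters on negation
//
// SingleSelectFilterConverter below reuses this path by wrapping its single value into a
// one-element array.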
+import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; + +public class SingleSelectFilterConverter implements FilterConverter { + + @Override + public ConceptFilter convert(SingleSelectFilter singleSelect, FilterContext context) { + return SelectFilterUtil.convert(singleSelect, context, new String[]{context.getValue()}); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.EVENT_FILTER); + } + + @Override + public Class getConversionClass() { + return SingleSelectFilter.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SumFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SumFilterConverter.java new file mode 100644 index 0000000000..9dfa7c5a72 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/filter/SumFilterConverter.java @@ -0,0 +1,62 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.filter; + +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.models.common.IRange; +import com.bakdata.conquery.models.datasets.concepts.filters.specific.SumFilter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.filter.ConceptFilter; +import com.bakdata.conquery.sql.conversion.model.filter.Filters; +import com.bakdata.conquery.sql.conversion.model.filter.SumCondition; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import com.bakdata.conquery.sql.conversion.model.select.SumSqlSelect; +import org.jooq.Field; + +public class SumFilterConverter implements FilterConverter, SumFilter>> { + + private static final Class CLASS = SumFilter.class; + + @Override + public ConceptFilter convert(SumFilter> sumFilter, FilterContext> context) { + + // TODO(tm): convert getSubtractColumn and getDistinctByColumn + Class numberClass = NumberMapUtil.NUMBER_MAP.get(sumFilter.getColumn().getType()); + ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + sumFilter.getColumn().getName(), + numberClass + ); + + Field qualifiedRootSelect = context.getConceptTables() + .qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + SumSqlSelect sumSqlSelect = new SumSqlSelect(qualifiedRootSelect, sumFilter.getName()); + + Field qualifiedSumGroupBy = context.getConceptTables() + .qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_FILTER, sumSqlSelect.aliased()); + SumCondition sumFilterCondition = new SumCondition(qualifiedSumGroupBy, context.getValue()); + + return new ConceptFilter( + SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(sumSqlSelect)) + .build(), + Filters.builder() + .group(List.of(sumFilterCondition)) + .build() + ); + } + + @Override + public Set requiredSteps() { + return ConceptCteStep.withOptionalSteps(ConceptCteStep.AGGREGATION_FILTER); + } + + @Override + @SuppressWarnings("unchecked") + public Class>> getConversionClass() { + return (Class>>) CLASS; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/CountSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/CountSelectConverter.java 
new file mode 100644 index 0000000000..51fe5430c2 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/CountSelectConverter.java @@ -0,0 +1,45 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.CountSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.select.CountSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Field; + +public class CountSelectConverter implements SelectConverter { + + @Override + public SqlSelects convert(CountSelect countSelect, SelectContext context) { + + SqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + countSelect.getColumn().getName(), + Object.class + ); + + Field qualifiedRootSelect = context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + CountSqlSelect countSqlSelect = new CountSqlSelect(qualifiedRootSelect, countSelect.getName(), CountSqlSelect.CountType.fromBoolean(countSelect.isDistinct())); + + ExtractingSqlSelect finalSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + countSqlSelect.aliased().getName(), + Integer.class + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(countSqlSelect)) + .forFinalStep(List.of(finalSelect)) + .build(); + } + + @Override + public Class getConversionClass() { + return CountSelect.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/DateDistanceSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/DateDistanceSelectConverter.java new file mode 100644 index 0000000000..b5c0c5707d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/DateDistanceSelectConverter.java @@ -0,0 +1,59 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.DateDistanceSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.select.DateDistanceSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.MinSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; +import org.jooq.Field; + +public class DateDistanceSelectConverter implements SelectConverter { + + private final DateNowSupplier dateNowSupplier; + + public DateDistanceSelectConverter(DateNowSupplier dateNowSupplier) { + this.dateNowSupplier = dateNowSupplier; + } + + @Override + public SqlSelects convert(DateDistanceSelect dateDistanceSelect, SelectContext context) { + + + SqlSelect rootSelect = new DateDistanceSqlSelect( + dateNowSupplier, + 
dateDistanceSelect.getTimeUnit(), context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + dateDistanceSelect.getColumn(), + dateDistanceSelect.getName(), + context.getParentContext().getDateRestrictionRange(), + context.getParentContext().getSqlDialect().getFunctionProvider() + ); + + Field + qualifiedDateDistance = + context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + MinSqlSelect minDateDistance = new MinSqlSelect(qualifiedDateDistance, dateDistanceSelect.getName()); + + ExtractingSqlSelect firstValueReference = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + minDateDistance.aliased().getName(), + Object.class + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(minDateDistance)) + .forFinalStep(List.of(firstValueReference)) + .build(); + } + + @Override + public Class getConversionClass() { + return DateDistanceSelect.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/ExistsSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/ExistsSelectConverter.java new file mode 100644 index 0000000000..4c0cc0777e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/ExistsSelectConverter.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.concept.specific.ExistsSelect; +import com.bakdata.conquery.sql.conversion.model.select.ExistsSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; + +public class ExistsSelectConverter implements SelectConverter { + + @Override + public SqlSelects convert(ExistsSelect convert, SelectContext context) { + return SqlSelects.builder() + .forFinalStep(List.of(new ExistsSqlSelect(context.getLabel()))) + .build(); + } + + @Override + public Class getConversionClass() { + return ExistsSelect.class; + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/FirstValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/FirstValueSelectConverter.java new file mode 100644 index 0000000000..0bd17c4ed7 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/FirstValueSelectConverter.java @@ -0,0 +1,59 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.Collections; +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.FirstValueSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.FirstValueSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Field; + +public class FirstValueSelectConverter implements SelectConverter { + + @Override + public SqlSelects convert(FirstValueSelect firstSelect, SelectContext context) { + + ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( + 
context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + firstSelect.getColumn().getName(), + Object.class + ); + + List<Field<?>> validityDateFields = context.getValidityDate() + .map(validityDate -> validityDate.qualify(context.getConceptTables() + .getPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT))) + .map(ColumnDateRange::toFields) + .orElse(Collections.emptyList()); + + FirstValueSqlSelect firstValueSqlSelect = + FirstValueSqlSelect.builder() + .firstColumn(context.getConceptTables() + .qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased())) + .alias(firstSelect.getName()) + .orderByColumns(validityDateFields) + .functionProvider(context.getParentContext().getSqlDialect().getFunctionProvider()) + .build(); + + + ExtractingSqlSelect<Object> finalSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + firstValueSqlSelect.aliased().getName(), + Object.class + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(firstValueSqlSelect)) + .forFinalStep(List.of(finalSelect)) + .build(); + } + + @Override + public Class<FirstValueSelect> getConversionClass() { + return FirstValueSelect.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/LastValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/LastValueSelectConverter.java new file mode 100644 index 0000000000..2f4d9e1148 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/LastValueSelectConverter.java @@ -0,0 +1,57 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.Collections; +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.LastValueSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.LastValueSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Field; + +public class LastValueSelectConverter implements SelectConverter<LastValueSelect> { + + @Override + public SqlSelects convert(LastValueSelect lastSelect, SelectContext context) { + + String rootTableName = context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING); + String columnName = lastSelect.getColumn().getName(); + SqlSelect rootSelect = new ExtractingSqlSelect<>(rootTableName, columnName, Object.class); + + List<Field<?>> validityDateFields = context.getValidityDate() + .map(validityDate -> validityDate.qualify(context.getConceptTables() + .getPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT))) + .map(ColumnDateRange::toFields) + .orElse(Collections.emptyList()); + + Field<Object> qualifiedRootSelect = context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + String alias = lastSelect.getName(); + SqlSelect lastValueSqlSelect = LastValueSqlSelect.builder() + .lastColumn(qualifiedRootSelect) + .alias(alias) + .orderByColumns(validityDateFields) + .functionProvider(context.getParentContext().getSqlDialect().getFunctionProvider()) + .build(); + + ExtractingSqlSelect<Object> finalSelect = new
ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + lastValueSqlSelect.aliased().getName(), + Object.class + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(lastValueSqlSelect)) + .forFinalStep(List.of(finalSelect)) + .build(); + } + + @Override + public Class getConversionClass() { + return LastValueSelect.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/RandomValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/RandomValueSelectConverter.java new file mode 100644 index 0000000000..135e3f2fa4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/RandomValueSelectConverter.java @@ -0,0 +1,49 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.RandomValueSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.RandomValueSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import org.jooq.Field; + +public class RandomValueSelectConverter implements SelectConverter { + + @Override + public SqlSelects convert(RandomValueSelect randomSelect, SelectContext context) { + + String rootTableName = context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING); + String columnName = randomSelect.getColumn().getName(); + SqlSelect rootSelect = new ExtractingSqlSelect<>(rootTableName, columnName, Object.class); + + Field qualifiedRootSelect = context.getConceptTables().qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + String alias = randomSelect.getName(); + SqlSelect randomValueSqlSelect = RandomValueSqlSelect.builder() + .randomColumn(qualifiedRootSelect) + .alias(alias) + .functionProvider(context.getParentContext().getSqlDialect().getFunctionProvider()) + .build(); + + ExtractingSqlSelect finalSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + randomValueSqlSelect.aliased().getName(), + Object.class + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(randomValueSqlSelect)) + .forFinalStep(List.of(finalSelect)) + .build(); + } + + @Override + public Class getConversionClass() { + return RandomValueSelect.class; + } + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectContext.java new file mode 100644 index 0000000000..b3d28ace55 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectContext.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.Optional; + +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import 
com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptTables; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import lombok.Value; + +@Value +public class SelectContext implements Context { + ConversionContext parentContext; + CQConcept concept; + String label; + Optional validityDate; + ConceptTables conceptTables; +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConversions.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConversions.java new file mode 100644 index 0000000000..8395d79230 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConversions.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.sql.conversion.Conversions; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; + +public class SelectConversions extends Conversions { + + public SelectConversions(List> converters) { + super(converters); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConverter.java similarity index 69% rename from backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java rename to backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConverter.java index 1ec492c90b..258f45a9ab 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SelectConverter.java @@ -1,14 +1,14 @@ -package com.bakdata.conquery.sql.conversion.select; +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.sql.conversion.Converter; -import org.jooq.Field; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; /** * Converts a {@link com.bakdata.conquery.models.datasets.concepts.select.Select} to a field for a SQL SELECT statement. * * @param The type of Select this converter is responsible for. 
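 *
 * Illustrative example (invented CTE and column names, not code from this PR): implementations
 * typically emit one SqlSelect per CTE phase, each qualified on the predecessor table:
 * <pre>
 * ExtractingSqlSelect<Object> rootSelect = new ExtractingSqlSelect<>("concept_preprocessing", "value", Object.class);
 * Field<Object> aggInput = DSL.field(DSL.name("concept_aggregation_select", rootSelect.aliased().getName()));
 * SumSqlSelect sumSelect = new SumSqlSelect(aggInput, "value_sum");
 * ExtractingSqlSelect<Object> finalSelect = new ExtractingSqlSelect<>("concept_final", sumSelect.aliased().getName(), Object.class);
 * </pre>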
*/ -public interface SelectConverter extends Converter> { +public interface SelectConverter extends Converter { } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SumSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SumSelectConverter.java new file mode 100644 index 0000000000..0dea645cc4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/select/SumSelectConverter.java @@ -0,0 +1,48 @@ +package com.bakdata.conquery.sql.conversion.cqelement.concept.select; + +import java.util.List; + +import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.SumSelect; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.NumberMapUtil; +import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelects; +import com.bakdata.conquery.sql.conversion.model.select.SumSqlSelect; +import org.jooq.Field; + +public class SumSelectConverter implements SelectConverter { + + @Override + public SqlSelects convert(SumSelect sumSelect, SelectContext context) { + + Class numberClass = NumberMapUtil.NUMBER_MAP.get(sumSelect.getColumn().getType()); + + ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.PREPROCESSING), + sumSelect.getColumn().getName(), + numberClass + ); + + Field qualifiedRootSelect = context.getConceptTables() + .qualifyOnPredecessorTableName(ConceptCteStep.AGGREGATION_SELECT, rootSelect.aliased()); + SumSqlSelect sumGroupBy = new SumSqlSelect(qualifiedRootSelect, sumSelect.getName()); + + ExtractingSqlSelect finalSelect = new ExtractingSqlSelect<>( + context.getConceptTables().getPredecessorTableName(ConceptCteStep.FINAL), + sumGroupBy.aliased().getName(), + numberClass + ); + + return SqlSelects.builder() + .forPreprocessingStep(List.of(rootSelect)) + .forAggregationSelectStep(List.of(sumGroupBy)) + .forFinalStep(List.of(finalSelect)) + .build(); + } + + @Override + public Class getConversionClass() { + return SumSelect.class; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/AnsiSqlIntervalPacker.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/AnsiSqlIntervalPacker.java new file mode 100644 index 0000000000..83b8b47536 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/AnsiSqlIntervalPacker.java @@ -0,0 +1,128 @@ +package com.bakdata.conquery.sql.conversion.cqelement.intervalpacking; + +import java.math.BigDecimal; +import java.sql.Date; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.dialect.IntervalPacker; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QualifyingUtil; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +public class 
AnsiSqlIntervalPacker implements IntervalPacker { + + public QueryStep createIntervalPackingSteps(IntervalPackingContext context) { + QueryStep previousEndStep = createPreviousEndStep(context); + QueryStep rangeIndexStep = createRangeIndexStep(previousEndStep, context); + QueryStep intervalCompleteStep = createIntervalCompleteStep(rangeIndexStep, context); + return intervalCompleteStep; + } + + private QueryStep createPreviousEndStep(IntervalPackingContext context) { + + String sourceTableName = context.getIntervalPackingTables().getValidityDateSourceTableName(); + Field primaryColumn = QualifyingUtil.qualify(context.getPrimaryColumn(), sourceTableName); + ColumnDateRange validityDate = context.getValidityDate().qualify(sourceTableName); + + Field previousEnd = DSL.max(validityDate.getEnd()) + .over(DSL.partitionBy(primaryColumn) + .orderBy(validityDate.getStart(), validityDate.getEnd()) + .rowsBetweenUnboundedPreceding() + .andPreceding(1)) + .as(IntervalPacker.PREVIOUS_END_FIELD_NAME); + + ArrayList qualifiedSelects = new ArrayList<>(QualifyingUtil.qualify(context.getCarryThroughSelects(), sourceTableName)); + qualifiedSelects.add(new FieldWrapper(previousEnd)); + + Selects previousEndSelects = new Selects( + primaryColumn, + Optional.of(validityDate), + qualifiedSelects + ); + + return QueryStep.builder() + .cteName(context.getIntervalPackingTables().cteName(IntervalPackingCteStep.PREVIOUS_END)) + .selects(previousEndSelects) + .fromTable(QueryStep.toTableLike(sourceTableName)) + .predecessors(context.getPredecessor() == null ? Collections.emptyList() : List.of(context.getPredecessor())) + .build(); + } + + private QueryStep createRangeIndexStep(QueryStep previousEndStep, IntervalPackingContext context) { + + String previousEndCteName = previousEndStep.getCteName(); + Selects previousEndSelects = previousEndStep.getQualifiedSelects(); + Field primaryColumn = previousEndSelects.getPrimaryColumn(); + ColumnDateRange validityDate = previousEndSelects.getValidityDate().get(); + Field previousEnd = DSL.field(DSL.name(previousEndCteName, IntervalPacker.PREVIOUS_END_FIELD_NAME), Date.class); + + Field rangeIndex = + DSL.sum( + DSL.when(validityDate.getStart().greaterThan(previousEnd), DSL.val(1)) + .otherwise(DSL.inline(null, Integer.class))) + .over(DSL.partitionBy(primaryColumn) + .orderBy(validityDate.getStart(), validityDate.getEnd()) + .rowsUnboundedPreceding()) + .as(IntervalPacker.RANGE_INDEX_FIELD_NAME); + + ArrayList qualifiedSelects = new ArrayList<>(QualifyingUtil.qualify(context.getCarryThroughSelects(), previousEndCteName)); + qualifiedSelects.add(new FieldWrapper(rangeIndex)); + + Selects rangeIndexSelects = new Selects( + primaryColumn, + Optional.of(validityDate), + qualifiedSelects + ); + + return QueryStep.builder() + .cteName(context.getIntervalPackingTables().cteName(IntervalPackingCteStep.RANGE_INDEX)) + .selects(rangeIndexSelects) + .fromTable(QueryStep.toTableLike(previousEndCteName)) + .predecessors(List.of(previousEndStep)) + .build(); + } + + private QueryStep createIntervalCompleteStep(QueryStep rangeIndexStep, IntervalPackingContext context) { + + String rangeIndexCteName = rangeIndexStep.getCteName(); + Selects rangeIndexSelects = rangeIndexStep.getQualifiedSelects(); + Field primaryColumn = rangeIndexSelects.getPrimaryColumn(); + ColumnDateRange validityDate = rangeIndexSelects.getValidityDate().get(); + + Field rangeStart = DSL.min(validityDate.getStart()).as(IntervalPacker.RANGE_START_MIN_FIELD_NAME); + Field rangeEnd = 
DSL.max(validityDate.getEnd()).as(IntervalPacker.RANGE_END_MAX_FIELD_NAME); + Field<BigDecimal> rangeIndex = DSL.field(DSL.name(rangeIndexCteName, IntervalPacker.RANGE_INDEX_FIELD_NAME), BigDecimal.class); + + List<SqlSelect> qualifiedSelects = QualifyingUtil.qualify(context.getCarryThroughSelects(), rangeIndexCteName); + Selects intervalCompleteSelects = new Selects( + primaryColumn, + Optional.of(ColumnDateRange.of(rangeStart, rangeEnd)), + qualifiedSelects + ); + + // we group range start and end by range index + List<Field<?>> groupBySelects = new ArrayList<>(); + groupBySelects.add(primaryColumn); + groupBySelects.add(rangeIndex); + qualifiedSelects.stream().map(SqlSelect::select).forEach(groupBySelects::add); + + return QueryStep.builder() + .cteName(context.getIntervalPackingTables().cteName(IntervalPackingCteStep.INTERVAL_COMPLETE)) + .selects(intervalCompleteSelects) + .fromTable(QueryStep.toTableLike(rangeIndexCteName)) + .predecessors(List.of(rangeIndexStep)) + .groupBy(groupBySelects) + .build(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingContext.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingContext.java new file mode 100644 index 0000000000..8785e4b94e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingContext.java @@ -0,0 +1,63 @@ +package com.bakdata.conquery.sql.conversion.cqelement.intervalpacking; + +import java.util.Collections; +import java.util.List; + +import javax.annotation.CheckForNull; + +import com.bakdata.conquery.sql.conversion.Context; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptTables; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import lombok.Value; +import org.jooq.Field; + +@Value +public class IntervalPackingContext implements Context { + + String nodeLabel; + Field<Object> primaryColumn; + ColumnDateRange validityDate; + QueryStep predecessor; + IntervalPackingTables intervalPackingTables; + List<SqlSelect> carryThroughSelects; + + public IntervalPackingContext( + String conceptLabel, + Field<Object> primaryColumn, + ColumnDateRange validityDate, + ConceptTables conceptTables + ) { + this.nodeLabel = conceptLabel; + this.primaryColumn = primaryColumn; + this.validityDate = validityDate; + this.predecessor = null; // we don't need a predecessor because the interval packing steps will be joined with the other concept steps + this.intervalPackingTables = IntervalPackingTables.forConcept(conceptLabel, conceptTables); + this.carryThroughSelects = Collections.emptyList(); + } + + /** + * @param nodeLabel A unique CTE label which will be suffixed with the interval packing CTE names. + * @param predeceasingStep The preceding step containing the validity date which should be interval-packed. + * @param carryThroughSelects The selects you want to carry through all interval packing steps. They won't get touched besides qualifying.
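+ *
+ * For illustration, simplified (invented column names, not the literal rendered output): the three
+ * CTEs produced by AnsiSqlIntervalPacker above compute, per entity,
+ * <pre>
+ * previous_end:      MAX(date_end) OVER (PARTITION BY pid ORDER BY date_start, date_end
+ *                                        ROWS BETWEEN UNBOUNDED PRECEDING AND 1 PRECEDING)
+ * range_index:       SUM(CASE WHEN date_start > previous_end THEN 1 END)
+ *                    OVER (PARTITION BY pid ORDER BY date_start, date_end ROWS UNBOUNDED PRECEDING)
+ * interval_complete: MIN(date_start), MAX(date_end) ... GROUP BY pid, range_index
+ * </pre>
+ * so touching or overlapping validity ranges collapse into one row per contiguous interval.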
+ */ + public IntervalPackingContext( + String nodeLabel, + QueryStep predeceasingStep, + List carryThroughSelects + ) { + this.nodeLabel = nodeLabel; + this.primaryColumn = predeceasingStep.getSelects().getPrimaryColumn(); + this.validityDate = predeceasingStep.getSelects().getValidityDate().get(); + this.predecessor = predeceasingStep; + this.carryThroughSelects = carryThroughSelects; + this.intervalPackingTables = IntervalPackingTables.forGenericQueryStep(nodeLabel, predeceasingStep); + } + + @CheckForNull + public QueryStep getPredecessor() { + return predecessor; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingCteStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingCteStep.java new file mode 100644 index 0000000000..71740fe1b8 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingCteStep.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.sql.conversion.cqelement.intervalpacking; + +import com.bakdata.conquery.sql.conversion.model.CteStep; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +enum IntervalPackingCteStep implements CteStep { + + PREVIOUS_END("_previous_end"), + RANGE_INDEX("_range_index"), + INTERVAL_COMPLETE("_interval_complete"); + + private final String suffix; + + public String cteName(String nodeLabel) { + return "%s%s".formatted(nodeLabel, this.suffix); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingTables.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingTables.java new file mode 100644 index 0000000000..71ca0a229f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/IntervalPackingTables.java @@ -0,0 +1,49 @@ +package com.bakdata.conquery.sql.conversion.cqelement.intervalpacking; + +import java.util.Arrays; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; + +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; +import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptTables; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Getter; + +@AllArgsConstructor(access = AccessLevel.PRIVATE) +class IntervalPackingTables { + + @Getter + private final String validityDateSourceTableName; + private final Map cteNames; + + public static IntervalPackingTables forConcept(String nodeLabel, ConceptTables conceptTables) { + Map cteNames = Arrays.stream(IntervalPackingCteStep.values()) + .collect(Collectors.toMap( + Function.identity(), + step -> step.cteName(nodeLabel) + )); + String preprocessingCteName = conceptTables.cteName(ConceptCteStep.PREPROCESSING); + return new IntervalPackingTables(preprocessingCteName, cteNames); + } + + public static IntervalPackingTables forGenericQueryStep(String nodeLabel, QueryStep predecessor) { + Map cteNames = createCteNameMap(nodeLabel); + return new IntervalPackingTables(predecessor.getCteName(), cteNames); + } + + public String cteName(IntervalPackingCteStep intervalPackingCteStep) { + return this.cteNames.get(intervalPackingCteStep); + } + + private static Map createCteNameMap(String nodeLabel) { + return Arrays.stream(IntervalPackingCteStep.values()) + .collect(Collectors.toMap( + 
Function.identity(), + step -> step.cteName(nodeLabel) + )); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/PostgreSqlIntervalPacker.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/PostgreSqlIntervalPacker.java new file mode 100644 index 0000000000..7ce0664b7c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/PostgreSqlIntervalPacker.java @@ -0,0 +1,49 @@ +package com.bakdata.conquery.sql.conversion.cqelement.intervalpacking; + +import java.util.Collections; +import java.util.List; +import java.util.Optional; + +import com.bakdata.conquery.sql.conversion.dialect.IntervalPacker; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QualifyingUtil; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.Selects; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; + +/** + * PostgreSql supports interval packing with a native range function. + *
+ * See && range function operator
+ */
+@RequiredArgsConstructor
+public class PostgreSqlIntervalPacker implements IntervalPacker {
+
+	private final SqlFunctionProvider functionProvider;
+
+	@Override
+	public QueryStep createIntervalPackingSteps(IntervalPackingContext context) {
+
+		String sourceTableName = context.getIntervalPackingTables().getValidityDateSourceTableName();
+		Field<Object> primaryColumn = QualifyingUtil.qualify(context.getPrimaryColumn(), sourceTableName);
+		ColumnDateRange qualifiedValidityDate = context.getValidityDate().qualify(sourceTableName);
+		ColumnDateRange aggregatedValidityDate = this.functionProvider.aggregated(qualifiedValidityDate)
+															.asValidityDateRange(context.getNodeLabel());
+
+		Selects selectsWithAggregatedValidityDate = new Selects(
+				primaryColumn,
+				Optional.of(aggregatedValidityDate),
+				Collections.emptyList()
+		);
+
+		return QueryStep.builder()
+						.cteName(context.getIntervalPackingTables().cteName(IntervalPackingCteStep.INTERVAL_COMPLETE))
+						.selects(selectsWithAggregatedValidityDate)
+						.fromTable(QueryStep.toTableLike(sourceTableName))
+						.groupBy(List.of(primaryColumn))
+						.build();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/interval-packing.md b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/interval-packing.md
new file mode 100644
index 0000000000..82362e8802
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/intervalpacking/interval-packing.md
@@ -0,0 +1,190 @@
+# What is interval packing?
+
+The solution for the interval packing problem was taken
+from [this article](https://www.itprotoday.com/sql-server/new-solution-packing-intervals-problem).
+
+Interval packing refers to packing groups of intersecting date intervals into their respective continuous intervals.
+It allows us to aggregate the date ranges of events into date range sets for a given entity.
+
+A short example: consider this SQL query, where the filter criterion for subjects is `"foo" = 'bar'` and
+the validity date is represented by a date range between the dates `"date_start"` and `"date_end"`.
+
+When aggregating dates, we treat the end date of date ranges as excluded, but in our database tables, end dates are
+included - that's why we add +1 day to the end date from the database table.
+
+```sql
+select "id",
+       "foo",
+       "date_start",
+       "date_end" + 1
+from "table"
+where "foo" = 'bar'
+```
+
+and suppose the result set looks like this:
+
+| id | foo | date\_start | date\_end  |
+|:---|:----|:------------|:-----------|
+| 1  | bar | 2013-01-02  | 2013-02-02 |
+| 1  | bar | 2012-01-01  | 2012-07-02 |
+| 1  | bar | 2012-06-01  | 2013-01-01 |
+
+While the first entry does not intersect with the two others, entries 2 and 3 overlap. We have to combine these two
+entries by creating a new range from `2012-01-01` to `2013-01-01`. But how do we achieve this using SQL?
+
+## Solution for interval packing
+
+Taking the short example from above, we want to explain how we do this using the
+[AnsiSqlIntervalPacker](./AnsiSqlIntervalPacker.java). In outline, we need to
+create 3 consecutive common table expressions (CTEs):
+
+- `previous_end`: Adds the previous end date to the table
+- `range_index`: Creates and adds a counter for each new range
+- `interval_complete`: Converts this information into the desired format
+
+The last CTE will then contain the aggregated validity date ranges for each subject.
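+
+For orientation, here is a minimal sketch of how a converter might drive these three CTEs through the new
+`IntervalPackingContext` API; `previousStep` and `dialect` are placeholders for a predeceasing `QueryStep`
+(exposing a primary column and a validity date) and the active `SqlDialect`:
+
+```java
+// assumed to be in scope: QueryStep previousStep; SqlDialect dialect;
+// assumed import: java.util.Collections
+IntervalPackingContext context = new IntervalPackingContext(
+		"my_node",               // unique CTE label, suffixed with "_previous_end" etc.
+		previousStep,            // predeceasing step containing the validity date to pack
+		Collections.emptyList()  // no extra selects to carry through the steps
+);
+QueryStep intervalComplete = dialect.getIntervalPacker()
+									.createIntervalPackingSteps(context);
+// 'intervalComplete' now references the "my_node_interval_complete" CTE
+```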
+
+### `previous_end`
+
+This CTE generates the `previous_end` select: the `previous_end` value for each row will be the maximum `date_end`
+within the same `id` partition that occurs before the current row's `date_start`.
+
+- `partition by "id"` creates a window frame for each subject.
+- `order by "date_start", "date_end"`: arranges the rows in ascending order of their start dates and, if start
+  dates are the same, it ensures that the event with the earlier end date is considered first within the window frame.
+- `rows between unbounded preceding and 1 preceding` ensures that the maximum `date_end` is calculated over the rows
+  before the current row only. Without this clause, the window function would consider all rows in the
+  partition, which might include rows with end date values greater than the current row's values.
+
+```sql
+select "id",
+       "date_start",
+       "date_end",
+       max("date_end") over (
+           partition by "id"
+           order by "date_start", "date_end"
+           rows between unbounded preceding and 1 preceding
+           ) as "previous_end"
+from "base"
+```
+
+The result of the `previous_end` query looks like this:
+
+| id | date\_start | date\_end  | previous\_end |
+|:---|:------------|:-----------|:--------------|
+| 1  | 2012-01-01  | 2012-07-02 | null          |
+| 1  | 2012-06-01  | 2013-01-01 | 2012-07-02    |
+| 1  | 2013-01-02  | 2013-02-02 | 2013-01-01    |
+
+The first entry's `previous_end` is null because it has no preceding row.
+Each following entry contains the maximum `date_end` of all its preceding rows as its previous end.
+
+### `range_index`
+
+The `previous_end` step builds the foundation for the `range_index` step: via the corresponding select, we check
+whether a row's `date_start` is greater than its `previous_end` date. This indicates that two adjacent validity date
+ranges do not intersect. By generating the `range_index`, we build the foundation to group intersecting date ranges
+together in the following step.
+
+- Again, we create a window frame for each subject (`partition by "id"`).
+- Each time the current row's `date_start` > `previous_end`, this row is marked with a `1`.
+- We calculate the `sum` for all rows from the beginning of the partition up to the current
+  row (`rows unbounded preceding`).
+- Each time a row does not intersect with the previous range, the `range_index` is thus increased by `1`.
+- If the current row's `date_start` <= `previous_end`, meaning the row's validity date intersects
+  with the previous row's one, the current `range_index` value is kept.
+
+```sql
+select "id",
+       "date_start",
+       "date_end",
+       sum(
+           case
+               when "date_start" > "previous_end" then 1
+               else null
+           end
+       ) over (
+           partition by "id"
+           order by "date_start", "date_end"
+           rows unbounded preceding
+           ) "range_index"
+from "previous_end"
+```
+
+The result of `range_index` looks like this:
+
+| id | date\_start | date\_end  | range\_index |
+|:---|:------------|:-----------|:-------------|
+| 1  | 2012-01-01  | 2012-07-02 | null         |
+| 1  | 2012-06-01  | 2013-01-01 | null         |
+| 1  | 2013-01-02  | 2013-02-02 | 1            |
+
+The first two entries intersect, because the `date_start` `2012-06-01` of the second entry is less than or equal to
+the `previous_end` `2012-07-02` of the first entry. In contrast, the `date_start` `2013-01-02` of the third entry
+starts after the `previous_end` `2013-01-01` of the second entry. Thus, it marks the beginning of a new,
+non-intersecting range.
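+
+Note that `range_index` stays `null` for every row belonging to a subject's first packed range. This is fine for the
+next step: standard SQL `group by` treats all `null` values as equal and places them into a single group, so the two
+intersecting rows above still end up in the same bucket, while the marked row forms its own group.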
+
+### `interval_complete`
+
+The last step selects the minimum `date_start` and maximum `date_end` of the entries of the
+`range_index` table, grouped by `id` and `range_index`. This ensures that there will be only unique date range values
+in the final date range set of each subject.
+
+```sql
+select "id",
+       min("date_start") "range_start_min",
+       max("date_end")   "range_end_max"
+from "range_index"
+group by "id", "range_index"
+```
+
+The results look like this:
+
+| id | range\_start\_min | range\_end\_max |
+|:---|:------------------|:----------------|
+| 1  | 2013-01-02        | 2013-02-02      |
+| 1  | 2012-01-01        | 2013-01-01      |
+
+In the final interval packing result, we now have 2 validity date ranges. If you compare it to our "base" table,
+we now have the combined validity range from `2012-01-01` to `2013-01-01` as the first entry and the succeeding,
+non-intersecting validity date range from `2013-01-02` to `2013-02-02` as the second entry. Remember that we added +1
+day to all end dates at the beginning, so the end dates of our interval-packed ranges are excluded!
+
+### Combined
+
+```sql
+with "base" as
+         (select "id",
+                 "foo",
+                 "date_start",
+                 "date_end" + 1
+          from "table"
+          where "foo" = 'bar'),
+     "previous_end" as
+         (select "id",
+                 "date_start",
+                 "date_end",
+                 max("date_end") over (
+                     partition by "id"
+                     order by "date_start", "date_end"
+                     rows between unbounded preceding and 1 preceding
+                     ) as "previous_end"
+          from "base"),
+     "range_index" as
+         (select "id",
+                 "date_start",
+                 "date_end",
+                 sum(case
+                         when "date_start" > "previous_end" then 1
+                         else null
+                     end) over (
+                     partition by "id"
+                     order by "date_start", "date_end"
+                     rows unbounded preceding
+                     ) "range_index"
+          from "previous_end")
+select "id",
+       min("date_start") "range_start_min",
+       max("date_end")   "range_end_max"
+from "range_index"
+group by "id", "range_index";
+```
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java
index 41887c2a2c..c1798a0489 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlDialect.java
@@ -2,20 +2,27 @@
 
 import java.util.List;
 
-import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue;
 import com.bakdata.conquery.models.datasets.concepts.select.Select;
 import com.bakdata.conquery.models.query.Visitable;
 import com.bakdata.conquery.sql.conversion.NodeConverter;
-import com.bakdata.conquery.sql.conversion.filter.FilterConverter;
-import com.bakdata.conquery.sql.conversion.select.SelectConverter;
+import com.bakdata.conquery.sql.conversion.cqelement.aggregation.AnsiSqlDateAggregator;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterConverter;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConverter;
+import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.AnsiSqlIntervalPacker;
 import org.jooq.DSLContext;
 
 public class HanaSqlDialect implements SqlDialect {
 
+	private final SqlFunctionProvider hanaSqlFunctionProvider;
+	private final IntervalPacker hanaIntervalPacker;
+	private final SqlDateAggregator hanaSqlDateAggregator;
 	private final DSLContext dslContext;
 
 	public HanaSqlDialect(DSLContext dslContext) {
 		this.dslContext = dslContext;
+		this.hanaSqlFunctionProvider = new HanaSqlFunctionProvider();
+		this.hanaIntervalPacker = new AnsiSqlIntervalPacker();
+		this.hanaSqlDateAggregator = new
AnsiSqlDateAggregator(this.hanaSqlFunctionProvider, this.hanaIntervalPacker); } @Override @@ -29,7 +36,7 @@ public List> getNodeConverters() { } @Override - public List>> getFilterConverters() { + public List> getFilterConverters() { return getDefaultFilterConverters(); } @@ -39,8 +46,18 @@ public List> getSelectConverters() { } @Override - public SqlFunctionProvider getFunction() { - return new HanaSqlFunctionProvider(); + public SqlFunctionProvider getFunctionProvider() { + return this.hanaSqlFunctionProvider; + } + + @Override + public IntervalPacker getIntervalPacker() { + return this.hanaIntervalPacker; + } + + @Override + public SqlDateAggregator getDateAggregator() { + return this.hanaSqlDateAggregator; } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java index 13a7682aca..908acf9bd3 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java @@ -2,22 +2,33 @@ import java.sql.Date; import java.time.temporal.ChronoUnit; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import java.util.List; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.sql.models.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; import org.jooq.Condition; import org.jooq.Field; import org.jooq.Name; +import org.jooq.Param; import org.jooq.impl.DSL; -public class HanaSqlFunctionProvider implements SqlFunctionProvider { +class HanaSqlFunctionProvider implements SqlFunctionProvider { - private static final String INFINITY_DATE_VALUE = "9999-12-31"; - private static final String MINUS_INFINITY_DATE_VALUE = "0001-01-01"; + public static final char DELIMITER = ','; + private static final String MAX_DATE_VALUE = "9999-12-31"; + private static final String MIN_DATE_VALUE = "0001-01-01"; + + @Override + public String getMinDateExpression() { + return MIN_DATE_VALUE; + } + + @Override + public String getMaxDateExpression() { + return MAX_DATE_VALUE; + } @Override public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRange validityDate) { @@ -35,8 +46,8 @@ public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRang @Override public ColumnDateRange daterange(CDateRange dateRestriction) { - String startDateExpression = MINUS_INFINITY_DATE_VALUE; - String endDateExpression = INFINITY_DATE_VALUE; + String startDateExpression = MIN_DATE_VALUE; + String endDateExpression = MAX_DATE_VALUE; if (dateRestriction.hasLowerBound()) { startDateExpression = dateRestriction.getMin().toString(); @@ -50,7 +61,7 @@ public ColumnDateRange daterange(CDateRange dateRestriction) { } @Override - public ColumnDateRange daterange(ValidityDate validityDate, String conceptLabel) { + public ColumnDateRange daterange(ValidityDate validityDate, String qualifier, String conceptLabel) { Column startColumn; Column endColumn; @@ -64,27 +75,64 @@ public ColumnDateRange daterange(ValidityDate validityDate, String conceptLabel) endColumn = validityDate.getColumn(); } + Field rangeStart = DSL.coalesce( + DSL.field(DSL.name(qualifier, startColumn.getName()), Date.class), + toDateField(MIN_DATE_VALUE) + ); // 
when aggregating date ranges, we want to treat the last day of the range as excluded,
 	// so when using the date value of the end column, we add +1 day as end of the date range
-		Field<Date> rangeStart = DSL.field(DSL.name(startColumn.getName()), Date.class);
-		Field<Date> rangeEnd = addDay(endColumn);
+		Field<Date> rangeEnd = DSL.coalesce(
+				addDays(DSL.field(DSL.name(qualifier, endColumn.getName()), Date.class), 1),
+				toDateField(MAX_DATE_VALUE)
+		);
 
 		return ColumnDateRange.of(rangeStart, rangeEnd)
 							  .asValidityDateRange(conceptLabel);
 	}
 
 	@Override
-	public Field daterangeString(ColumnDateRange columnDateRange) {
+	public ColumnDateRange aggregated(ColumnDateRange columnDateRange) {
+		return ColumnDateRange.of(
+				DSL.min(columnDateRange.getStart()),
+				DSL.max(columnDateRange.getEnd())
+		);
+	}
+
+	@Override
+	public Field<String> validityDateStringAggregation(ColumnDateRange columnDateRange) {
 
 		if (columnDateRange.isSingleColumnRange()) {
 			throw new UnsupportedOperationException("HANA does not support single-column date ranges.");
 		}
 
-		String datesConcatenated = Stream.of(columnDateRange.getStart(), columnDateRange.getEnd())
-										 .map(" || %s || "::formatted)
-										 .collect(Collectors.joining(" ',' ", "'['", "')'"));
+		Field<Date> startDate = columnDateRange.getStart();
+		Field<Date> endDate = columnDateRange.getEnd();
 
-		return DSL.field(datesConcatenated);
+		Param<Integer> dateLength = DSL.val(DEFAULT_DATE_FORMAT.length());
+		Field<String> startDateExpression = toVarcharField(startDate, dateLength);
+		Field<String> endDateExpression = toVarcharField(endDate, dateLength);
+
+		Field<String> withMinDateReplaced = replace(startDateExpression, MIN_DATE_VALUE, MINUS_INFINITY_SIGN);
+		Field<String> withMaxDateReplaced = replace(endDateExpression, MAX_DATE_VALUE, INFINITY_SIGN);
+
+		// add interval braces to ranges: start is always included, end is always excluded except if it's the maximum/infinity date
+		Field<String> enclosedMinDate = DSL.field("'[' || %s".formatted(withMinDateReplaced), String.class);
+		Field<String> enclosedMaxDate = DSL.when(withMaxDateReplaced.like(INFINITY_SIGN), DSL.field("%s || ']'".formatted(withMaxDateReplaced), String.class))
+										   .otherwise(DSL.field("%s || ')'".formatted(withMaxDateReplaced), String.class));
+
+		Field<String> rangeConcatenated = DSL.field("%s || ',' || %s".formatted(enclosedMinDate, enclosedMaxDate), String.class);
+
+		Field<String> stringAggregation = DSL.field(
+				"STRING_AGG({0}, {1} {2})",
+				String.class,
+				rangeConcatenated,
+				DSL.toChar(DELIMITER),
+				DSL.orderBy(startDate)
+		);
+
+		// encapsulate all ranges (including empty ranges) within curly braces
+		return DSL.when(stringAggregation.isNull(), DSL.field(DSL.val("{}")))
+				  .otherwise(DSL.field(("'{' || %s || '}'".formatted(stringAggregation)), String.class));
 	}
 
 	@Override
@@ -122,13 +170,39 @@ public Field toDateField(String dateExpression) {
 		);
 	}
 
-	private Field<Date> addDay(Column dateColumn) {
+	@Override
+	public Field<?> first(Field<?> column, List<Field<?>> orderByColumns) {
+		if (orderByColumns.isEmpty()) {
+			orderByColumns = List.of(column);
+		}
+		return DSL.field(DSL.sql("FIRST_VALUE({0} {1})", column, DSL.orderBy(orderByColumns)));
+	}
+
+	@Override
+	public Field<?> last(Field<?> column, List<Field<?>> orderByColumns) {
+		if (orderByColumns.isEmpty()) {
+			orderByColumns = List.of(column);
+		}
+		return DSL.field(DSL.sql("LAST_VALUE({0} {1} DESC)", column, DSL.orderBy(orderByColumns)));
+	}
+
+	@Override
+	public Field<?> random(Field<?> column) {
+		return DSL.field(DSL.sql("FIRST_VALUE({0} {1})", column, DSL.orderBy(DSL.function("RAND", Object.class))));
+	}
+
+	@Override
+	public Field<Date> addDays(Field<Date> dateColumn, int amountOfDays) {
 		return
DSL.function( "ADD_DAYS", Date.class, - DSL.field(DSL.name(dateColumn.getName())), - DSL.val(1) + dateColumn, + DSL.val(amountOfDays) ); } + private Field toVarcharField(Field startDate, Param dateExpressionLength) { + return DSL.field("CAST({0} AS VARCHAR({1}))", String.class, startDate, dateExpressionLength); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/IntervalPacker.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/IntervalPacker.java new file mode 100644 index 0000000000..15fcd74bee --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/IntervalPacker.java @@ -0,0 +1,27 @@ +package com.bakdata.conquery.sql.conversion.dialect; + + +import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingContext; +import com.bakdata.conquery.sql.conversion.model.QueryStep; + +/** + * Packing intervals involves packing groups of intersecting validity date intervals into their respective continuous intervals. + *
+ * See Interval Packing + */ +public interface IntervalPacker { + + String PREVIOUS_END_FIELD_NAME = "previous_end"; + String RANGE_INDEX_FIELD_NAME = "range_index"; + String RANGE_START_MIN_FIELD_NAME = "range_start_min"; + String RANGE_END_MAX_FIELD_NAME = "range_end_max"; + + /** + * Depending on the dialect, one or more {@link QueryStep}s are created to aggregate multiple validity date entries of the same subject. + * + *
+ * Only the last {@link QueryStep} containing the aggregated validity dates will be returned. + */ + QueryStep createIntervalPackingSteps(IntervalPackingContext context); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java index 406a412792..3c6e140c3f 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlDialect.java @@ -2,20 +2,27 @@ import java.util.List; -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.sql.conversion.filter.FilterConverter; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.select.SelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.aggregation.PostgreSqlDateAggregator; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.PostgreSqlIntervalPacker; import org.jooq.DSLContext; public class PostgreSqlDialect implements SqlDialect { + private final SqlFunctionProvider postgresqlFunctionProvider; + private final IntervalPacker postgresqlIntervalPacker; + private final SqlDateAggregator postgresqlDateAggregator; private final DSLContext dslContext; public PostgreSqlDialect(DSLContext dslContext) { this.dslContext = dslContext; + this.postgresqlFunctionProvider = new PostgreSqlFunctionProvider(); + this.postgresqlIntervalPacker = new PostgreSqlIntervalPacker(this.postgresqlFunctionProvider); + this.postgresqlDateAggregator = new PostgreSqlDateAggregator(this.postgresqlFunctionProvider); } @Override @@ -23,13 +30,18 @@ public DSLContext getDSLContext() { return this.dslContext; } + @Override + public boolean requiresAggregationInFinalStep() { + return false; + } + @Override public List> getNodeConverters() { return getDefaultNodeConverters(); } @Override - public List>> getFilterConverters() { + public List> getFilterConverters() { return getDefaultFilterConverters(); } @@ -39,8 +51,18 @@ public List> getSelectConverters() { } @Override - public SqlFunctionProvider getFunction() { - return new PostgreSqlFunctionProvider(); + public SqlFunctionProvider getFunctionProvider() { + return this.postgresqlFunctionProvider; + } + + @Override + public IntervalPacker getIntervalPacker() { + return this.postgresqlIntervalPacker; + } + + @Override + public SqlDateAggregator getDateAggregator() { + return this.postgresqlDateAggregator; } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java index 60925126bb..566006b6d1 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -2,15 +2,18 @@ import java.sql.Date; import java.time.temporal.ChronoUnit; +import java.util.List; +import java.util.stream.Collectors; import com.bakdata.conquery.models.common.daterange.CDateRange; import 
com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.sql.models.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; import org.jooq.Condition; import org.jooq.DatePart; import org.jooq.Field; import org.jooq.Name; +import org.jooq.WindowSpecificationRowsStep; import org.jooq.impl.DSL; /** @@ -18,11 +21,21 @@ * * @see PostgreSQL Documentation */ -public class PostgreSqlFunctionProvider implements SqlFunctionProvider { +class PostgreSqlFunctionProvider implements SqlFunctionProvider { private static final String INFINITY_DATE_VALUE = "infinity"; private static final String MINUS_INFINITY_DATE_VALUE = "-infinity"; + @Override + public String getMaxDateExpression() { + return INFINITY_DATE_VALUE; + } + + @Override + public String getMinDateExpression() { + return MINUS_INFINITY_DATE_VALUE; + } + @Override public Condition dateRestriction(ColumnDateRange dateRestriction, ColumnDateRange validityDate) { if (!validityDate.isSingleColumnRange()) { @@ -51,10 +64,12 @@ public ColumnDateRange daterange(CDateRange dateRestriction) { endDateExpression = dateRestriction.getMax().toString(); } - Field dateRestrictionRange = DSL.field( - "daterange({0}::date, {1}::date, '[]')", - DSL.val(startDateExpression), - DSL.val(endDateExpression) + Field dateRestrictionRange = DSL.function( + "daterange", + Object.class, + toDateField(startDateExpression), + toDateField(endDateExpression), + DSL.val("[]") ); return ColumnDateRange.of(dateRestrictionRange) @@ -62,40 +77,57 @@ public ColumnDateRange daterange(CDateRange dateRestriction) { } @Override - public ColumnDateRange daterange(ValidityDate validityDate, String alias) { + public ColumnDateRange daterange(ValidityDate validityDate, String qualifier, String conceptLabel) { - Field dateRange; + Field dateRange; if (validityDate.getEndColumn() != null) { - Column startColumn = validityDate.getStartColumn(); - Column endColumn = validityDate.getEndColumn(); + Field startColumn = DSL.coalesce( + DSL.field(DSL.name(qualifier, validityDate.getStartColumn().getName())), + toDateField(MINUS_INFINITY_DATE_VALUE) + ); + Field endColumn = DSL.coalesce( + DSL.field(DSL.name(qualifier, validityDate.getEndColumn().getName())), + toDateField(INFINITY_DATE_VALUE) + ); dateRange = daterange(startColumn, endColumn, "[]"); } else { - Column column = validityDate.getColumn(); - dateRange = switch (column.getType()) { + Column validityDateColumn = validityDate.getColumn(); + dateRange = switch (validityDateColumn.getType()) { // if validityDateColumn is a DATE_RANGE we can make use of Postgres' integrated daterange type. 
- case DATE_RANGE -> DSL.field(DSL.name(column.getName())); + case DATE_RANGE -> DSL.field(validityDateColumn.getName()); // if the validity date column is not of daterange type, we construct it manually - case DATE -> daterange(column, column, "[]"); + case DATE -> { + Field column = DSL.field(DSL.name(qualifier, validityDate.getColumn().getName()), Date.class); + Field startColumn = DSL.coalesce(column, toDateField(MINUS_INFINITY_DATE_VALUE)); + Field endColumn = DSL.coalesce(column, toDateField(INFINITY_DATE_VALUE)); + yield daterange(startColumn, endColumn, "[]"); + } default -> throw new IllegalArgumentException( - "Given column type '%s' can't be converted to a proper date restriction.".formatted(column.getType()) + "Given column type '%s' can't be converted to a proper date restriction.".formatted(validityDateColumn.getType()) ); }; } return ColumnDateRange.of(dateRange) - .asValidityDateRange(alias); + .asValidityDateRange(conceptLabel); } @Override - public Field daterangeString(ColumnDateRange columnDateRange) { + public ColumnDateRange aggregated(ColumnDateRange columnDateRange) { + return ColumnDateRange.of(DSL.field("range_agg({0})", columnDateRange.getRange())); + } + + @Override + public Field validityDateStringAggregation(ColumnDateRange columnDateRange) { if (!columnDateRange.isSingleColumnRange()) { throw new UnsupportedOperationException("All column date ranges should have been converted to single column ranges."); } - return columnDateRange.getRange(); + Field aggregatedValidityDate = DSL.field("%s::varchar".formatted(columnDateRange.getRange().toString()), String.class); + return replace(aggregatedValidityDate, INFINITY_DATE_VALUE, INFINITY_SIGN); } @Override @@ -108,7 +140,7 @@ public Field dateDistance(ChronoUnit timeUnit, Name startDateColumnName return endDate.minus(startDate).coerce(Integer.class); } - Field age = DSL.function("AGE", Object.class, endDate, startDate); + Field age = DSL.function("AGE", Integer.class, endDate, startDate); return switch (timeUnit) { case MONTHS -> extract(DatePart.YEAR, age).multiply(12) @@ -120,17 +152,41 @@ public Field dateDistance(ChronoUnit timeUnit, Name startDateColumnName }; } - private Field daterange(Column startColumn, Column endColumn, String bounds) { + @Override + public Field addDays(Field dateColumn, int amountOfDays) { + return dateColumn.plus(amountOfDays); + } + + @Override + public Field first(Field column, List> orderByColumn) { + return DSL.field(DSL.sql("({0})[1]", DSL.arrayAgg(column))); + } + + @Override + public Field last(Field column, List> orderByColumns) { + String orderByClause = orderByColumns.stream() + .map(Field::toString) + .collect(Collectors.joining(", ", "ORDER BY ", " DESC")); + return DSL.field(DSL.sql("({0})[1]", DSL.arrayAgg(DSL.field("%s %s".formatted(column, orderByClause))))); + } + + @Override + public Field random(Field column) { + WindowSpecificationRowsStep orderByRandomClause = DSL.orderBy(DSL.function("random", Object.class)); + return DSL.field(DSL.sql("({0})[1]", DSL.arrayAgg(DSL.field("%s %s".formatted(column, orderByRandomClause))))); + } + + private Field daterange(Field startColumn, Field endColumn, String bounds) { return DSL.function( "daterange", Object.class, - DSL.field(DSL.name(startColumn.getName())), - DSL.field(DSL.name(endColumn.getName())), + startColumn, + endColumn, DSL.val(bounds) ); } - private Field extract(DatePart datePart, Field timeInterval) { + private Field extract(DatePart datePart, Field timeInterval) { return DSL.function( "EXTRACT", Integer.class, @@ 
-138,4 +194,9 @@ private Field extract(DatePart datePart, Field timeInterval) { ); } + @Override + public Field toDateField(String dateValue) { + return DSL.field("%s::date".formatted(DSL.val(dateValue)), Date.class); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDateAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDateAggregator.java new file mode 100644 index 0000000000..aef17aa02a --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDateAggregator.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.sql.conversion.dialect; + +import java.util.List; + +import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; +import com.bakdata.conquery.sql.conversion.cqelement.aggregation.DateAggregationDates; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; + +public interface SqlDateAggregator { + + /** + * MERGE or INTERSECT the validity dates of the given {@link QueryStep}s based on the given {@link DateAggregationAction}. + * + * @param carryThroughSelects The selects that should be carried through the date aggregation process. They remain unchanged. + */ + QueryStep apply( + QueryStep joinedStep, + List carryThroughSelects, + DateAggregationDates dateAggregationDates, + DateAggregationAction dateAggregationAction + ); + + /** + * Inverts the validity date of the given base step. + */ + public QueryStep invertAggregatedIntervals(QueryStep baseStep); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java index 6bdba63b99..51ddf3d5fb 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlDialect.java @@ -5,82 +5,112 @@ import java.util.function.Function; import java.util.stream.Collectors; -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.sql.conversion.Converter; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; import com.bakdata.conquery.sql.conversion.cqelement.CQAndConverter; import com.bakdata.conquery.sql.conversion.cqelement.CQDateRestrictionConverter; import com.bakdata.conquery.sql.conversion.cqelement.CQNegationConverter; import com.bakdata.conquery.sql.conversion.cqelement.CQOrConverter; import com.bakdata.conquery.sql.conversion.cqelement.concept.CQConceptConverter; -import com.bakdata.conquery.sql.conversion.filter.FilterConverter; -import com.bakdata.conquery.sql.conversion.filter.FilterConverterService; -import com.bakdata.conquery.sql.conversion.filter.MultiSelectConverter; -import com.bakdata.conquery.sql.conversion.filter.RealRangeConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.BigMultiSelectFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.CountFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.DateDistanceFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterConversions; +import 
com.bakdata.conquery.sql.conversion.cqelement.concept.filter.FilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.MultiSelectFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.NumberFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.SingleSelectFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.filter.SumFilterConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.CountSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.DateDistanceSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.ExistsSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.FirstValueSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.LastValueSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.RandomValueSelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConversions; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConverter; +import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SumSelectConverter; +import com.bakdata.conquery.sql.conversion.model.QueryStepTransformer; import com.bakdata.conquery.sql.conversion.query.ConceptQueryConverter; -import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter; -import com.bakdata.conquery.sql.conversion.select.FirstValueConverter; -import com.bakdata.conquery.sql.conversion.select.SelectConverter; -import com.bakdata.conquery.sql.conversion.select.SelectConverterService; import com.bakdata.conquery.sql.conversion.supplier.SystemDateNowSupplier; import org.jooq.DSLContext; public interface SqlDialect { - SqlFunctionProvider getFunction(); + SystemDateNowSupplier DEFAULT_DATE_NOW_SUPPLIER = new SystemDateNowSupplier(); - List> getNodeConverters(); + SqlFunctionProvider getFunctionProvider(); + + IntervalPacker getIntervalPacker(); - List>> getFilterConverters(); + SqlDateAggregator getDateAggregator(); + + List> getNodeConverters(); List> getSelectConverters(); + List> getFilterConverters(); + DSLContext getDSLContext(); + default boolean requiresAggregationInFinalStep() { + return true; + } + default List> getDefaultNodeConverters() { return List.of( new CQDateRestrictionConverter(), new CQAndConverter(), new CQOrConverter(), new CQNegationConverter(), - new CQConceptConverter(new FilterConverterService(getFilterConverters()), new SelectConverterService(getSelectConverters())), + new CQConceptConverter(new FilterConversions(getFilterConverters()), new SelectConversions(getSelectConverters()), getFunctionProvider()), new ConceptQueryConverter(new QueryStepTransformer(getDSLContext())) ); } - default List>> getDefaultFilterConverters() { - return List.of( - new MultiSelectConverter(), - new RealRangeConverter() - ); - } - default List> customizeSelectConverters(List> substitutes) { return customize(getDefaultSelectConverters(), substitutes); } + default List> getDefaultFilterConverters() { + return List.of( + new DateDistanceFilterConverter(DEFAULT_DATE_NOW_SUPPLIER), + new BigMultiSelectFilterConverter(), + new MultiSelectFilterConverter(), + new SingleSelectFilterConverter(), + new NumberFilterConverter(), + new SumFilterConverter(), + new CountFilterConverter() + ); + } + default List> getDefaultSelectConverters() { return List.of( - new FirstValueConverter(), - new 
DateDistanceConverter(new SystemDateNowSupplier()) + new FirstValueSelectConverter(), + new LastValueSelectConverter(), + new RandomValueSelectConverter(), + new DateDistanceSelectConverter(DEFAULT_DATE_NOW_SUPPLIER), + new ExistsSelectConverter(), + new SumSelectConverter(), + new CountSelectConverter() ); } - private static > List customize(List defaults, List substitutes) { + private static > List customize(List defaults, List substitutes) { Map, C> substituteMap = getSubstituteMap(substitutes); return defaults.stream() .map(converter -> substituteMap.getOrDefault(converter.getConversionClass(), converter)) - .collect(Collectors.toList()); + .toList(); } - private static > Map, C> getSubstituteMap(List substitutes) { + private static > Map, C> getSubstituteMap(List substitutes) { return substitutes.stream() .collect(Collectors.toMap( Converter::getConversionClass, Function.identity() )); } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java index 3a0b305139..0de6d84c1e 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/SqlFunctionProvider.java @@ -2,11 +2,12 @@ import java.sql.Date; import java.time.temporal.ChronoUnit; +import java.util.List; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.models.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; import org.jooq.Condition; import org.jooq.Field; import org.jooq.Name; @@ -21,29 +22,67 @@ public interface SqlFunctionProvider { String DEFAULT_DATE_FORMAT = "yyyy-mm-dd"; + String INFINITY_SIGN = "∞"; + String MINUS_INFINITY_SIGN = "-∞"; + + String getMinDateExpression(); + + String getMaxDateExpression(); /** - * A date restriction condition is true if holds: - * dateRestrictionStart <= validityDateEnd and dateRestrictionEnd >= validityDateStart + * A date restriction condition is true if holds: dateRestrictionStart <= validityDateEnd and dateRestrictionEnd >= validityDateStart */ Condition dateRestriction(ColumnDateRange dateRestrictionRange, ColumnDateRange validityFieldRange); ColumnDateRange daterange(CDateRange dateRestriction); - ColumnDateRange daterange(ValidityDate validityDate, String conceptLabel); + ColumnDateRange daterange(ValidityDate validityDate, String qualifier, String conceptLabel); - Field daterangeString(ColumnDateRange columnDateRange); + ColumnDateRange aggregated(ColumnDateRange columnDateRange); + + /** + * Aggregates the start and end columns of the validity date of entries into one compound string expression. + *
+ * Example: {[2013-11-10,2013-11-11),[2015-11-10,2015-11-11)} + *
+ * Also, if the aggregated expression contains the dialect-specific {@link SqlFunctionProvider#getMaxDateExpression()} or
+ * {@link SqlFunctionProvider#getMinDateExpression()} expression, it should be replaced with the {@link SqlFunctionProvider#INFINITY_SIGN} or
+ * {@link SqlFunctionProvider#MINUS_INFINITY_SIGN}.
+ *
+ * Example: {[-∞,2013-11-11),[2015-11-10,∞)} + */ + Field validityDateStringAggregation(ColumnDateRange columnDateRange); Field dateDistance(ChronoUnit datePart, Name startDateColumn, Date endDateExpression); - default Condition in(Name columnName, String[] values) { - return DSL.field(columnName) - .in(values); + Field addDays(Field dateColumn, int amountOfDays); + + Field first(Field field, List> orderByColumn); + + Field last(Field column, List> orderByColumns); + + Field random(Field column); + + default Field least(List> fields) { + if (fields.isEmpty()) { + return null; + } + Field[] fieldArray = fields.toArray(Field[]::new); + // signature only accepts arrays/varargs + return DSL.function("least", fieldArray[0].getType(), fieldArray); + } + + default Field greatest(List> fields) { + if (fields.isEmpty()) { + return null; + } + Field[] fieldArray = fields.toArray(Field[]::new); + // signature only accepts arrays/varargs + return DSL.function("greatest", fieldArray[0].getType(), fieldArray); } - default Field first(Name columnName) { - // TODO: this is just a temporary placeholder - return DSL.field(columnName); + default Condition in(Field column, String[] values) { + return column.in(values); } default TableOnConditionStep innerJoin( @@ -72,4 +111,14 @@ default Field toDateField(String dateExpression) { return DSL.toDate(dateExpression, DEFAULT_DATE_FORMAT); } + default Field replace(Field target, String old, String _new) { + return DSL.function("replace", String.class, target, DSL.val(old), DSL.val(_new)); + } + + default Field prefixStringAggregation(Field field, String prefix) { + Field likePattern = DSL.inline(prefix + "%"); + String sqlTemplate = "'[' || STRING_AGG(CASE WHEN {0} LIKE {1} THEN {0} ELSE NULL END, ', ') || ']'"; + return DSL.field(DSL.sql(sqlTemplate, field, likePattern)); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java deleted file mode 100644 index cdc40c3395..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverter.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.bakdata.conquery.sql.conversion.filter; - -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.models.datasets.concepts.filters.SingleColumnFilter; -import com.bakdata.conquery.sql.conversion.Converter; -import org.jooq.Condition; -import org.jooq.Name; -import org.jooq.impl.DSL; - -/** - * Converts a {@link com.bakdata.conquery.apiv1.query.concept.filter.FilterValue} - * to a condition for a SQL WHERE clause. - * - * @param The type of Filter this converter is responsible for. 
- */ -public interface FilterConverter> extends Converter { - - static Name getColumnName(FilterValue filter) { - // works for now but we might have to distinguish later if we encounter non-SingleColumnFilters - return DSL.name(((SingleColumnFilter) filter.getFilter()).getColumn().getName()); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java deleted file mode 100644 index 1b5a4253af..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/FilterConverterService.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.bakdata.conquery.sql.conversion.filter; - -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.sql.conversion.ConverterService; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import org.jooq.Condition; -import org.jooq.impl.DSL; - -import java.util.List; - -public class FilterConverterService extends ConverterService, Condition> { - - public FilterConverterService(List> converters) { - super(converters); - } - - @Override - public Condition convert(FilterValue filterValue, ConversionContext context) { - Condition condition = super.convert(filterValue, context); - if (!context.isNegation()) { - return condition; - } - return DSL.not(condition); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java deleted file mode 100644 index 14239afd0a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/MultiSelectConverter.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.bakdata.conquery.sql.conversion.filter; - -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import org.jooq.Condition; - -public class MultiSelectConverter implements FilterConverter { - - @Override - public Condition convert(FilterValue.CQBigMultiSelectFilter filter, ConversionContext context) { - return context.getSqlDialect().getFunction() - .in(FilterConverter.getColumnName(filter), filter.getValue()); - } - - @Override - public Class getConversionClass() { - return FilterValue.CQBigMultiSelectFilter.class; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java deleted file mode 100644 index ec77749315..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/filter/RealRangeConverter.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.bakdata.conquery.sql.conversion.filter; - -import java.util.Optional; -import java.util.stream.Stream; - -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import org.jooq.Condition; -import org.jooq.Field; -import org.jooq.impl.DSL; - -public class RealRangeConverter implements FilterConverter { - - @Override - public Condition convert(FilterValue.CQRealRangeFilter filter, ConversionContext context) { - Field field = DSL.field(FilterConverter.getColumnName(filter)); - - Optional greaterOrEqualCondition = Optional.ofNullable(filter.getValue().getMin()).map(field::greaterOrEqual); - Optional lessOrEqualCondition = 
Optional.ofNullable(filter.getValue().getMax()).map(field::lessOrEqual); - return Stream.concat(greaterOrEqualCondition.stream(), lessOrEqualCondition.stream()) - .reduce(Condition::and) - .orElseThrow(() -> new IllegalArgumentException("Missing min or max value for real range filter.")); - } - - @Override - public Class getConversionClass() { - return FilterValue.CQRealRangeFilter.class; - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/ColumnDateRange.java similarity index 85% rename from backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java rename to backend/src/main/java/com/bakdata/conquery/sql/conversion/model/ColumnDateRange.java index 6e1f832c18..55013f2efa 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/models/ColumnDateRange.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/ColumnDateRange.java @@ -1,7 +1,8 @@ -package com.bakdata.conquery.sql.models; +package com.bakdata.conquery.sql.conversion.model; import java.sql.Date; import java.util.List; +import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.Getter; @@ -17,18 +18,18 @@ public class ColumnDateRange { private static final String END_SUFFIX = "_end"; private final boolean isEmpty; - private final Field range; + private final Field range; private final Field start; private final Field end; - private ColumnDateRange(boolean isEmpty, Field range, Field startColumn, Field endColumn) { + private ColumnDateRange(boolean isEmpty, Field range, Field startColumn, Field endColumn) { this.isEmpty = isEmpty; this.range = range; this.start = startColumn; this.end = endColumn; } - public static ColumnDateRange of(Field rangeColumn) { + public static ColumnDateRange of(Field rangeColumn) { return new ColumnDateRange(false, rangeColumn, null, null); } @@ -52,13 +53,12 @@ public boolean isSingleColumnRange() { return this.range != null; } - public List> toFields() { + public List> toFields() { if (isSingleColumnRange()) { return List.of(this.range); } return Stream.of(this.start, this.end) - .map(dateField -> dateField.coerce(Object.class)) - .toList(); + .collect(Collectors.toList()); } public ColumnDateRange qualify(String qualifier) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/CteStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/CteStep.java new file mode 100644 index 0000000000..069fa9e3a2 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/CteStep.java @@ -0,0 +1,7 @@ +package com.bakdata.conquery.sql.conversion.model; + +public interface CteStep { + + String cteName(String nodeLabel); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/LogicalOperation.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/LogicalOperation.java new file mode 100644 index 0000000000..57220db714 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/LogicalOperation.java @@ -0,0 +1,6 @@ +package com.bakdata.conquery.sql.conversion.model; + +public enum LogicalOperation { + AND, + OR +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QualifyingUtil.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QualifyingUtil.java new file mode 100644 index 0000000000..c17e024784 --- /dev/null +++ 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QualifyingUtil.java
@@ -0,0 +1,23 @@
+package com.bakdata.conquery.sql.conversion.model;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+public class QualifyingUtil {
+
+	public static <T> Field<T> qualify(Field<T> field, String qualifier) {
+		return DSL.field(DSL.name(qualifier, field.getName()), field.getType());
+	}
+
+	public static List<SqlSelect> qualify(List<SqlSelect> sqlSelects, String qualifier) {
+		return sqlSelects.stream()
+						 .map(sqlSelect -> ExtractingSqlSelect.fromSqlSelect(sqlSelect, qualifier))
+						 .collect(Collectors.toList());
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStep.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStep.java
new file mode 100644
index 0000000000..31cf3eb459
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStep.java
@@ -0,0 +1,61 @@
+package com.bakdata.conquery.sql.conversion.model;
+
+import java.util.Collections;
+import java.util.List;
+
+import lombok.Builder;
+import lombok.Value;
+import org.jooq.Condition;
+import org.jooq.Field;
+import org.jooq.Record;
+import org.jooq.TableLike;
+import org.jooq.impl.DSL;
+
+/**
+ * Intermediate representation of an SQL query.
+ */
+@Value
+@Builder(toBuilder = true)
+public class QueryStep {
+
+	String cteName;
+	Selects selects;
+	TableLike<Record> fromTable;
+	@Builder.Default
+	List<Condition> conditions = Collections.emptyList();
+	/**
+	 * All {@link Field}s that should be part of the SQL GROUP BY clause.
+	 */
+	@Builder.Default
+	List<Field<?>> groupBy = Collections.emptyList();
+	/**
+	 * All {@link QueryStep}s that should be connected via a SQL UNION operator.
+	 */
+	@Builder.Default
+	List<QueryStep> union = Collections.emptyList();
+	/**
+	 * All {@link QueryStep}s that shall be converted before this {@link QueryStep}.
+	 */
+	@Builder.Default
+	List<QueryStep> predecessors = Collections.emptyList();
+
+	public static TableLike<Record> toTableLike(String fromTableName) {
+		return DSL.table(DSL.name(fromTableName));
+	}
+
+	/**
+	 * @return All selects re-mapped to a qualifier, which is the cteName of this QueryStep.
+ */ + public Selects getQualifiedSelects() { + return this.selects.qualify(this.cteName); + } + + public boolean isGroupBy() { + return !this.groupBy.isEmpty(); + } + + public boolean isUnion() { + return !this.union.isEmpty(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepJoiner.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepJoiner.java new file mode 100644 index 0000000000..0d62614b8e --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepJoiner.java @@ -0,0 +1,144 @@ +package com.bakdata.conquery.sql.conversion.model; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import com.bakdata.conquery.apiv1.query.CQElement; +import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.cqelement.aggregation.DateAggregationDates; +import com.bakdata.conquery.sql.conversion.dialect.SqlDateAggregator; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.select.FieldWrapper; +import com.bakdata.conquery.sql.conversion.model.select.SqlSelect; +import org.jooq.Field; +import org.jooq.Record; +import org.jooq.Table; +import org.jooq.TableLike; +import org.jooq.TableOnConditionStep; +import org.jooq.impl.DSL; + +public class QueryStepJoiner { + + static String PRIMARY_COLUMN_NAME = "primary_column"; + + public static ConversionContext joinChildren( + Iterable children, + ConversionContext context, + LogicalOperation logicalOperation, + DateAggregationAction dateAggregationAction + ) { + + ConversionContext childrenContext = context; + for (CQElement childNode : children) { + childrenContext = context.getNodeConversions().convert(childNode, childrenContext); + } + + List queriesToJoin = childrenContext.getQuerySteps(); + Field primaryColumn = coalescePrimaryColumns(queriesToJoin); + List mergedSelects = mergeSelects(queriesToJoin); + + QueryStep.QueryStepBuilder andQueryStep = QueryStep.builder() + .cteName(constructJoinedQueryStepLabel(queriesToJoin, logicalOperation)) + .fromTable(constructJoinedTable(queriesToJoin, logicalOperation, context)) + .conditions(Collections.emptyList()) + .predecessors(queriesToJoin); + + DateAggregationDates dateAggregationDates = DateAggregationDates.forSteps(queriesToJoin); + if (dateAggregationAction == DateAggregationAction.BLOCK || dateAggregationDates.dateAggregationImpossible()) { + andQueryStep = andQueryStep.selects(new Selects(primaryColumn, mergedSelects)); + return context.withQuerySteps(List.of(andQueryStep.build())); + } + // if there is only 1 child node containing a validity date, we just keep it as overall validity date for the joined node + else if (dateAggregationDates.getValidityDates().size() == 1) { + ColumnDateRange validityDate = dateAggregationDates.getValidityDates().get(0); + andQueryStep = andQueryStep.selects(new Selects(primaryColumn, Optional.ofNullable(validityDate), mergedSelects)); + return context.withQuerySteps(List.of(andQueryStep.build())); + } + + List mergedSelectsWithAllValidityDates = new ArrayList<>(mergedSelects); + mergedSelectsWithAllValidityDates.addAll(dateAggregationDates.allStartsAndEnds()); + andQueryStep = andQueryStep.selects(new Selects(primaryColumn, mergedSelectsWithAllValidityDates)); + + 
+		SqlDateAggregator sqlDateAggregator = context.getSqlDialect().getDateAggregator();
+		QueryStep mergeIntervalsStep = sqlDateAggregator.apply(
+				andQueryStep.build(),
+				mergedSelects,
+				dateAggregationDates,
+				dateAggregationAction
+		);
+
+		return context.withQuerySteps(List.of(mergeIntervalsStep));
+	}
+
+	public static TableLike<Record> constructJoinedTable(List<QueryStep> queriesToJoin, LogicalOperation logicalOperation, ConversionContext context) {
+
+		Table<Record> joinedQuery = getInitialJoinTable(queriesToJoin);
+
+		SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider();
+		JoinType joinType = switch (logicalOperation) {
+			case AND -> functionProvider::innerJoin;
+			case OR -> functionProvider::fullOuterJoin;
+		};
+
+		for (int i = 0; i < queriesToJoin.size() - 1; i++) {
+
+			QueryStep leftPartQS = queriesToJoin.get(i);
+			QueryStep rightPartQS = queriesToJoin.get(i + 1);
+
+			Field<Object> leftPartPrimaryColumn = leftPartQS.getQualifiedSelects().getPrimaryColumn();
+			Field<Object> rightPartPrimaryColumn = rightPartQS.getQualifiedSelects().getPrimaryColumn();
+
+			joinedQuery = joinType.join(joinedQuery, rightPartQS, leftPartPrimaryColumn, rightPartPrimaryColumn);
+		}
+
+		return joinedQuery;
+	}
+
+	private static Field<Object> coalescePrimaryColumns(List<QueryStep> querySteps) {
+		List<Field<Object>> primaryColumns = querySteps.stream()
+													   .map(queryStep -> queryStep.getQualifiedSelects().getPrimaryColumn())
+													   .collect(Collectors.toList());
+		return DSL.coalesce(primaryColumns.get(0), primaryColumns.subList(1, primaryColumns.size()).toArray())
+				  .as(PRIMARY_COLUMN_NAME);
+	}
+
+	private static List<SqlSelect> mergeSelects(List<QueryStep> querySteps) {
+		return querySteps.stream()
+						 .flatMap(queryStep -> queryStep.getQualifiedSelects().getSqlSelects().stream())
+						 .map(FieldWrapper::unique)
+						 .collect(Collectors.toList());
+	}
+
+	private static String constructJoinedQueryStepLabel(List<QueryStep> queriesToJoin, LogicalOperation logicalOperation) {
+
+		String labelConnector = switch (logicalOperation) {
+			case AND -> "AND";
+			case OR -> "OR";
+		};
+
+		String concatenatedCteNames = queriesToJoin.stream()
+												   .map(QueryStep::getCteName)
+												   .collect(Collectors.joining(""));
+
+		return "%s_%8H".formatted(labelConnector, concatenatedCteNames.hashCode());
+	}
+
+	private static Table<Record> getInitialJoinTable(List<QueryStep> queriesToJoin) {
+		return DSL.table(DSL.name(queriesToJoin.get(0).getCteName()));
+	}
+
+	@FunctionalInterface
+	private interface JoinType {
+		TableOnConditionStep<Record> join(
+				Table<Record> leftPartQueryBase,
+				QueryStep rightPartQS,
+				Field<Object> leftPartPrimaryColumn,
+				Field<Object> rightPartPrimaryColumn
+		);
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepTransformer.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepTransformer.java
new file mode 100644
index 0000000000..a0b8a2c339
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/QueryStepTransformer.java
@@ -0,0 +1,82 @@
+package com.bakdata.conquery.sql.conversion.model;
+
+import java.util.List;
+import java.util.stream.Stream;
+
+import org.jooq.CommonTableExpression;
+import org.jooq.DSLContext;
+import org.jooq.Record;
+import org.jooq.Select;
+import org.jooq.SelectConditionStep;
+import org.jooq.impl.DSL;
+
+/**
+ * Transformer for translating the intermediate representation of {@link QueryStep} into the final SQL query.
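+ * <p>
+ * For example (a sketch with hypothetical CTE names), a step that reads from its single
+ * predecessor {@code "prep"} roughly becomes:
+ * <pre>{@code
+ * WITH "prep" AS (SELECT ...)
+ * SELECT ... FROM "prep"
+ * }</pre>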
+ */
+public class QueryStepTransformer {
+
+	private final DSLContext dslContext;
+
+	public QueryStepTransformer(DSLContext dslContext) {
+		this.dslContext = dslContext;
+	}
+
+	/**
+	 * Converts a given {@link QueryStep} into an executable SELECT statement.
+	 */
+	public Select<Record> toSelectQuery(QueryStep queryStep) {
+		SelectConditionStep<Record> queryBase = this.dslContext.with(constructPredecessorCteList(queryStep))
+															   .select(queryStep.getSelects().all())
+															   .from(queryStep.getFromTable())
+															   .where(queryStep.getConditions());
+		if (queryStep.isGroupBy()) {
+			return queryBase.groupBy(queryStep.getGroupBy());
+		}
+		else {
+			return queryBase;
+		}
+	}
+
+	private List<CommonTableExpression<Record>> constructPredecessorCteList(QueryStep queryStep) {
+		return queryStep.getPredecessors().stream()
+						.flatMap(predecessor -> toCteList(predecessor).stream())
+						.toList();
+	}
+
+	private List<CommonTableExpression<Record>> toCteList(QueryStep queryStep) {
+		return Stream.concat(
+				this.predecessorCtes(queryStep),
+				Stream.of(toCte(queryStep))
+		).toList();
+	}
+
+	private Stream<CommonTableExpression<Record>> predecessorCtes(QueryStep queryStep) {
+		return queryStep.getPredecessors().stream()
+						.flatMap(predecessor -> toCteList(predecessor).stream());
+	}
+
+	private CommonTableExpression<Record> toCte(QueryStep queryStep) {
+
+		Select<Record> selectStep = this.dslContext
+				.select(queryStep.getSelects().all())
+				.from(queryStep.getFromTable())
+				.where(queryStep.getConditions());
+
+		if (queryStep.isGroupBy()) {
+			selectStep = ((SelectConditionStep<Record>) selectStep).groupBy(queryStep.getGroupBy());
+		}
+
+		if (queryStep.isUnion()) {
+			for (QueryStep unionStep : queryStep.getUnion()) {
+				// we only use the union as part of the date aggregation process - the entries of the UNION tables are all unique
+				// thus we can use a UNION ALL because it's way faster than UNION
+				selectStep = selectStep.unionAll(
+						this.dslContext.select(unionStep.getSelects().all()).from(unionStep.getFromTable())
+				);
+			}
+		}
+
+		return DSL.name(queryStep.getCteName()).as(selectStep);
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/Selects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/Selects.java
new file mode 100644
index 0000000000..71ea01fc06
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/Selects.java
@@ -0,0 +1,90 @@
+package com.bakdata.conquery.sql.conversion.model;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import com.bakdata.conquery.sql.conversion.model.select.ExtractingSqlSelect;
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelect;
+import lombok.Value;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@Value
+public class Selects {
+
+	Field<Object> primaryColumn;
+	Optional<ColumnDateRange> validityDate;
+	List<SqlSelect> sqlSelects;
+
+	public Selects(Field<Object> primaryColumn, Optional<ColumnDateRange> validityDate, List<SqlSelect> sqlSelects) {
+		this.primaryColumn = primaryColumn;
+		this.validityDate = validityDate;
+		this.sqlSelects = sqlSelects;
+	}
+
+	/**
+	 * {@link Selects} without a validity date.
+	 */
+	public Selects(Field<Object> primaryColumn, List<SqlSelect> sqlSelects) {
+		this.primaryColumn = primaryColumn;
+		this.validityDate = Optional.empty();
+		this.sqlSelects = sqlSelects;
+	}
+
+	/**
+	 * {@link Selects#Selects(Field, Optional, List)} qualified onto the given qualifier.
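+	 * <p>
+	 * Shorthand for {@code new Selects(primaryColumn, validityDate, sqlSelects).qualify(qualifier)}.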
+	 */
+	public static Selects qualified(String qualifier, Field<Object> primaryColumn, Optional<ColumnDateRange> validityDate, List<SqlSelect> sqlSelects) {
+		return new Selects(primaryColumn, validityDate, sqlSelects).qualify(qualifier);
+	}
+
+	/**
+	 * {@link Selects#Selects(Field, List)} qualified onto the given qualifier.
+	 */
+	public static Selects qualified(String qualifier, Field<Object> primaryColumn, List<SqlSelect> sqlSelects) {
+		return new Selects(primaryColumn, sqlSelects).qualify(qualifier);
+	}
+
+	public Selects withValidityDate(ColumnDateRange validityDate) {
+		return new Selects(this.primaryColumn, Optional.of(validityDate), this.sqlSelects);
+	}
+
+	public Selects blockValidityDate() {
+		return new Selects(this.primaryColumn, this.sqlSelects);
+	}
+
+	public Selects qualify(String qualifier) {
+		Field<Object> qualifiedPrimaryColumn = DSL.field(DSL.name(qualifier, this.primaryColumn.getName()));
+		List<SqlSelect> qualifiedSelects = this.sqlSelects.stream()
+														  .map(select -> ExtractingSqlSelect.fromSqlSelect(select, qualifier))
+														  .distinct()
+														  .collect(Collectors.toList());
+		if (this.validityDate.isEmpty()) {
+			return new Selects(qualifiedPrimaryColumn, qualifiedSelects);
+		}
+		else {
+			return new Selects(qualifiedPrimaryColumn, this.validityDate.map(_validityDate -> _validityDate.qualify(qualifier)), qualifiedSelects);
+		}
+	}
+
+	public List<Field<?>> all() {
+		return Stream.of(
+							 Stream.of(this.primaryColumn),
+							 this.validityDate.stream().flatMap(range -> range.toFields().stream()),
+							 this.sqlSelects.stream().map(SqlSelect::select)
+					 )
+					 .flatMap(Function.identity())
+					 .map(select -> (Field<?>) select)
+					 .collect(Collectors.toList());
+	}
+
+	public List<Field<?>> explicitSelects() {
+		return this.sqlSelects.stream()
+							  .map(SqlSelect::select)
+							  .collect(Collectors.toList());
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConceptFilter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConceptFilter.java
new file mode 100644
index 0000000000..2929b4ea37
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConceptFilter.java
@@ -0,0 +1,10 @@
+package com.bakdata.conquery.sql.conversion.model.filter;
+
+import com.bakdata.conquery.sql.conversion.model.select.SqlSelects;
+import lombok.Value;
+
+@Value
+public class ConceptFilter {
+	SqlSelects selects;
+	Filters filters;
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConditionUtil.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConditionUtil.java
new file mode 100644
index 0000000000..2e7b358611
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/ConditionUtil.java
@@ -0,0 +1,32 @@
+package com.bakdata.conquery.sql.conversion.model.filter;
+
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import com.bakdata.conquery.models.common.IRange;
+import lombok.experimental.UtilityClass;
+import org.jooq.Condition;
+import org.jooq.Field;
+
+@UtilityClass
+public class ConditionUtil {
+
+	public Condition rangeCondition(final Field column, final IRange range) {
+		Field col = (Field) column;
+		Optional<Condition> greaterOrEqualCondition = Optional.ofNullable(range.getMin()).map(col::greaterOrEqual);
+		Optional<Condition> lessOrEqualCondition = Optional.ofNullable(range.getMax()).map(col::lessOrEqual);
+		return Stream.concat(greaterOrEqualCondition.stream(), lessOrEqualCondition.stream())
+					 .reduce(Condition::and)
+					 .orElseThrow(() -> new IllegalArgumentException("Missing min or max value for real range filter."));
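+		// e.g. a range with min 10 and no max on column "value" yields just: "value" >= 10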
} + + /** + * Use if you want to wrap a {@link Condition} without implementing a {@link FilterCondition} in an own class. + * + * @return A {@link FilterCondition} instance encapsulating the provided condition and type. + */ + public FilterCondition wrap(final Condition condition, final FilterType type) { + return new FilterConditionWrapper(condition, type); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/CountCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/CountCondition.java new file mode 100644 index 0000000000..10132cc273 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/CountCondition.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.models.common.IRange; +import org.jooq.Field; + +public class CountCondition extends RangeCondition { + + public CountCondition(Field column, IRange range) { + super(column, range); + } + + @Override + public FilterType type() { + return FilterType.GROUP; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateDistanceCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateDistanceCondition.java new file mode 100644 index 0000000000..f34d1f8a6d --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateDistanceCondition.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.models.common.Range; +import org.jooq.Field; + +public class DateDistanceCondition extends RangeCondition { + + public DateDistanceCondition(Field column, Range.LongRange range) { + super(column, range); + } + + @Override + public FilterType type() { + return FilterType.EVENT; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateRestrictionCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateRestrictionCondition.java new file mode 100644 index 0000000000..f0678fcba5 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/DateRestrictionCondition.java @@ -0,0 +1,25 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import lombok.RequiredArgsConstructor; +import org.jooq.Condition; + +@RequiredArgsConstructor +public class DateRestrictionCondition implements FilterCondition { + + private final SqlFunctionProvider functionProvider; + private final ColumnDateRange dateRestriction; + private final ColumnDateRange validityDate; + + @Override + public Condition filterCondition() { + return this.functionProvider.dateRestriction(dateRestriction, validityDate); + } + + @Override + public FilterType type() { + return FilterType.EVENT; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterCondition.java new file mode 100644 index 0000000000..21e19512c4 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterCondition.java @@ -0,0 +1,15 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import org.jooq.Condition; + +public interface FilterCondition { + + Condition filterCondition(); + + 
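+	/**
+	 * Distinguishes conditions on single events (applied before aggregation) from conditions
+	 * on aggregated values, e.g. a {@link CountCondition} only holds after grouping.
+	 */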
FilterType type(); + + default FilterCondition negate() { + return new InvertedCondition(this); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterConditionWrapper.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterConditionWrapper.java new file mode 100644 index 0000000000..74bb1f175f --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterConditionWrapper.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import lombok.Value; +import org.jooq.Condition; + +@Value +public class FilterConditionWrapper implements FilterCondition { + + Condition condition; + FilterType type; + + @Override + public Condition filterCondition() { + return condition; + } + + @Override + public FilterType type() { + return type; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterType.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterType.java new file mode 100644 index 0000000000..fa8502b7a0 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/FilterType.java @@ -0,0 +1,6 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +public enum FilterType { + EVENT, + GROUP +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/Filters.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/Filters.java new file mode 100644 index 0000000000..c7e333e3eb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/Filters.java @@ -0,0 +1,25 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import java.util.Collections; +import java.util.List; + +import lombok.Builder; +import lombok.Value; + +@Builder +@Value +public class Filters { + + @Builder.Default + List event = Collections.emptyList(); + @Builder.Default + List group = Collections.emptyList(); + + public Filters negated() { + return new Filters( + event.stream().map(FilterCondition::negate).toList(), + group.stream().map(FilterCondition::negate).toList() + ); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/InvertedCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/InvertedCondition.java new file mode 100644 index 0000000000..d0257118c6 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/InvertedCondition.java @@ -0,0 +1,27 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import lombok.RequiredArgsConstructor; +import org.jooq.Condition; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +public class InvertedCondition implements FilterCondition { + + private final FilterCondition filterCondition; + + @Override + public Condition filterCondition() { + return DSL.not(filterCondition.filterCondition()); + } + + @Override + public FilterType type() { + return filterCondition.type(); + } + + @Override + public FilterCondition negate() { + return filterCondition; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/MultiSelectCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/MultiSelectCondition.java new file mode 100644 index 0000000000..309b4efd07 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/MultiSelectCondition.java @@ -0,0 +1,48 
@@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import java.util.Arrays; + +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.google.common.base.Strings; +import lombok.RequiredArgsConstructor; +import org.jooq.Condition; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +public class MultiSelectCondition implements FilterCondition { + + private final Field column; + private final String[] values; + private final SqlFunctionProvider functionProvider; + + @Override + public FilterCondition negate() { + // we want all entries that don't satisfy a condition - because in SQL a comparison with NULL equals UNKNOWN and not FALSE, + // we need to check if the entry is NULL or does not fulfil the condition + Condition valueIsNull = column.isNull(); + Condition notOrNull = DSL.not(filterCondition()).or(valueIsNull); + return ConditionUtil.wrap(notOrNull, this.type()); + } + + @Override + public Condition filterCondition() { + + // values can contain empty or null Strings + String[] valuesWithoutNull = Arrays.stream(values) + .filter(value -> !Strings.isNullOrEmpty(value)) + .toArray(String[]::new); + Condition inCondition = this.functionProvider.in(column, valuesWithoutNull); + + if (valuesWithoutNull.length < values.length) { + return inCondition.or(DSL.field(column).isNull()); + } + return inCondition; + } + + @Override + public FilterType type() { + return FilterType.EVENT; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberCondition.java new file mode 100644 index 0000000000..5e9daea62b --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberCondition.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.models.common.IRange; +import org.jooq.Field; + +public class NumberCondition extends RangeCondition { + + public NumberCondition(Field column, IRange range) { + super(column, range); + } + + @Override + public FilterType type() { + return FilterType.EVENT; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/RangeCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/RangeCondition.java new file mode 100644 index 0000000000..50adeaaad7 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/RangeCondition.java @@ -0,0 +1,19 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.models.common.IRange; +import lombok.RequiredArgsConstructor; +import org.jooq.Condition; +import org.jooq.Field; + +@RequiredArgsConstructor +abstract class RangeCondition implements FilterCondition { + + private final Field column; + private final IRange range; + + @Override + public Condition filterCondition() { + return ConditionUtil.rangeCondition(column, range); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/SumCondition.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/SumCondition.java new file mode 100644 index 0000000000..6f1e3e26bf --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/SumCondition.java @@ -0,0 +1,17 @@ +package com.bakdata.conquery.sql.conversion.model.filter; + +import com.bakdata.conquery.models.common.IRange; +import 
org.jooq.Field; + +public class SumCondition extends RangeCondition { + + public SumCondition(Field column, IRange range) { + super(column, range); + } + + @Override + public FilterType type() { + return FilterType.GROUP; + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/CountSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/CountSqlSelect.java new file mode 100644 index 0000000000..4ba615b502 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/CountSqlSelect.java @@ -0,0 +1,43 @@ +package com.bakdata.conquery.sql.conversion.model.select; + +import java.math.BigDecimal; + +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +@EqualsAndHashCode +public class CountSqlSelect implements SqlSelect { + + private final Field columnToCount; + private final String alias; + private final CountType countType; + + @Override + public Field select() { + Field countField = countType == CountType.DISTINCT ? DSL.countDistinct(columnToCount) : DSL.count(columnToCount); + return countField.as(alias); + } + + @Override + public Field aliased() { + return DSL.field(alias, BigDecimal.class); + } + + @Override + public String columnName() { + return columnToCount.getName(); + } + + public enum CountType { + DEFAULT, + DISTINCT; + + public static CountType fromBoolean(boolean value) { + return value ? DISTINCT : DEFAULT; + } + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DateDistanceSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DateDistanceSqlSelect.java new file mode 100644 index 0000000000..a7c8401790 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DateDistanceSqlSelect.java @@ -0,0 +1,69 @@ +package com.bakdata.conquery.sql.conversion.model.select; + +import java.sql.Date; +import java.time.LocalDate; +import java.time.temporal.ChronoUnit; +import java.util.Objects; + +import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; +import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; +import org.jooq.Name; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +@EqualsAndHashCode +public class DateDistanceSqlSelect implements SqlSelect { + + private final DateNowSupplier dateNowSupplier; + private final ChronoUnit timeUnit; + private final String sourceTable; + private final Column column; + private final String alias; + private final CDateRange dateRestriction; + @EqualsAndHashCode.Exclude + private final SqlFunctionProvider functionProvider; + + @Override + public Field select() { + Date endDate = getEndDate(dateRestriction); + + if (column.getType() != MajorTypeId.DATE) { + throw new UnsupportedOperationException("Can't calculate date distance to column of type " + column.getType()); + } + + Name dateColumnName = DSL.name(sourceTable, column.getName()); + return functionProvider.dateDistance(timeUnit, dateColumnName, endDate) + .as(alias); + } + + private Date getEndDate(CDateRange dateRange) { + LocalDate endDate; + // if a date restriction is set, the max of the date 
restriction equals the end date of the date distance
+		// but there is also the possibility that the user sets an empty date range, which will be non-null but with null values
+		if (Objects.nonNull(dateRange) && dateRange.getMax() != null) {
+			endDate = dateRange.getMax();
+		}
+		else {
+			// otherwise the current date is the upper bound
+			endDate = dateNowSupplier.getLocalDateNow();
+		}
+		return Date.valueOf(endDate);
+	}
+
+	@Override
+	public Field<Integer> aliased() {
+		return DSL.field(alias, Integer.class);
+	}
+
+	@Override
+	public String columnName() {
+		return column.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExistsSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExistsSqlSelect.java
new file mode 100644
index 0000000000..93f79bc585
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExistsSqlSelect.java
@@ -0,0 +1,33 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import lombok.EqualsAndHashCode;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@EqualsAndHashCode
+public class ExistsSqlSelect implements SqlSelect {
+
+	private static final String EXISTS_SUFFIX = "_exists";
+	private final String label;
+
+	public ExistsSqlSelect(String label) {
+		this.label = label + EXISTS_SUFFIX;
+	}
+
+	@Override
+	public Field<Integer> select() {
+		return DSL.field("1", Integer.class)
+				  .as(label);
+	}
+
+	@Override
+	public Field<Integer> aliased() {
+		return DSL.field(label, Integer.class);
+	}
+
+	@Override
+	public String columnName() {
+		return label;
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExtractingSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExtractingSqlSelect.java
new file mode 100644
index 0000000000..5a0788ca2a
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ExtractingSqlSelect.java
@@ -0,0 +1,48 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import lombok.EqualsAndHashCode;
+import lombok.Value;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+/**
+ * Select that does nothing but reference an existing column.
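+ * For example (hypothetical names), {@code new ExtractingSqlSelect<>("step_1", "value", Object.class)}
+ * renders as {@code "step_1"."value"}, while its {@code aliased()} form is the plain {@code "value"}.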

+ * This can be used if another select requires a column in a later step.
+ *
+ * @param <T> type of column
+ */
+@Value
+@EqualsAndHashCode
+public class ExtractingSqlSelect<T> implements SqlSelect {
+
+	String table;
+	String column;
+	@EqualsAndHashCode.Exclude
+	Class<T> columnClass;
+
+	@SuppressWarnings("unchecked")
+	public static <T> ExtractingSqlSelect<T> fromSqlSelect(SqlSelect select, String qualifier) {
+		return (ExtractingSqlSelect<T>) new ExtractingSqlSelect<>(
+				qualifier,
+				select.columnName(),
+				select.aliased().getType()
+		);
+	}
+
+	@Override
+	public Field<T> select() {
+		return DSL.field(DSL.name(table, column), columnClass);
+	}
+
+	@Override
+	public Field<T> aliased() {
+		return DSL.field(DSL.name(column), columnClass);
+	}
+
+	@Override
+	public String columnName() {
+		return column;
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FieldWrapper.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FieldWrapper.java
new file mode 100644
index 0000000000..514ec783ef
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FieldWrapper.java
@@ -0,0 +1,37 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import lombok.EqualsAndHashCode;
+import lombok.RequiredArgsConstructor;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@RequiredArgsConstructor
+@EqualsAndHashCode
+public class FieldWrapper implements SqlSelect {
+
+	private final Field<?> field;
+
+	/**
+	 * @return The given {@link SqlSelect}, aliased with a unique alias.
+	 */
+	public static FieldWrapper unique(SqlSelect sqlSelect) {
+		Field<?> field = sqlSelect.select();
+		return new FieldWrapper(field.as("%s-%8X".formatted(field.getName(), field.hashCode())));
+	}
+
+	@Override
+	public Field<?> select() {
+		return field;
+	}
+
+	@Override
+	public Field<?> aliased() {
+		return DSL.field(DSL.name(field.getName()));
+	}
+
+	@Override
+	public String columnName() {
+		return field.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSqlSelect.java
new file mode 100644
index 0000000000..e61f983fd9
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSqlSelect.java
@@ -0,0 +1,39 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import java.util.List;
+
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import lombok.Builder;
+import lombok.EqualsAndHashCode;
+import lombok.Value;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@Value
+@Builder
+@EqualsAndHashCode
+public class FirstValueSqlSelect implements SqlSelect {
+
+	Field<?> firstColumn;
+	String alias;
+	List<Field<?>> orderByColumns;
+	@EqualsAndHashCode.Exclude
+	SqlFunctionProvider functionProvider;
+
+	@Override
+	public Field<?> select() {
+		return functionProvider.first(firstColumn, orderByColumns)
+							   .as(alias);
+	}
+
+	@Override
+	public Field<Object> aliased() {
+		return DSL.field(alias);
+	}
+
+	@Override
+	public String columnName() {
+		return firstColumn.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSqlSelect.java
new file mode 100644
index 0000000000..b64486077b
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSqlSelect.java
@@ -0,0 +1,39 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import java.util.List;
+
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import lombok.Builder;
+import lombok.EqualsAndHashCode;
+import lombok.Value;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@Value
+@Builder
+@EqualsAndHashCode
+public class LastValueSqlSelect implements SqlSelect {
+
+	Field<?> lastColumn;
+	String alias;
+	List<Field<?>> orderByColumns;
+	@EqualsAndHashCode.Exclude
+	SqlFunctionProvider functionProvider;
+
+	@Override
+	public Field<?> select() {
+		return functionProvider.last(lastColumn, orderByColumns)
+							   .as(alias);
+	}
+
+	@Override
+	public Field<Object> aliased() {
+		return DSL.field(alias);
+	}
+
+	@Override
+	public String columnName() {
+		return lastColumn.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/MinSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/MinSqlSelect.java
new file mode 100644
index 0000000000..f85f4301a8
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/MinSqlSelect.java
@@ -0,0 +1,33 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import java.math.BigDecimal;
+
+import lombok.EqualsAndHashCode;
+import lombok.RequiredArgsConstructor;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@RequiredArgsConstructor
+@EqualsAndHashCode
+public class MinSqlSelect implements SqlSelect {
+
+	private final Field<?> minColumn;
+	private final String alias;
+
+	@Override
+	public Field<?> select() {
+		return DSL.min(minColumn)
+				  .as(alias);
+	}
+
+	@Override
+	public Field<BigDecimal> aliased() {
+		return DSL.field(alias, BigDecimal.class);
+	}
+
+	@Override
+	public String columnName() {
+		return minColumn.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSqlSelect.java
new file mode 100644
index 0000000000..9575504965
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSqlSelect.java
@@ -0,0 +1,35 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider;
+import lombok.Builder;
+import lombok.EqualsAndHashCode;
+import lombok.Value;
+import org.jooq.Field;
+import org.jooq.impl.DSL;
+
+@Value
+@Builder
+@EqualsAndHashCode
+public class RandomValueSqlSelect implements SqlSelect {
+
+	Field<?> randomColumn;
+	String alias;
+	@EqualsAndHashCode.Exclude
+	SqlFunctionProvider functionProvider;
+
+	@Override
+	public Field<?> select() {
+		return functionProvider.random(randomColumn).as(alias);
+	}
+
+	@Override
+	public Field<Object> aliased() {
+		return DSL.field(alias);
+	}
+
+	@Override
+	public String columnName() {
+		return randomColumn.getName();
+	}
+
+}
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelect.java
new file mode 100644
index 0000000000..35c8c3a43f
--- /dev/null
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelect.java
@@ -0,0 +1,26 @@
+package com.bakdata.conquery.sql.conversion.model.select;
+
+
+import org.jooq.Field;
+
+public interface SqlSelect {
+
+	/**
+	 * @return The whole (aliased) SQL expression of this {@link SqlSelect}.
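+	 * (i.e. the expression as it will appear in the SELECT clause of the generated query)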
+ * For example, {@code DSL.firstValue(DSL.field(DSL.name("foo", "bar"))).as("foobar")}. + */ + Field select(); + + /** + * @return Aliased column name that can be used to reference the created select. + * For example, {@code DSL.field("foobar")}. + */ + Field aliased(); + + /** + * @return Plain column name of this {@link SqlSelect}. + * For example, {@code "bar"}. + */ + String columnName(); + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelects.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelects.java new file mode 100644 index 0000000000..2b7c199297 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SqlSelects.java @@ -0,0 +1,20 @@ +package com.bakdata.conquery.sql.conversion.model.select; + +import java.util.Collections; +import java.util.List; + +import lombok.Builder; +import lombok.Value; + +@Builder +@Value +public class SqlSelects { + @Builder.Default + List forPreprocessingStep = Collections.emptyList(); + // Empty if only used in event filter + @Builder.Default + List forAggregationSelectStep = Collections.emptyList(); + // Empty if only used in aggregation select + @Builder.Default + List forFinalStep = Collections.emptyList(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SumSqlSelect.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SumSqlSelect.java new file mode 100644 index 0000000000..6247bd0653 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/SumSqlSelect.java @@ -0,0 +1,33 @@ +package com.bakdata.conquery.sql.conversion.model.select; + +import java.math.BigDecimal; + +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; +import org.jooq.Field; +import org.jooq.impl.DSL; + +@RequiredArgsConstructor +@EqualsAndHashCode +public class SumSqlSelect implements SqlSelect { + + private final Field columnToSum; + private final String alias; + + @Override + public Field select() { + return DSL.sum(columnToSum) + .as(alias); + } + + @Override + public Field aliased() { + return DSL.field(alias, BigDecimal.class); + } + + @Override + public String columnName() { + return columnToSum.getName(); + } + +} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java index 7a71880816..42b95f8fbb 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/ConceptQueryConverter.java @@ -1,18 +1,22 @@ package com.bakdata.conquery.sql.conversion.query; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import com.bakdata.conquery.apiv1.query.ConceptQuery; import com.bakdata.conquery.sql.conversion.NodeConverter; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.context.selects.Selects; -import com.bakdata.conquery.sql.conversion.context.step.QueryStep; -import com.bakdata.conquery.sql.conversion.context.step.QueryStepTransformer; +import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; +import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; -import com.bakdata.conquery.sql.models.ColumnDateRange; +import 
com.bakdata.conquery.sql.conversion.model.ColumnDateRange; +import com.bakdata.conquery.sql.conversion.model.QueryStep; +import com.bakdata.conquery.sql.conversion.model.QueryStepTransformer; +import com.bakdata.conquery.sql.conversion.model.Selects; import org.jooq.Field; import org.jooq.Record; import org.jooq.Select; +import org.jooq.impl.DSL; public class ConceptQueryConverter implements NodeConverter { @@ -31,15 +35,17 @@ public Class getConversionClass() { @Override public ConversionContext convert(ConceptQuery node, ConversionContext context) { - ConversionContext contextAfterConversion = context.getNodeConverterService() + ConversionContext contextAfterConversion = context.getNodeConversions() .convert(node.getRoot(), context); QueryStep preFinalStep = contextAfterConversion.getQuerySteps().iterator().next(); + Selects preFinalSelects = preFinalStep.getQualifiedSelects(); + QueryStep finalStep = QueryStep.builder() .cteName(null) // the final QueryStep won't be converted to a CTE - .selects(this.toFinalSelects(preFinalStep, context)) + .selects(getFinalSelects(preFinalSelects, context.getSqlDialect().getFunctionProvider())) .fromTable(QueryStep.toTableLike(preFinalStep.getCteName())) - .conditions(preFinalStep.getConditions()) + .groupBy(getFinalGroupBySelects(preFinalSelects, context.getSqlDialect())) .predecessors(List.of(preFinalStep)) .build(); @@ -47,22 +53,26 @@ public ConversionContext convert(ConceptQuery node, ConversionContext context) { return context.withFinalQuery(finalQuery); } - /** - * @return The final selects containing the final validity date, if present, as a string aggregation field. - */ - private Selects toFinalSelects(QueryStep preFinalStep, ConversionContext context) { - - Selects finalSelects = preFinalStep.getQualifiedSelects(); + private Selects getFinalSelects(Selects preFinalSelects, SqlFunctionProvider functionProvider) { - if (finalSelects.getValidityDate().isEmpty()) { - return finalSelects; + if (preFinalSelects.getValidityDate().isEmpty()) { + Field emptyRange = DSL.field(DSL.val("{}")); + return preFinalSelects.withValidityDate(ColumnDateRange.of(emptyRange)); } - SqlFunctionProvider functionProvider = context.getSqlDialect().getFunction(); - Field finalValidityDateSelect = functionProvider.daterangeString(finalSelects.getValidityDate().get()) - .as(FINAL_VALIDITY_DATE_COLUMN_NAME); + Field validityDateStringAggregation = functionProvider.validityDateStringAggregation(preFinalSelects.getValidityDate().get()) + .as(FINAL_VALIDITY_DATE_COLUMN_NAME); + return preFinalSelects.withValidityDate(ColumnDateRange.of(validityDateStringAggregation)); + } - return finalSelects.withValidityDate(ColumnDateRange.of(finalValidityDateSelect)); + private List> getFinalGroupBySelects(Selects preFinalSelects, SqlDialect sqlDialect) { + if (!sqlDialect.requiresAggregationInFinalStep()) { + return Collections.emptyList(); + } + List> groupBySelects = new ArrayList<>(); + groupBySelects.add(preFinalSelects.getPrimaryColumn()); + groupBySelects.addAll(preFinalSelects.explicitSelects()); + return groupBySelects; } } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java deleted file mode 100644 index c2f90372f8..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/DateDistanceConverter.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.bakdata.conquery.sql.conversion.select; - -import java.sql.Date; 
-import java.time.LocalDate; -import java.time.temporal.ChronoUnit; -import java.util.Objects; - -import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.DateDistanceSelect; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.supplier.DateNowSupplier; -import org.jooq.Field; -import org.jooq.Name; -import org.jooq.impl.DSL; - -public class DateDistanceConverter implements SelectConverter { - - private final DateNowSupplier dateNowSupplier; - - public DateDistanceConverter(DateNowSupplier dateNowSupplier) { - this.dateNowSupplier = dateNowSupplier; - } - - @Override - public Field convert(DateDistanceSelect select, ConversionContext context) { - - ChronoUnit timeUnit = select.getTimeUnit(); - Name startDateColumnName = DSL.name(select.getColumn().getName()); - Date endDate = getEndDate(context); - - return context.getSqlDialect().getFunction().dateDistance(timeUnit, startDateColumnName, endDate) - .as(select.getLabel()); - } - - private Date getEndDate(ConversionContext context) { - LocalDate endDate; - // if a date restriction is set, the max of the date restriction equals the end date of the date distance - if (Objects.nonNull(context.getDateRestrictionRange())) { - endDate = context.getDateRestrictionRange().getMax(); - } - else { - // otherwise the current date is the upper bound - endDate = dateNowSupplier.getLocalDateNow(); - } - return Date.valueOf(endDate); - } - - @Override - public Class getConversionClass() { - return DateDistanceSelect.class; - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java deleted file mode 100644 index f91c5e4d8a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/FirstValueConverter.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.bakdata.conquery.sql.conversion.select; - -import com.bakdata.conquery.models.datasets.concepts.select.connector.FirstValueSelect; -import com.bakdata.conquery.sql.conversion.context.ConversionContext; -import com.bakdata.conquery.sql.conversion.dialect.SqlFunctionProvider; -import org.jooq.Field; -import org.jooq.impl.DSL; - -public class FirstValueConverter implements SelectConverter { - - public Field convert(FirstValueSelect select, ConversionContext context) { - SqlFunctionProvider fn = context.getSqlDialect().getFunction(); - return fn.first(DSL.name(select.getColumn().getName())); - } - - @Override - public Class getConversionClass() { - return FirstValueSelect.class; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java deleted file mode 100644 index 88527eede2..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/select/SelectConverterService.java +++ /dev/null @@ -1,14 +0,0 @@ -package com.bakdata.conquery.sql.conversion.select; - -import java.util.List; - -import com.bakdata.conquery.models.datasets.concepts.select.Select; -import com.bakdata.conquery.sql.conversion.ConverterService; -import org.jooq.Field; - -public class SelectConverterService extends ConverterService> { - - public SelectConverterService(List> converters) { - super(converters); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java 
b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java index e847611ad5..3608462b21 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/execution/SqlExecutionService.java @@ -45,6 +45,10 @@ private SqlExecutionResult createStatementAndExecute(SqlManagedQuery sqlQuery, C catch (SQLException e) { throw new ConqueryError.SqlError(e); } + // not all DB vendors throw SQLExceptions + catch (RuntimeException e) { + throw new ConqueryError.SqlError(new SQLException(e)); + } } private List createResultTable(ResultSet resultSet, int columnCount) throws SQLException { diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index 72072df65e..f159ec3c37 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -23,13 +23,13 @@ import com.bakdata.conquery.TestTags; import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.sql.SqlIntegrationTest; +import com.bakdata.conquery.integration.sql.dialect.TestSqlDialect; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.SqlConnectorConfig; -import com.bakdata.conquery.sql.conversion.dialect.SqlDialect; import com.bakdata.conquery.util.support.ConfigOverride; import com.bakdata.conquery.util.support.TestConquery; import com.codahale.metrics.SharedMetricRegistries; @@ -137,7 +137,7 @@ public Stream programmaticTests() { @SneakyThrows - public Stream sqlTests(SqlDialect sqlDialect, SqlConnectorConfig sqlConfig) { + public Stream sqlTests(TestSqlDialect sqlDialect, SqlConnectorConfig sqlConfig) { final Path testRootDir = Path.of(Objects.requireNonNullElse( System.getenv(TestTags.SQL_BACKEND_TEST_DIRECTORY_ENVIRONMENT_VARIABLE), SqlIntegrationTest.SQL_TEST_DIR diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java index 3af71cc613..0bcd83e183 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/CsvTableImporter.java @@ -16,9 +16,11 @@ import com.bakdata.conquery.integration.common.RequiredColumn; import com.bakdata.conquery.integration.common.RequiredTable; import com.bakdata.conquery.integration.common.ResourceFile; +import com.bakdata.conquery.integration.sql.dialect.TestSqlDialect; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.config.CSVConfig; import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.config.SqlConnectorConfig; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.preproc.parser.specific.DateRangeParser; import com.bakdata.conquery.models.query.results.EntityResult; @@ -33,7 +35,6 @@ import org.jooq.Record; import org.jooq.RowN; import org.jooq.Table; -import org.jooq.conf.ParamType; import org.jooq.impl.BuiltInDataType; import 
org.jooq.impl.DSL; import org.jooq.impl.SQLDataType; @@ -46,11 +47,15 @@ public class CsvTableImporter { private final DSLContext dslContext; private final DateRangeParser dateRangeParser; private final CsvParser csvReader; + private final TestSqlDialect testSqlDialect; + private final SqlConnectorConfig sqlConnectorConfig; - public CsvTableImporter(DSLContext dslContext) { + public CsvTableImporter(DSLContext dslContext, TestSqlDialect testSqlDialect, SqlConnectorConfig sqlConnectorConfig) { this.dslContext = dslContext; this.dateRangeParser = new DateRangeParser(new ConqueryConfig()); this.csvReader = new CSVConfig().withSkipHeader(true).createParser(); + this.testSqlDialect = testSqlDialect; + this.sqlConnectorConfig = sqlConnectorConfig; } /** @@ -85,33 +90,24 @@ public List readExpectedEntities(Path csv) throws IOException { } private void insertValuesIntoTable(Table table, List> columns, List content, Statement statement) throws SQLException { - for (RowN rowN : content) { - // e.g. HANA does not support bulk insert, so we insert row by row - String insertRowStatement = dslContext.insertInto(table, columns) - .values(rowN) - .getSQL(ParamType.INLINED); - log.info("Inserting into table: {}", insertRowStatement); - statement.execute(insertRowStatement); - } + log.debug("Inserting into table: {}", content); + testSqlDialect.getTestFunctionProvider().insertValuesIntoTable(table, columns, content, statement, dslContext); } private void createTable(Table table, List> columns, Statement statement) throws SQLException { - String createTableStatement = dslContext.createTable(table) - .columns(columns) - .getSQL(ParamType.INLINED); - log.info("Creating table: {}", createTableStatement); + String createTableStatement = testSqlDialect.getTestFunctionProvider().createTableStatement(table, columns, dslContext); + + log.debug("Creating table: {}", createTableStatement); statement.execute(createTableStatement); } private void dropTable(Table table, Statement statement) { try { - // DROP TABLE IF EXISTS is not supported in HANA, we just ignore possible errors if the table does not exist - String dropTableStatement = dslContext.dropTable(table) - .getSQL(ParamType.INLINED); + String dropTableStatement = testSqlDialect.getTestFunctionProvider().createDropTableStatement(table, dslContext); statement.execute(dropTableStatement); } catch (SQLException e) { - log.info("Dropping table {} failed.", table.getName(), e); + log.debug("Dropping table {} failed.", table.getName(), e); } } @@ -134,11 +130,17 @@ private Field createField(RequiredColumn requiredColumn) { case INTEGER -> SQLDataType.INTEGER; case BOOLEAN -> SQLDataType.BOOLEAN; // TODO: temporary workaround until we cast ResultSet elements back - case REAL -> SQLDataType.DECIMAL(10,2); + case REAL -> SQLDataType.DECIMAL(10, 2); case DECIMAL, MONEY -> SQLDataType.DECIMAL; case DATE -> SQLDataType.DATE; case DATE_RANGE -> new BuiltInDataType<>(DateRange.class, "daterange"); }; + + // Set all columns except 'pid' to nullable, important for ClickHouse compatibility + if (!requiredColumn.getName().equals(sqlConnectorConfig.getPrimaryColumn())) { + dataType = dataType.nullable(true); + } + return DSL.field(DSL.name(requiredColumn.getName()), dataType); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java index c64c775951..d887cc283b 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java 
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTest.java
@@ -3,9 +3,9 @@ import java.io.IOException;
 import java.nio.file.Path;
 
+import com.bakdata.conquery.integration.sql.dialect.TestSqlDialect;
 import com.bakdata.conquery.models.config.SqlConnectorConfig;
 import com.bakdata.conquery.models.exceptions.JSONException;
-import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
 import lombok.AllArgsConstructor;
 import lombok.Getter;
 import org.junit.jupiter.api.function.Executable;
@@ -24,7 +24,7 @@ public void execute() throws IOException, JSONException {
 		testSpec.executeTest(support);
 	}
 
-	public static SqlIntegrationTest fromPath(final Path path, final SqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) {
+	public static SqlIntegrationTest fromPath(final Path path, final TestSqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) {
 		return new SqlIntegrationTest(new SqlStandaloneSupport(sqlDialect, sqlConfig), SqlIntegrationTestSpec.fromJsonSpec(path));
 	}
 
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java
index c6340f3227..45a7558a47 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlIntegrationTestSpec.java
@@ -81,11 +81,6 @@ public static SqlIntegrationTestSpec fromJsonSpec(Path path) {
 		return test;
 	}
 
-	private static SqlIntegrationTestSpec readSpecFromJson(Path path) throws IOException {
-		final ObjectReader objectReader = Jackson.MAPPER.readerFor(SqlIntegrationTestSpec.class);
-		return objectReader.readValue(Files.readString(path));
-	}
-
 	@Override
 	public void executeTest(SqlStandaloneSupport support) throws IOException {
 		for (RequiredTable table : content.getTables()) {
@@ -114,6 +109,11 @@ public void importRequiredData(SqlStandaloneSupport support) throws IOException,
 		setQuery(parsedQuery);
 	}
 
+	private static SqlIntegrationTestSpec readSpecFromJson(Path path) throws IOException {
+		final ObjectReader objectReader = Jackson.MAPPER.readerFor(SqlIntegrationTestSpec.class);
+		return objectReader.readValue(Files.readString(path));
+	}
+
 	private void importTables(SqlStandaloneSupport support) {
 		for (RequiredTable rTable : getContent().getTables()) {
 			final Table table = rTable.toTable(support.getDataset(), support.getNamespaceStorage().getCentralRegistry());
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java
index 32fb9525d9..badf981308 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/SqlStandaloneSupport.java
@@ -3,6 +3,7 @@ import javax.validation.Validator;
 
 import com.bakdata.conquery.integration.IntegrationTests;
+import com.bakdata.conquery.integration.sql.dialect.TestSqlDialect;
 import com.bakdata.conquery.io.jackson.Jackson;
 import com.bakdata.conquery.io.storage.MetaStorage;
 import com.bakdata.conquery.io.storage.NamespaceStorage;
@@ -17,7 +18,6 @@ import com.bakdata.conquery.models.worker.Namespace;
 import com.bakdata.conquery.sql.SqlContext;
 import com.bakdata.conquery.sql.conquery.SqlExecutionManager;
-import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
 import com.bakdata.conquery.util.NonPersistentStoreFactory;
 import com.bakdata.conquery.util.support.TestSupport;
 import io.dropwizard.jersey.validation.Validators;
@@ -36,7 +36,7 @@ public class SqlStandaloneSupport implements TestSupport {
 	CsvTableImporter tableImporter;
 	SqlExecutionManager executionManager;
 
-	public SqlStandaloneSupport(final SqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) {
+	public SqlStandaloneSupport(final TestSqlDialect sqlDialect, final SqlConnectorConfig sqlConfig) {
 		this.dataset = new Dataset("test");
 		NamespaceStorage storage = new NamespaceStorage(new NonPersistentStoreFactory(), "", VALIDATOR) {
 		};
@@ -57,7 +57,7 @@ public SqlStandaloneSupport(final SqlDialect sqlDialect, final SqlConnectorConfi
 		testUser = getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(metaStorage);
 		metaStorage.updateUser(testUser);
 		namespace = registry.createNamespace(storage);
-		tableImporter = new CsvTableImporter(sqlDialect.getDSLContext());
+		tableImporter = new CsvTableImporter(sqlDialect.getDSLContext(), sqlDialect, sqlConfig);
 		executionManager = (SqlExecutionManager) namespace.getExecutionManager();
 	}
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java
index 8da1528554..7e71043baa 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/HanaSqlIntegrationTests.java
@@ -6,6 +6,9 @@ import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
 import java.nio.file.attribute.PosixFilePermission;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
@@ -18,14 +21,19 @@ import com.bakdata.conquery.models.config.SqlConnectorConfig;
 import com.bakdata.conquery.models.datasets.concepts.select.Select;
 import com.bakdata.conquery.sql.DslContextFactory;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.select.DateDistanceSelectConverter;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConverter;
 import com.bakdata.conquery.sql.conversion.dialect.HanaSqlDialect;
-import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter;
-import com.bakdata.conquery.sql.conversion.select.SelectConverter;
 import com.google.common.base.Strings;
 import lombok.Getter;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.Record;
+import org.jooq.RowN;
+import org.jooq.Table;
+import org.jooq.conf.ParamType;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.DynamicTest;
@@ -91,13 +99,13 @@ public static void tearDownClass() {
 			return;
 		}
 		try (Stream<Path> walk = Files.walk(TMP_HANA_MOUNT_DIR)) {
-			walk.sorted((p1, p2) ->
-							p1.compareTo(p2))
+			// sort in reverse order so children are deleted before their parent directories
+			// (File::delete cannot remove a non-empty directory)
+			walk.sorted(Comparator.reverseOrder())
 				.map(Path::toFile)
 				.forEach(File::delete);
 		}
 	}
 
-	private static class TestHanaDialect extends HanaSqlDialect {
+	private static class TestHanaDialect extends HanaSqlDialect implements TestSqlDialect {
 
 		public TestHanaDialect(DSLContext dslContext) {
 			super(dslContext);
@@ -106,10 +114,36 @@ public TestHanaDialect(DSLContext dslContext) {
 
 		@Override
 		public List<SelectConverter<? extends Select>> getSelectConverters() {
 			return this.customizeSelectConverters(List.of(
-					new DateDistanceConverter(new MockDateNowSupplier())
+					new DateDistanceSelectConverter(new MockDateNowSupplier())
 			));
 		}
 
+		public TestFunctionProvider getTestFunctionProvider() {
+			return new HanaTestFunctionProvider();
+		}
+
+	}
+
+	private static class HanaTestFunctionProvider implements TestFunctionProvider {
+
+		@Override
+		public void insertValuesIntoTable(Table<Record> table, List<Field<?>> columns, List<RowN> content, Statement statement, DSLContext dslContext)
+				throws SQLException {
+			// HANA does not support multi-row INSERT ... VALUES, so every row becomes its own statement
+			for (RowN rowN : content) {
+				String insertRowStatement = dslContext.insertInto(table, columns)
+													  .values(rowN)
+													  .getSQL(ParamType.INLINED);
+
+				statement.execute(insertRowStatement);
+			}
+		}
+
+		@Override
+		public String createDropTableStatement(Table<Record> table, DSLContext dslContext) {
+			// render a plain DROP TABLE, since DROP TABLE IF EXISTS is not reliably available on HANA
+			return dslContext.dropTable(table)
+							 .getSQL(ParamType.INLINED);
+		}
+	}
 
 	@Getter
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java
index d4804c3076..0e26dd51b0 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/PostgreSqlIntegrationTests.java
@@ -14,9 +14,9 @@ import com.bakdata.conquery.sql.DslContextFactory;
 import com.bakdata.conquery.sql.SqlQuery;
 import com.bakdata.conquery.sql.conquery.SqlManagedQuery;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.select.DateDistanceSelectConverter;
+import com.bakdata.conquery.sql.conversion.cqelement.concept.select.SelectConverter;
 import com.bakdata.conquery.sql.conversion.dialect.PostgreSqlDialect;
-import com.bakdata.conquery.sql.conversion.select.DateDistanceConverter;
-import com.bakdata.conquery.sql.conversion.select.SelectConverter;
 import com.bakdata.conquery.sql.execution.SqlExecutionService;
 import lombok.extern.slf4j.Slf4j;
 import org.assertj.core.api.Assertions;
@@ -90,7 +90,7 @@ public Stream sqlBackendTests() {
 		return super.sqlTests(new TestPostgreSqlDialect(dslContext), sqlConfig);
 	}
 
-	private static class TestPostgreSqlDialect extends PostgreSqlDialect {
+	private static class TestPostgreSqlDialect extends PostgreSqlDialect implements TestSqlDialect {
 
 		public TestPostgreSqlDialect(DSLContext dslContext) {
 			super(dslContext);
@@ -99,10 +99,17 @@ public TestPostgreSqlDialect(DSLContext dslContext) {
 		@Override
 		public List<SelectConverter<? extends Select>> getSelectConverters() {
 			return this.customizeSelectConverters(List.of(
-					new DateDistanceConverter(new MockDateNowSupplier())
+					new DateDistanceSelectConverter(new MockDateNowSupplier())
 			));
 		}
 
+		public TestFunctionProvider getTestFunctionProvider() {
+			return new PostgreSqlTestFunctionProvider();
+		}
+	}
+
+	private static class PostgreSqlTestFunctionProvider implements TestFunctionProvider {
+		// PostgreSQL works with the jOOQ-based defaults from TestFunctionProvider as-is
+	}
 }
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java
index a4971a7ff5..c111f83d8b 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestContextProvider.java
@@ -6,6 +6,7 @@ public interface TestContextProvider {
 
 	SqlConnectorConfig getSqlConnectorConfig();
+	DSLContext getDslContext();
 }
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestFunctionProvider.java
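The TestFunctionProvider interface created below gives the SQL integration tests a small seam for dialect-specific DDL/DML: jOOQ-rendered defaults that TestPostgreSqlDialect reuses unchanged and that the HANA dialect above partially overrides. A minimal sketch of exercising the defaults on their own, assuming a class placed next to the interface and the signatures reconstructed above; the table and column names are made up for illustration:

	import java.util.List;

	import org.jooq.DSLContext;
	import org.jooq.Field;
	import org.jooq.Record;
	import org.jooq.SQLDialect;
	import org.jooq.Table;
	import org.jooq.impl.DSL;

	class TestFunctionProviderSketch {

		public static void main(String[] args) {
			// the dialect only affects how jOOQ renders SQL; no connection is needed for rendering
			DSLContext dsl = DSL.using(SQLDialect.POSTGRES);

			Table<Record> table = DSL.table(DSL.name("table1"));
			List<Field<?>> columns = List.<Field<?>>of(
					DSL.field(DSL.name("pid"), String.class),
					DSL.field(DSL.name("value"), Double.class)
			);

			// an anonymous instance keeps all default implementations
			TestFunctionProvider provider = new TestFunctionProvider() {
			};

			System.out.println(provider.createTableStatement(table, columns, dsl)); // create table "table1" (...)
			System.out.println(provider.createDropTableStatement(table, dsl));      // drop table if exists "table1"
		}
	}
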
b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestFunctionProvider.java
new file mode 100644
index 0000000000..3d7f1351c1
--- /dev/null
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestFunctionProvider.java
@@ -0,0 +1,35 @@
+package com.bakdata.conquery.integration.sql.dialect;
+
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+
+import org.jooq.DSLContext;
+import org.jooq.Field;
+import org.jooq.Record;
+import org.jooq.RowN;
+import org.jooq.Table;
+import org.jooq.conf.ParamType;
+
+/**
+ * jOOQ-based DDL/DML helpers for the SQL integration tests. The defaults render inlined SQL;
+ * a dialect overrides single methods where its database deviates (see HanaTestFunctionProvider).
+ */
+public interface TestFunctionProvider {
+
+	default String createTableStatement(Table<Record> table, List<Field<?>> columns, DSLContext dslContext) {
+		return dslContext.createTable(table)
+						 .columns(columns)
+						 .getSQL(ParamType.INLINED);
+	}
+
+	default void insertValuesIntoTable(Table<Record> table, List<Field<?>> columns, List<RowN> content, Statement statement, DSLContext dslContext)
+			throws SQLException {
+		// a single multi-row INSERT ... VALUES statement
+		String insertIntoTableStatement = dslContext.insertInto(table, columns)
+													.valuesOfRows(content)
+													.getSQL(ParamType.INLINED);
+		statement.execute(insertIntoTableStatement);
+	}
+
+	default String createDropTableStatement(Table<Record> table, DSLContext dslContext) {
+		return dslContext.dropTableIfExists(table)
+						 .getSQL(ParamType.INLINED);
+	}
+
+}
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestSqlDialect.java b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestSqlDialect.java
new file mode 100644
index 0000000000..3dc1e18c23
--- /dev/null
+++ b/backend/src/test/java/com/bakdata/conquery/integration/sql/dialect/TestSqlDialect.java
@@ -0,0 +1,9 @@
+package com.bakdata.conquery.integration.sql.dialect;
+
+import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
+
+public interface TestSqlDialect extends SqlDialect {
+
+	TestFunctionProvider getTestFunctionProvider();
+
+}
diff --git a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java
index 3d68b80131..2a6b4f3f50 100644
--- a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java
+++ b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java
@@ -507,7 +507,7 @@ public void meInformation() throws IOException, JSONException {
 				.userName(user.getLabel())
 				.hideLogoutButton(false)
 				.groups(List.of(new IdLabel<>(new GroupId("test_group"), "test_group_label")))
-				.datasetAbilities(Map.of(new DatasetId("testdataset"), new MeProcessor.FrontendDatasetAbility(true)))
+				.datasetAbilities(Map.of(new DatasetId("testdataset"), new MeProcessor.FrontendDatasetAbility(true, true, true)))
 				.build();
 
 		SerializationTestUtil
diff --git a/backend/src/test/resources/logback-test.xml b/backend/src/test/resources/logback-test.xml
index 55be843dad..5de928a926 100644
--- a/backend/src/test/resources/logback-test.xml
+++ b/backend/src/test/resources/logback-test.xml
@@ -11,6 +11,6 @@
-
+
-
\ No newline at end of file
+
diff --git a/backend/src/test/resources/tests/sql/and/different_concept/and.json b/backend/src/test/resources/tests/sql/and/different_concept/and.json
index 67b6bdf742..ec34f0f061 100644
--- a/backend/src/test/resources/tests/sql/and/different_concept/and.json
+++ b/backend/src/test/resources/tests/sql/and/different_concept/and.json
@@ -27,7 +27,7 @@
           }
         ],
         "selects": [
-          "number.number_connector.value"
+          "number.number_connector.first_value"
        ]
      }
    ]
@@ -51,7 +51,7 @@
           }
         ],
         "selects": [
-          "geschlecht_select.geschlecht_connector.geschlecht"
+ "geschlecht_select.geschlecht_connector.first_geschlecht" ] } ] @@ -75,7 +75,7 @@ } ], "selects": [ - "language_select.language_connector.language" + "language_select.language_connector.first_language" ] } ] @@ -103,7 +103,7 @@ "type": "NUMBER" }, "selects": { - "name": "value", + "name": "first_value", "column": "table1.value", "type": "FIRST" } @@ -128,7 +128,7 @@ "type": "SELECT" }, "selects": { - "name": "geschlecht", + "name": "first_geschlecht", "column": "table2.geschlecht", "type": "FIRST" } @@ -153,7 +153,7 @@ "type": "SELECT" }, "selects": { - "name": "language", + "name": "first_language", "column": "table2.language", "type": "FIRST" } diff --git a/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv index 244295a111..2a4fce0beb 100644 --- a/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv +++ b/backend/src/test/resources/tests/sql/and/different_concept/content_1.csv @@ -2,7 +2,7 @@ pid,value,datum_start,datum_end 1,1,2014-06-30,2015-06-30 2,1.01,2014-06-30,2015-06-30 1,1,2015-02-03,2015-06-30 -1,0.5,2014-06-30,2015-06-30 +1,1,2016-02-05,2016-02-05 3,0.5,2014-04-30,2014-06-30 4,1,2014-06-30,2015-06-30 5,0.5,2014-04-30,2014-06-30 diff --git a/backend/src/test/resources/tests/sql/and/different_concept/expected.csv b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv index c1885bafaf..b74c795e0b 100644 --- a/backend/src/test/resources/tests/sql/and/different_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/and/different_concept/expected.csv @@ -1,4 +1,2 @@ -pid,validity_date_1value,geschlecht,language -1,"[2014-06-30,2015-07-01)",1.00,f,de -1,"[2015-02-03,2015-07-01)",1.00,f,de -1,"[2014-06-30,2015-07-01)",0.50,f,de +pid,dates,value,geschlecht,language +1,"{[2012-01-01,2012-01-02),[2014-06-30,2015-07-01),[2016-02-05,2016-02-06)}",1.00,f,de diff --git a/backend/src/test/resources/tests/sql/and/same_concept/expected.csv b/backend/src/test/resources/tests/sql/and/same_concept/expected.csv index 4d99aa75d0..3a40f84f6b 100644 --- a/backend/src/test/resources/tests/sql/and/same_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/and/same_concept/expected.csv @@ -1,13 +1,7 @@ -pid -1 -1 -1 -1 -1 -1 -4 -5 -5 -6 -7 -8 +pid,dates +1,{} +4,{} +5,{} +6,{} +7,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv index 69f66cbc3e..130eee1588 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/content.csv @@ -1,9 +1,9 @@ -pid,datum_start,datum_end,datum_alt,geschlecht -1,2012-06-30,2015-06-30,2012-01-01,"f" -2,2012-06-30,2015-06-30,2010-07-15,"m" -3,2012-02-03,2012-06-30,2012-11-10,"f" -4,2010-06-30,2015-06-30,2012-11-11,"m" -5,2011-04-30,2014-06-30,2007-11-11,"" -6,2015-06-30,2016-06-30,2012-11-11,"" -7,2014-04-30,2015-06-30,2012-11-11,"mf" -8,2012-04-30,2014-06-30,2012-11-11,"fm" +pid,datum_start,datum_end,datum_alt +1,2012-06-30,2015-06-30,2012-01-01 +2,2012-06-30,2015-06-30,2010-07-15 +3,2012-02-03,2012-06-30,2012-11-10 +4,2010-06-30,2015-06-30,2012-11-11 +5,2011-04-30,2014-06-30,2007-11-11 +6,2015-06-30,2016-06-30,2012-11-11 +7,2014-04-30,2015-06-30,2012-11-11 +8,2012-04-30,2014-06-30,2012-11-11 diff --git 
a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json index 08897212ea..ed5bd21617 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/date_restriction_date_column.json @@ -22,15 +22,6 @@ "tables": [ { "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ], "dateColumn" : { "value" : "geschlecht_select.geschlecht_connector.datum_alt" } @@ -59,13 +50,7 @@ "label": "datum_alt", "column": "table1.datum_alt" } - ], - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" - } + ] } ] } @@ -91,10 +76,6 @@ { "name": "datum_alt", "type": "DATE" - }, - { - "name": "geschlecht", - "type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv index 2b81453f9f..0fdd6fa1e8 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_date_column/expected.csv @@ -1,3 +1,7 @@ -pid,datum_alt -1,"[2012-01-01,2012-01-02)" -3,"[2012-11-10,2012-11-11)" +pid,dates +1,"{[2012-01-01,2012-01-02)}" +3,"{[2012-11-10,2012-11-11)}" +4,"{[2012-11-11,2012-11-12)}" +6,"{[2012-11-11,2012-11-12)}" +7,"{[2012-11-11,2012-11-12)}" +8,"{[2012-11-11,2012-11-12)}" diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv index 212025dec4..27f0f13ef9 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2012-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2012-11-10 +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2012-11-11 +8,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json index bd3ee6c472..071c542987 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/date_restriction_no_validity_date.json @@ -22,16 +22,7 @@ "label": "Geschlecht SELECT", "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": 
"geschlecht_select.geschlecht_connector" } ] } @@ -46,13 +37,7 @@ "connectors": [ { "label": "geschlecht_connector", - "table": "table1", - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" - } + "table": "table1" } ] } @@ -70,10 +55,6 @@ { "name": "datum", "type": "DATE" - }, - { - "name": "geschlecht", - "type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv index b6a85aedc6..f22a7fd7d4 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/date_restriction_no_validity_date/expected.csv @@ -1,3 +1,9 @@ -pid -1 -3 +pid,dates +1,"{}" +2,"{}" +3,"{}" +4,"{}" +5,"{}" +6,"{}" +7,"{}" +8,"{}" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv b/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv index 212025dec4..7370260880 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2012-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2013-11-10 +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2013-11-11 +8,2015-11-11 diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv index c34539ab81..b17e26a3cc 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/expected.csv @@ -1,3 +1,6 @@ -pid,datum -1,"[2012-01-01,2012-01-02)" -3,"[2012-11-10,2012-11-11)" +pid,dates +2,"{[2010-07-15,2010-07-16)}" +4,"{[2012-11-11,2012-11-12)}" +6,"{[2012-11-11,2012-11-12)}" +5,"{[2007-11-11,2007-11-12)}" +1,"{[2012-01-01,2012-01-02)}" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json b/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json index fbb6d2e1d0..268c6a5664 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json +++ b/backend/src/test/resources/tests/sql/date_restriction/only_max/only_max.spec.json @@ -20,16 +20,7 @@ "label": "Geschlecht SELECT", "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } @@ -48,12 +39,6 @@ "validityDates": { "label": "datum", "column": "table1.datum" - }, - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" } } ] @@ -62,7 +47,7 @@ "content": { "tables": [ { - "csv": "tests/sql/date_restriction/simple_date/content.csv", + "csv": "tests/sql/date_restriction/only_max/content.csv", "name": "table1", "primaryColumn": { "name": "pid", @@ -72,10 +57,6 @@ { "name": "datum", "type": "DATE" - }, - { - "name": "geschlecht", - 
"type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv b/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv index 212025dec4..27f0f13ef9 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2012-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2012-11-10 +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2012-11-11 +8,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv index c34539ab81..0fdd6fa1e8 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/expected.csv @@ -1,3 +1,7 @@ -pid,datum -1,"[2012-01-01,2012-01-02)" -3,"[2012-11-10,2012-11-11)" +pid,dates +1,"{[2012-01-01,2012-01-02)}" +3,"{[2012-11-10,2012-11-11)}" +4,"{[2012-11-11,2012-11-12)}" +6,"{[2012-11-11,2012-11-12)}" +7,"{[2012-11-11,2012-11-12)}" +8,"{[2012-11-11,2012-11-12)}" diff --git a/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json b/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json index 76a93dafde..51fbaf0d8d 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json +++ b/backend/src/test/resources/tests/sql/date_restriction/only_min/only_min.spec.json @@ -20,16 +20,7 @@ "label": "Geschlecht SELECT", "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } @@ -48,12 +39,6 @@ "validityDates": { "label": "datum", "column": "table1.datum" - }, - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" } } ] @@ -72,10 +57,6 @@ { "name": "datum", "type": "DATE" - }, - { - "name": "geschlecht", - "type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv index f3da646264..5ac7b2e2d1 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,"2012-06-30/2015-06-30","f" -2,"2012-06-30/2015-06-30","m" -3,"2012-02-03/2012-06-30","f" -4,"2010-06-30/2015-06-30","m" -5,"2011-04-30/2014-06-30","" -6,"2015-06-30/2016-06-30","" -7,"2014-04-30/2015-06-30","mf" -8,"2012-04-30/2014-06-30","fm" +pid,datum +1,"2012-06-30/2015-06-30" +2,"2012-06-30/2015-06-30" +3,"2012-02-03/2012-06-30" +4,"2010-06-30/2015-06-30" +5,"2011-04-30/2014-06-30" +6,"2015-06-30/2016-06-30" +7,"2014-04-30/2015-06-30" +8,"2012-04-30/2014-06-30" diff --git a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json index 
62965cefe5..5b7d1bba98 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json +++ b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/daterange_column.spec.json @@ -3,7 +3,7 @@ "supportedDialects": [ "POSTGRESQL" ], "label": "Date restriction with a daterange column validity date (PostgreSQL only)", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -21,60 +21,41 @@ "label": "Geschlecht SELECT", "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } } }, - "concepts":[ + "concepts": [ { - "label":"geschlecht_select", - "type":"TREE", - "connectors":[ + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ { - "label":"geschlecht_connector", - "table":"table1", - "validityDates":{ - "label":"datum", - "column":"table1.datum" - }, - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" } } ] } ], - "content":{ - "tables":[ + "content": { + "tables": [ { - "csv":"tests/sql/date_restriction/postgres_daterange/content.csv", - "name":"table1", - "primaryColumn":{ - "name":"pid", - "type":"STRING" + "csv": "tests/sql/date_restriction/postgres_daterange/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" }, - "columns":[ + "columns": [ { - "name":"datum", - "type":"DATE_RANGE" - }, - { - "name":"geschlecht", - "type":"STRING" + "name": "datum", + "type": "DATE_RANGE" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv index 777e8d3e55..8af92000f9 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/postgres_daterange/expected.csv @@ -1,3 +1,7 @@ -pid,datum_start,datum_end -1,"[2012-06-30,2015-06-30)" -3,"[2012-02-03,2012-06-30)" +pid,dates +2,"{[2012-06-30,2015-06-30)}" +4,"{[2010-06-30,2015-06-30)}" +3,"{[2012-02-03,2012-06-30)}" +5,"{[2011-04-30,2014-06-30)}" +1,"{[2012-06-30,2015-06-30)}" +8,"{[2012-04-30,2014-06-30)}" diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv b/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv index 212025dec4..27f0f13ef9 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/simple_date/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2012-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2012-11-10 +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2012-11-11 +8,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json
b/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json index 4749faac8d..ee32a80e82 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json +++ b/backend/src/test/resources/tests/sql/date_restriction/simple_date/date_restriction_simple_date.json @@ -21,16 +21,7 @@ "label": "Geschlecht SELECT", "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": "geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } @@ -49,12 +40,6 @@ "validityDates": { "label": "datum", "column": "table1.datum" - }, - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" } } ] @@ -73,10 +58,6 @@ { "name": "datum", "type": "DATE" - }, - { - "name": "geschlecht", - "type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv b/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv index c34539ab81..dc8397cb16 100644 --- a/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv +++ b/backend/src/test/resources/tests/sql/date_restriction/simple_date/expected.csv @@ -1,3 +1,7 @@ -pid,datum -1,"[2012-01-01,2012-01-02)" -3,"[2012-11-10,2012-11-11)" +pid,dates +4,"{[2012-11-11,2012-11-12)}" +7,"{[2012-11-11,2012-11-12)}" +3,"{[2012-11-10,2012-11-11)}" +6,"{[2012-11-11,2012-11-12)}" +1,"{[2012-01-01,2012-01-02)}" +8,"{[2012-11-11,2012-11-12)}" diff --git a/backend/src/test/resources/tests/sql/filter/big_multi_select/big_multi_select.spec.json b/backend/src/test/resources/tests/sql/filter/big_multi_select/big_multi_select.spec.json new file mode 100644 index 0000000000..6134aadae5 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/big_multi_select/big_multi_select.spec.json @@ -0,0 +1,72 @@ +{ + "label": "BIG_MULTI_SELECT filter", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f", + "", + null + ] + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "BIG_MULTI_SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/big_multi_select/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/select/content.csv b/backend/src/test/resources/tests/sql/filter/big_multi_select/content.csv similarity index 100% rename from backend/src/test/resources/tests/sql/filter/select/content.csv rename to backend/src/test/resources/tests/sql/filter/big_multi_select/content.csv diff --git 
a/backend/src/test/resources/tests/sql/filter/big_multi_select/expected.csv b/backend/src/test/resources/tests/sql/filter/big_multi_select/expected.csv new file mode 100644 index 0000000000..03ece7a7c3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/big_multi_select/expected.csv @@ -0,0 +1,5 @@ +pid,dates +1,{} +3,{} +5,{} +6,{} diff --git a/backend/src/test/resources/tests/sql/filter/count/content.csv b/backend/src/test/resources/tests/sql/filter/count/content.csv new file mode 100644 index 0000000000..3c3c196695 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/count/content.csv @@ -0,0 +1,9 @@ +pid,value +1,0.9 +1,0.8 +1,0.7 +2,0.1 +2,0.1 +3,1.0 +3,0.5 +4,19.0 diff --git a/backend/src/test/resources/tests/sql/filter/count/count.json b/backend/src/test/resources/tests/sql/filter/count/count.json new file mode 100644 index 0000000000..366bdb72b5 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/count/count.json @@ -0,0 +1,70 @@ +{ + "label": "COUNT filter", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 2 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "COUNT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/count/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/count/expected.csv b/backend/src/test/resources/tests/sql/filter/count/expected.csv new file mode 100644 index 0000000000..05fbecc897 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/count/expected.csv @@ -0,0 +1,4 @@ +pid,dates +1,{} +2,{} +3,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/centuries/centuries.spec.json b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/centuries.spec.json new file mode 100644 index 0000000000..c2d1c38fd3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/centuries.spec.json @@ -0,0 +1,73 @@ +{ + "label": "DATE_DISTANCE filter query with timeUnit CENTURIES", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.date_distance_centuries", + "type": "INTEGER_RANGE", + "value": { + "max": 1 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_centuries", + "name": "date_distance_centuries", + "timeUnit": "CENTURIES", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + 
"csv": "tests/sql/filter/date_distance/centuries/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/centuries/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/content.csv new file mode 100644 index 0000000000..cb125007de --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,1920-01-01 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/centuries/expected.csv b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/expected.csv new file mode 100644 index 0000000000..560413d16b --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/centuries/expected.csv @@ -0,0 +1,9 @@ +pid,dates +2,{} +4,{} +7,{} +3,{} +6,{} +5,{} +1,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/content.csv new file mode 100644 index 0000000000..2bbb467931 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2012-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json new file mode 100644 index 0000000000..bca03338b0 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json @@ -0,0 +1,84 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE filter query with timeUnit DAYS and date restriction set", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "type": "DATE_RESTRICTION", + "dateRange": { + "min": "2012-01-01", + "max": "2012-12-31" + }, + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.date_distance_days", + "type": "INTEGER_RANGE", + "value": { + "min": 100 + } + } + ] + } + ] + } + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_days", + "name": "date_distance_days", + "timeUnit": "DAYS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/date_distance/days_with_date_restriction/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/expected.csv 
b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/expected.csv new file mode 100644 index 0000000000..11c96bea09 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_with_date_restriction/expected.csv @@ -0,0 +1,2 @@ +pid,dates +1,"{[2012-01-01,2012-01-02)}" diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/content.csv new file mode 100644 index 0000000000..096aed0aaa --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2013-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/days_without_date_restriction.json b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/days_without_date_restriction.json new file mode 100644 index 0000000000..2e56615c33 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/days_without_date_restriction.json @@ -0,0 +1,73 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE filter query with timeUnit DAYS and without date restriction set", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.date_distance_days", + "type": "INTEGER_RANGE", + "value": { + "min": 4000 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_days", + "name": "date_distance_days", + "timeUnit": "DAYS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/date_distance/days_without_date_restriction/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/expected.csv b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/expected.csv new file mode 100644 index 0000000000..f0bb0740b3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/days_without_date_restriction/expected.csv @@ -0,0 +1,5 @@ +pid,dates +2,{} +5,{} +1,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/decades/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/decades/content.csv new file mode 100644 index 0000000000..5feec5fd01 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/decades/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2020-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/decades/decades.spec.json 
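All DATE_DISTANCE filter specs in this group compare a row's datum against a fixed reference day injected via MockDateNowSupplier, counted in whole units of the configured timeUnit; a row passes when that count falls into the INTEGER_RANGE value. The unit arithmetic matches java.time; a quick standalone illustration with a made-up reference date (the dialects compute this in SQL, which is not shown here):

	import java.time.LocalDate;
	import java.time.temporal.ChronoUnit;

	class DateDistanceSketch {

		public static void main(String[] args) {
			LocalDate datum = LocalDate.of(2012, 1, 1);
			LocalDate reference = LocalDate.of(2022, 1, 1); // illustrative, not the mocked test date

			// whole units between the two dates, truncated
			System.out.println(ChronoUnit.DAYS.between(datum, reference));      // 3653
			System.out.println(ChronoUnit.MONTHS.between(datum, reference));    // 120
			System.out.println(ChronoUnit.YEARS.between(datum, reference));     // 10
			System.out.println(ChronoUnit.DECADES.between(datum, reference));   // 1
			System.out.println(ChronoUnit.CENTURIES.between(datum, reference)); // 0
		}
	}
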
b/backend/src/test/resources/tests/sql/filter/date_distance/decades/decades.spec.json new file mode 100644 index 0000000000..c1d22efb5d --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/decades/decades.spec.json @@ -0,0 +1,73 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE filter query with timeUnit DECADES", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.date_distance_decades", + "type": "INTEGER_RANGE", + "value": { + "min": 1 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_decades", + "name": "date_distance_decades", + "timeUnit": "DECADES", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/date_distance/decades/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/decades/expected.csv b/backend/src/test/resources/tests/sql/filter/date_distance/decades/expected.csv new file mode 100644 index 0000000000..f0bb0740b3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/decades/expected.csv @@ -0,0 +1,5 @@ +pid,dates +2,{} +5,{} +1,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/months/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/months/content.csv new file mode 100644 index 0000000000..43ba1a9aa1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/months/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,2012-01-29 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/months/expected.csv b/backend/src/test/resources/tests/sql/filter/date_distance/months/expected.csv new file mode 100644 index 0000000000..6183c4b4fc --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/months/expected.csv @@ -0,0 +1,2 @@ +pid,dates +7,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/months/months.spec.json b/backend/src/test/resources/tests/sql/filter/date_distance/months/months.spec.json new file mode 100644 index 0000000000..ef1c1eedd4 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/months/months.spec.json @@ -0,0 +1,71 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE filter query with timeUnit MONTHS", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": { + "filter": "geschlecht_select.geschlecht_connector.date_distance_months", + "type": "INTEGER_RANGE", + "value": { + "max": 100 + } + } + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE",
"connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_months", + "name": "date_distance_months", + "timeUnit": "MONTHS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/date_distance/months/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/years/content.csv b/backend/src/test/resources/tests/sql/filter/date_distance/years/content.csv new file mode 100644 index 0000000000..8c2f303145 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/years/content.csv @@ -0,0 +1,9 @@ +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/years/expected.csv b/backend/src/test/resources/tests/sql/filter/date_distance/years/expected.csv new file mode 100644 index 0000000000..e98765737e --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/years/expected.csv @@ -0,0 +1,6 @@ +pid,dates +2,{} +3,{} +5,{} +1,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/date_distance/years/years.spec.json b/backend/src/test/resources/tests/sql/filter/date_distance/years/years.spec.json new file mode 100644 index 0000000000..82415606bc --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/date_distance/years/years.spec.json @@ -0,0 +1,71 @@ +{ + "type": "SQL_TEST", + "label": "DATE_DISTANCE filter query with timeUnit YEARS", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": { + "filter": "geschlecht_select.geschlecht_connector.date_distance_years", + "type": "INTEGER_RANGE", + "value": { + "min": 10 + } + } + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": [ + { + "column": "table1.datum", + "label": "date_distance_years", + "name": "date_distance_years", + "timeUnit": "YEARS", + "type": "DATE_DISTANCE" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/date_distance/years/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/multi_select/content.csv b/backend/src/test/resources/tests/sql/filter/multi_select/content.csv new file mode 100644 index 0000000000..7d719b498c --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/multi_select/content.csv @@ -0,0 +1,9 @@ +pid,geschlecht +1,"f" +2,"m" +3,"f" +4,"m" +5,"" +6,"" +7,"mf" +8,"fm" diff --git a/backend/src/test/resources/tests/sql/filter/multi_select/expected.csv b/backend/src/test/resources/tests/sql/filter/multi_select/expected.csv new file mode 100644 index 0000000000..03ece7a7c3 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/multi_select/expected.csv @@ -0,0 +1,5 @@ +pid,dates +1,{} +3,{} +5,{} +6,{} diff 
--git a/backend/src/test/resources/tests/sql/filter/select/select.spec.json b/backend/src/test/resources/tests/sql/filter/multi_select/multi_select.spec.json similarity index 89% rename from backend/src/test/resources/tests/sql/filter/select/select.spec.json rename to backend/src/test/resources/tests/sql/filter/multi_select/multi_select.spec.json index c545967747..81e348b80e 100644 --- a/backend/src/test/resources/tests/sql/filter/select/select.spec.json +++ b/backend/src/test/resources/tests/sql/filter/multi_select/multi_select.spec.json @@ -1,5 +1,5 @@ { - "label": "Single Big-Multi-Select Filter Query", + "label": "SELECT filter", "type": "SQL_TEST", "expectedCsv": "expected.csv", "query": { @@ -21,7 +21,9 @@ "filter":"geschlecht_select.geschlecht_connector.geschlecht", "type":"BIG_MULTI_SELECT", "value":[ - "f" + "f", + "", + null ] } ] @@ -52,7 +54,7 @@ "content":{ "tables":[ { - "csv":"tests/sql/filter/select/content.csv", + "csv": "tests/sql/filter/multi_select/content.csv", "name":"table1", "primaryColumn":{ "name":"pid", diff --git a/backend/src/test/resources/tests/sql/filter/number/expected.csv b/backend/src/test/resources/tests/sql/filter/number/expected.csv index ce715976a6..572f851f3b 100644 --- a/backend/src/test/resources/tests/sql/filter/number/expected.csv +++ b/backend/src/test/resources/tests/sql/filter/number/expected.csv @@ -1,11 +1,8 @@ -pid -1 -1 -1 -3 -4 -5 -5 -6 -7 -8 +pid,dates +1,{} +3,{} +4,{} +5,{} +6,{} +7,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv b/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv index ce668fc0b4..cec31c2e72 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv +++ b/backend/src/test/resources/tests/sql/filter/number_only_max/expected.csv @@ -1,5 +1,5 @@ -pid -1 -3 -5 -7 +pid,dates +1,{} +3,{} +5,{} +7,{} diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv index 5ceffe16ca..731f077a25 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/content.csv @@ -5,8 +5,7 @@ pid,value 1,0.5 3,0.5 4,1 -5,0.5 -5,1 +5,0.2 6,1 7,1 8,1 diff --git a/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv b/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv index 9b1503f7a8..7677296b77 100644 --- a/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv +++ b/backend/src/test/resources/tests/sql/filter/number_only_min/expected.csv @@ -1,12 +1,8 @@ -pid -1 -2 -1 -1 -3 -4 -5 -5 -6 -7 -8 +pid,dates +1,{} +2,{} +3,{} +4,{} +6,{} +7,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/filter/select/expected.csv b/backend/src/test/resources/tests/sql/filter/select/expected.csv deleted file mode 100644 index b5b5670cd1..0000000000 --- a/backend/src/test/resources/tests/sql/filter/select/expected.csv +++ /dev/null @@ -1,3 +0,0 @@ -pid -1 -3 \ No newline at end of file diff --git a/backend/src/test/resources/tests/sql/filter/single_select/content.csv b/backend/src/test/resources/tests/sql/filter/single_select/content.csv new file mode 100644 index 0000000000..7d719b498c --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/single_select/content.csv @@ -0,0 +1,9 @@ +pid,geschlecht +1,"f" +2,"m" +3,"f" +4,"m" +5,"" +6,"" +7,"mf" +8,"fm" diff --git 
a/backend/src/test/resources/tests/sql/filter/single_select/expected.csv b/backend/src/test/resources/tests/sql/filter/single_select/expected.csv new file mode 100644 index 0000000000..09e5bcda1c --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/single_select/expected.csv @@ -0,0 +1,3 @@ +pid,dates +1,{} +3,{} diff --git a/backend/src/test/resources/tests/sql/filter/single_select/single_select.spec.json b/backend/src/test/resources/tests/sql/filter/single_select/single_select.spec.json new file mode 100644 index 0000000000..2231c4f72d --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/single_select/single_select.spec.json @@ -0,0 +1,68 @@ +{ + "label": "SINGLE_SELECT filter", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "SELECT", + "value": "f" + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SINGLE_SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/single_select/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/filter/sum/content.csv b/backend/src/test/resources/tests/sql/filter/sum/content.csv new file mode 100644 index 0000000000..d199bdf995 --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/sum/content.csv @@ -0,0 +1,8 @@ +pid,value +1,0.9 +1,0.8 +2,0.1 +2,0.1 +3,1.0 +3,0.5 +4,19.0 diff --git a/backend/src/test/resources/tests/sql/filter/sum/expected.csv b/backend/src/test/resources/tests/sql/filter/sum/expected.csv new file mode 100644 index 0000000000..b4d0d327fe --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/sum/expected.csv @@ -0,0 +1,4 @@ +pid,dates +1,{} +3,{} +4,{} diff --git a/backend/src/test/resources/tests/sql/filter/sum/sum.json b/backend/src/test/resources/tests/sql/filter/sum/sum.json new file mode 100644 index 0000000000..2e0ea8850f --- /dev/null +++ b/backend/src/test/resources/tests/sql/filter/sum/sum.json @@ -0,0 +1,70 @@ +{ + "label": "SUM filter", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 1.5 + } + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "SUM" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/filter/sum/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ 
+ { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/not/expected.csv b/backend/src/test/resources/tests/sql/not/expected.csv index 416640214f..02496cb270 100644 --- a/backend/src/test/resources/tests/sql/not/expected.csv +++ b/backend/src/test/resources/tests/sql/not/expected.csv @@ -1,5 +1,7 @@ -pid -2 -4 -7 -8 +pid,dates +2,{} +4,{} +5,{} +6,{} +7,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv index 244295a111..1e967d5638 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv +++ b/backend/src/test/resources/tests/sql/or/different_concept/content_1.csv @@ -8,6 +8,7 @@ pid,value,datum_start,datum_end 5,0.5,2014-04-30,2014-06-30 5,1,2014-06-30,2015-06-30 6,1,2014-04-30,2014-06-30 -7,1,2014-02-05,2014-02-20 +7,1,2014-02-05,2015-02-20 +7,-1,2016-02-05,2016-02-05 8,1,2014-04-30,2014-06-30 -7,-1,2014-06-30,2015-06-30 +7,-1,2015-01-15,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv b/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv index dc012de238..d6987f7996 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv +++ b/backend/src/test/resources/tests/sql/or/different_concept/content_2.csv @@ -5,5 +5,5 @@ pid,datum,geschlecht,language 4,2012-11-11,"m","" 5,2007-11-11,"","" 6,2012-11-11,"","de" -7,2012-11-11,"mf","de" +7,2015-07-01,"mf","de" 8,2012-11-11,"fm","fr" diff --git a/backend/src/test/resources/tests/sql/or/different_concept/expected.csv b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv index e5155afc92..fc78f96fe8 100644 --- a/backend/src/test/resources/tests/sql/or/different_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/or/different_concept/expected.csv @@ -1,4 +1,4 @@ -pid,validity_date_1,,value,geschlecht,language -7,"[2014-06-30,2015-07-01)",-1.00,mf, -8,,,,fr -2,,,,fr +pid,dates,value,geschlecht,language +7,"{[2014-02-05,2015-07-02),[2016-02-05,2016-02-06)}",-1.00,mf, +8,"{[2012-11-11,2012-11-12)}",,,fr +2,"{[2010-07-15,2010-07-16)}",,,fr diff --git a/backend/src/test/resources/tests/sql/or/same_concept/expected.csv b/backend/src/test/resources/tests/sql/or/same_concept/expected.csv index 4869420d12..25c268e6e0 100644 --- a/backend/src/test/resources/tests/sql/or/same_concept/expected.csv +++ b/backend/src/test/resources/tests/sql/or/same_concept/expected.csv @@ -1,3 +1,3 @@ -pid -7 -2 +pid,dates +7,{} +2,{} diff --git a/backend/src/test/resources/tests/sql/selects/count/content.csv b/backend/src/test/resources/tests/sql/selects/count/content.csv new file mode 100644 index 0000000000..c7f2a0b196 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count/content.csv @@ -0,0 +1,8 @@ +pid,value +1,0.9 +2, +2,0.1 +2,0.3 +3,0.1 +3,1.0 +4,1.0 diff --git a/backend/src/test/resources/tests/sql/selects/count/count.json b/backend/src/test/resources/tests/sql/selects/count/count.json new file mode 100644 index 0000000000..4d21f98606 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count/count.json @@ -0,0 +1,63 @@ +{ + "label": "COUNT select", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "count" + ], + "type": "CONCEPT", + "label": "count", + "tables": [ + { + "id": "count.count_connector", + "selects": [ 
+ "count.count_connector.count_select" + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "count", + "type": "TREE", + "connectors": [ + { + "label": "count_connector", + "table": "table1", + "selects": { + "type": "COUNT", + "name": "count_select", + "column": "table1.value" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/count/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/count/expected.csv b/backend/src/test/resources/tests/sql/selects/count/expected.csv new file mode 100644 index 0000000000..6ae22ec295 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count/expected.csv @@ -0,0 +1,5 @@ +pid,dates,count +1,{},1 +2,{},2 +3,{},2 +4,{},1 diff --git a/backend/src/test/resources/tests/sql/selects/count_distinct/content.csv b/backend/src/test/resources/tests/sql/selects/count_distinct/content.csv new file mode 100644 index 0000000000..4d8d6e602f --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count_distinct/content.csv @@ -0,0 +1,11 @@ +pid,value +1,0.9 +2,1.1 +2,1.1 +2,0.3 +3,1.1 +3,5.0 +3, +4,3.0 +4,3.0 +4,3.0 diff --git a/backend/src/test/resources/tests/sql/selects/count_distinct/count_distinct.json b/backend/src/test/resources/tests/sql/selects/count_distinct/count_distinct.json new file mode 100644 index 0000000000..8e588fef47 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count_distinct/count_distinct.json @@ -0,0 +1,64 @@ +{ + "label": "COUNT DISTINCT select", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "count" + ], + "type": "CONCEPT", + "label": "count", + "tables": [ + { + "id": "count.count_connector", + "selects": [ + "count.count_connector.count_select" + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "count", + "type": "TREE", + "connectors": [ + { + "label": "count_connector", + "table": "table1", + "selects": { + "type": "COUNT", + "distinct": "true", + "name": "count_select", + "column": "table1.value" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/count/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/count_distinct/expected.csv b/backend/src/test/resources/tests/sql/selects/count_distinct/expected.csv new file mode 100644 index 0000000000..6ae22ec295 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/count_distinct/expected.csv @@ -0,0 +1,5 @@ +pid,dates,count +1,{},1 +2,{},2 +3,{},2 +4,{},1 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json index 97e6b8f6e1..6e3ac04891 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/centuries.spec.json @@ -1,5 +1,5 @@ { - "label": "DATE_DISTANCE select query with timeUnit CENTURIES", + "label": "DATE_DISTANCE select with timeUnit CENTURIES", "type": "SQL_TEST", "expectedCsv": "expected.csv", "query": { @@ -16,15 +16,6 @@ "tables":[ { 
"id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_centuries" ] @@ -42,12 +33,6 @@ { "label":"geschlecht_connector", "table":"table1", - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -75,10 +60,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv index 5058be62da..cb125007de 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,1920-01-01,"f" -2,2010-07-15,"m" -3,2010-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,1920-01-01 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv index c0043272fe..68556c342c 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/centuries/expected.csv @@ -1,3 +1,9 @@ -pid,date_distance_centuries -1,1 -3,0 +pid,dates,date_distance_centuries +2,{},0 +4,{},0 +7,{},0 +3,{},0 +6,{},0 +5,{},0 +1,{},1 +8,{},0 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv index 77a2fec9a1..2bbb467931 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2012-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2012-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json index f18f1934b2..af0f31a13b 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/days_with_date_restriction.spec.json @@ -1,6 +1,6 @@ { "type": "SQL_TEST", - "label": "DATE_DISTANCE select query with timeUnit DAYS and date restriction set", + "label": "DATE_DISTANCE select with timeUnit DAYS and date restriction set", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -22,15 +22,6 @@ "tables":[ { "id":"geschlecht_select.geschlecht_connector", - 
"filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_days" ] @@ -53,12 +44,6 @@ "label": "datum", "column": "table1.datum" }, - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -86,10 +71,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv index f434fa729b..35a2ab9fd7 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_with_date_restriction/expected.csv @@ -1,3 +1,3 @@ -pid,datum,date_distance_days -1,"[2012-01-01,2012-01-02)",365 -3,"[2012-11-10,2012-11-11)",51 +pid,dates,date_distance_days +3,"{[2012-11-10,2012-11-11)}",51 +1,"{[2012-01-01,2012-01-02)}",365 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv index 5195678965..096aed0aaa 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2013-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2013-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json index 4c54123139..c770fdd283 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/days_without_date_restriction.json @@ -1,6 +1,6 @@ { "type": "SQL_TEST", - "label": "DATE_DISTANCE select query with timeUnit DAYS and without date restriction set", + "label": "DATE_DISTANCE select with timeUnit DAYS and without date restriction set", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -16,15 +16,6 @@ "tables":[ { "id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_days" ] @@ -42,12 +33,6 @@ { "label":"geschlecht_connector", "table":"table1", - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -75,10 +60,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git 
a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv index f6fe682a1e..97809f31c6 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/days_without_date_restriction/expected.csv @@ -1,3 +1,9 @@ -pid,date_distance_days -1,4104 -3,3425 +pid,dates,date_distance_days +2,{},4639 +4,{},3424 +7,{},2694 +3,{},3425 +6,{},3059 +5,{},5616 +1,{},4104 +8,{},4155 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv index a27a9a8243..5feec5fd01 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2020-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2020-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json index 586cc74a15..e9bb2976fd 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/decades.spec.json @@ -1,6 +1,6 @@ { "type": "SQL_TEST", - "label": "DATE_DISTANCE select query with timeUnit DECADES", + "label": "DATE_DISTANCE select with timeUnit DECADES", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -16,15 +16,6 @@ "tables":[ { "id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_decades" ] @@ -42,12 +33,6 @@ { "label":"geschlecht_connector", "table":"table1", - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -75,10 +60,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv index 171337e385..bea17bd178 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/decades/expected.csv @@ -1,3 +1,9 @@ -pid,date_distance_decades -1,1 -3,0 +pid,dates,date_distance_decades +2,{},1 +4,{},0 +7,{},0 +3,{},0 +6,{},0 +5,{},1 +1,{},1 +8,{},1 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv index 2dfbea2947..43ba1a9aa1 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/content.csv @@ -1,9 +1,9 @@ 
-pid,datum,geschlecht -1,2012-01-29,"f" -2,2010-07-15,"m" -3,2010-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,2012-01-29 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv index 62ac211ec8..e1d5a45540 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/expected.csv @@ -1,3 +1,9 @@ -pid,date_distance_months -1,133 -3,148 +pid,dates,date_distance_months +2,{},152 +4,{},112 +7,{},88 +3,{},148 +6,{},100 +5,{},184 +1,{},133 +8,{},136 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json index 467c8aa561..3f98866824 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/months/months.spec.json @@ -1,6 +1,6 @@ { "type": "SQL_TEST", - "label": "DATE_DISTANCE select query with timeUnit MONTHS", + "label": "DATE_DISTANCE select with timeUnit MONTHS", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -16,15 +16,6 @@ "tables":[ { "id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_months" ] @@ -42,12 +33,6 @@ { "label":"geschlecht_connector", "table":"table1", - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -75,10 +60,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv b/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv index c2d4f04aef..8c2f303145 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2010-11-10,"f" -4,2013-11-11,"m" -5,2007-11-11,"" -6,2014-11-11,"" -7,2015-11-11,"mf" -8,2011-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2010-11-10 +4,2013-11-11 +5,2007-11-11 +6,2014-11-11 +7,2015-11-11 +8,2011-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv index 41a6149bff..bffdfa367f 100644 --- a/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/expected.csv @@ -1,3 +1,9 @@ -pid,date_distance_years -1,11 -3,12 +pid,dates,date_distance_years +2,{},12 +4,{},9 +7,{},7 +3,{},12 +6,{},8 +5,{},15 +1,{},11 +8,{},11 diff --git a/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json b/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json index fba8c65151..a5449cf42d 100644 --- 
a/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json +++ b/backend/src/test/resources/tests/sql/selects/date_distance/years/years.spec.json @@ -1,6 +1,6 @@ { "type": "SQL_TEST", - "label": "DATE_DISTANCE select query with timeUnit YEARS", + "label": "DATE_DISTANCE select with timeUnit YEARS", "expectedCsv": "expected.csv", "query": { "type": "CONCEPT_QUERY", @@ -16,15 +16,6 @@ "tables":[ { "id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ], "selects": [ "geschlecht_select.geschlecht_connector.date_distance_years" ] @@ -42,12 +33,6 @@ { "label":"geschlecht_connector", "table":"table1", - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" - }, "selects": [ { "column": "table1.datum", @@ -75,10 +60,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/content.csv b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/content.csv new file mode 100644 index 0000000000..c1f3e56378 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/content.csv @@ -0,0 +1,8 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +2,1,2014-06-30,2015-06-30 +2,2,2014-06-30,2015-06-30 +3,3,2014-06-30,2015-06-30 +4,,2014-06-30,2015-06-30 +5,1,2014-06-30,2015-06-30 +5,,2014-06-30,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/exists_with_other_selects.spec.json b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/exists_with_other_selects.spec.json new file mode 100644 index 0000000000..d148dbb14e --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/exists_with_other_selects.spec.json @@ -0,0 +1,78 @@ +{ + "type": "SQL_TEST", + "label": "EXISTS select with other selects", + "description": "EXISTS as concept-level select should come before table-level selects like FIRST in the final result set", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "ids": [ + "concept" + ], + "type": "CONCEPT", + "tables": [ + { + "id": "concept.connector", + "selects": "concept.connector.first_value" + } + ], + "selects": "concept.exists" + } + }, + "concepts": [ + { + "name": "concept", + "type": "TREE", + "selects": { + "type": "EXISTS", + "name": "exists" + }, + "connectors": [ + { + "label": "connector", + "table": "exists_table", + "validityDates": [ + { + "label": "datum", + "startColumn": "exists_table.datum_start", + "endColumn": "exists_table.datum_end" + } + ], + "selects": [ + { + "name": "first_value", + "column": "exists_table.value", + "type": "FIRST" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/exists/exists_with_other_selects/content.csv", + "name": "exists_table", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/expected.csv
b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/expected.csv new file mode 100644 index 0000000000..5a7bee102d --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/exists_with_other_selects/expected.csv @@ -0,0 +1,6 @@ +result,dates,concept_exists,first_value +2,"{[2014-06-30,2015-07-01)}",1,1.00 +4,"{[2014-06-30,2015-07-01)}",1, +3,"{[2014-06-30,2015-07-01)}",1,3.00 +5,"{[2014-06-30,2015-07-01)}",1,1.00 +1,"{[2014-06-30,2015-07-01)}",1,1.00 diff --git a/backend/src/test/resources/tests/sql/selects/exists/single_exists/content.csv b/backend/src/test/resources/tests/sql/selects/exists/single_exists/content.csv new file mode 100644 index 0000000000..f3fe352607 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/single_exists/content.csv @@ -0,0 +1,8 @@ +pid,datum_start,datum_end +1,2014-06-30,2015-06-30 +2,2014-06-30,2015-06-30 +2,2014-06-30,2015-06-30 +3,2014-06-30,2015-06-30 +4,2014-06-30,2015-06-30 +5,2014-06-30,2015-06-30 +5,2014-06-30,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/selects/exists/single_exists/exists.spec.json b/backend/src/test/resources/tests/sql/selects/exists/single_exists/exists.spec.json new file mode 100644 index 0000000000..d9d96299ee --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/single_exists/exists.spec.json @@ -0,0 +1,65 @@ +{ + "type": "SQL_TEST", + "label": "Single Concept EXISTS", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "ids": [ + "concept" + ], + "type": "CONCEPT", + "tables": [ + { + "id": "concept.connector" + } + ], + "selects": "concept.exists" + } + }, + "concepts": [ + { + "name": "concept", + "type": "TREE", + "selects": { + "type": "EXISTS", + "name": "exists" + }, + "connectors": [ + { + "label": "connector", + "table": "exists_table", + "validityDates": [ + { + "label": "datum", + "startColumn": "exists_table.datum_start", + "endColumn": "exists_table.datum_end" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/exists/single_exists/content.csv", + "name": "exists_table", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/exists/single_exists/expected.csv b/backend/src/test/resources/tests/sql/selects/exists/single_exists/expected.csv new file mode 100644 index 0000000000..0e10e26305 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/exists/single_exists/expected.csv @@ -0,0 +1,6 @@ +result,dates,concept_exists +2,"{[2014-06-30,2015-07-01)}",1 +4,"{[2014-06-30,2015-07-01)}",1 +3,"{[2014-06-30,2015-07-01)}",1 +5,"{[2014-06-30,2015-07-01)}",1 +1,"{[2014-06-30,2015-07-01)}",1 diff --git a/backend/src/test/resources/tests/sql/selects/first/content.csv b/backend/src/test/resources/tests/sql/selects/first/content.csv new file mode 100644 index 0000000000..ca38692d77 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/first/content.csv @@ -0,0 +1,11 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +2,1.01,2014-06-30,2015-06-30 +1,1,2015-02-03,2015-06-30 +1,0.5,2014-06-30,2015-06-30 +3,0.5,2014-04-30,2014-06-30 +4,1,2014-06-30,2015-06-30 +5,0.5,2014-04-30,2014-06-30 +5,1,2014-06-30,2015-06-30 +6,1,2014-04-30,2014-06-30 +8,1,2014-04-30,2014-06-30 diff --git a/backend/src/test/resources/tests/sql/selects/first/expected.csv 
b/backend/src/test/resources/tests/sql/selects/first/expected.csv new file mode 100644 index 0000000000..9aabbad17b --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/first/expected.csv @@ -0,0 +1,8 @@ +pid,dates,first_value +2,"{[2014-06-30,2015-07-01)}",1.01 +4,"{[2014-06-30,2015-07-01)}",1.00 +3,"{[2014-04-30,2014-07-01)}",0.50 +6,"{[2014-04-30,2014-07-01)}",1.00 +5,"{[2014-04-30,2015-07-01)}",0.50 +1,"{[2014-06-30,2015-07-01)}",1.00 +8,"{[2014-04-30,2014-07-01)}",1.00 diff --git a/backend/src/test/resources/tests/sql/selects/first/first.spec.json b/backend/src/test/resources/tests/sql/selects/first/first.spec.json new file mode 100644 index 0000000000..8df194923a --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/first/first.spec.json @@ -0,0 +1,74 @@ +{ + "label": "FIRST select with validity date", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "selects": "number.number_connector.first_value" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "selects": { + "label": "first_value", + "column": "table1.value", + "type": "FIRST" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/first/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/last/content.csv b/backend/src/test/resources/tests/sql/selects/last/content.csv new file mode 100644 index 0000000000..4b46b64c0f --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/last/content.csv @@ -0,0 +1,4 @@ +pid,value,datum_start,datum_end +1,1,2010-01-01,2010-12-31 +1,1,2012-01-01,2012-12-31 +1,0.5,2014-01-01,2014-12-31 diff --git a/backend/src/test/resources/tests/sql/selects/last/expected.csv b/backend/src/test/resources/tests/sql/selects/last/expected.csv new file mode 100644 index 0000000000..69cf8e3955 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/last/expected.csv @@ -0,0 +1,2 @@ +pid,dates,last_value +1,"{[2010-01-01,2011-01-01),[2012-01-01,2013-01-01),[2014-01-01,2015-01-01)}",0.50 diff --git a/backend/src/test/resources/tests/sql/selects/last/last.spec.json b/backend/src/test/resources/tests/sql/selects/last/last.spec.json new file mode 100644 index 0000000000..0fb5e72f65 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/last/last.spec.json @@ -0,0 +1,74 @@ +{ + "label": "LAST select with validity date", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "selects": "number.number_connector.last_value" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": 
"table1.datum_start", + "endColumn": "table1.datum_end" + }, + "selects": { + "label": "last_value", + "column": "table1.value", + "type": "LAST" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/last/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/random/content.csv b/backend/src/test/resources/tests/sql/selects/random/content.csv new file mode 100644 index 0000000000..48dc801a0c --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/random/content.csv @@ -0,0 +1,4 @@ +pid,value,datum_start,datum_end +1,1,2010-01-01,2010-12-31 +1,1,2010-01-01,2010-12-31 +1,1,2010-01-01,2010-12-31 diff --git a/backend/src/test/resources/tests/sql/selects/random/expected.csv b/backend/src/test/resources/tests/sql/selects/random/expected.csv new file mode 100644 index 0000000000..6bc3829e34 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/random/expected.csv @@ -0,0 +1,2 @@ +pid,dates,last_value +1,"{[2010-01-01,2011-01-01)}",1.00 diff --git a/backend/src/test/resources/tests/sql/selects/random/random.spec.json b/backend/src/test/resources/tests/sql/selects/random/random.spec.json new file mode 100644 index 0000000000..68817a5151 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/random/random.spec.json @@ -0,0 +1,74 @@ +{ + "label": "RANDOM select with validity date", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "tables": [ + { + "id": "number.number_connector", + "selects": "number.number_connector.random_value" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "selects": { + "label": "random_value", + "column": "table1.value", + "type": "RANDOM" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/random/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/same_select_2_times/content.csv b/backend/src/test/resources/tests/sql/selects/same_select_2_times/content.csv new file mode 100644 index 0000000000..8d38491842 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/same_select_2_times/content.csv @@ -0,0 +1,13 @@ +pid,value,datum +1,1,2012-01-01 +2,1.01,2010-07-15 +1,1,2013-11-10 +1,0.5,2012-11-11 +3,0.5,2007-11-11 +4,1,2012-11-11 +5,0.5,2012-11-11 +5,1,2012-11-11 +6,1,2012-01-01 +7,1,2010-07-15 +8,1,2013-11-10 +7,-1,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/same_select_2_times/expected.csv b/backend/src/test/resources/tests/sql/selects/same_select_2_times/expected.csv new file mode 100644 index 0000000000..64edf91c81 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/same_select_2_times/expected.csv @@ -0,0 +1,9 
@@ +pid,dates,value_1,value_2 +2,{},1.01,1.01 +4,{},1.00,1.00 +7,{},1.00,1.00 +3,{},0.50,0.50 +6,{},1.00,1.00 +5,{},0.50,0.50 +1,{},1.00,1.00 +8,{},1.00,1.00 diff --git a/backend/src/test/resources/tests/sql/selects/same_select_2_times/same_select.json b/backend/src/test/resources/tests/sql/selects/same_select_2_times/same_select.json new file mode 100644 index 0000000000..2b3d239d27 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/same_select_2_times/same_select.json @@ -0,0 +1,85 @@ +{ + "label": "Same select 2 times", + "description": "Selecting the same select 2 times should not cause a name collision", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "excludeFromTimeAggregation": true, + "tables": [ + { + "id": "number.number_connector", + "selects": "number.number_connector.value" + } + ] + }, + { + "ids": [ + "number" + ], + "type": "CONCEPT", + "label": "vs", + "excludeFromTimeAggregation": true, + "tables": [ + { + "id": "number.number_connector", + "selects": "number.number_connector.value" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "column": "table1.datum" + }, + "selects": { + "name": "value", + "column": "table1.value", + "type": "FIRST" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/same_select_2_times/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum", + "type": "DATE" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/sum/content.csv b/backend/src/test/resources/tests/sql/selects/sum/content.csv new file mode 100644 index 0000000000..d199bdf995 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/sum/content.csv @@ -0,0 +1,8 @@ +pid,value +1,0.9 +1,0.8 +2,0.1 +2,0.1 +3,1.0 +3,0.5 +4,19.0 diff --git a/backend/src/test/resources/tests/sql/selects/sum/expected.csv b/backend/src/test/resources/tests/sql/selects/sum/expected.csv new file mode 100644 index 0000000000..b6c5a8e4e8 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/sum/expected.csv @@ -0,0 +1,5 @@ +pid,dates,sum +2,{},0.20 +4,{},19.00 +3,{},1.50 +1,{},1.70 diff --git a/backend/src/test/resources/tests/sql/selects/sum/sum.json b/backend/src/test/resources/tests/sql/selects/sum/sum.json new file mode 100644 index 0000000000..4b8d24b182 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/sum/sum.json @@ -0,0 +1,63 @@ +{ + "label": "SUM select", + "type": "SQL_TEST", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "sum" + ], + "type": "CONCEPT", + "label": "sum", + "tables": [ + { + "id": "sum.sum_connector", + "selects": [ + "sum.sum_connector.sum_select" + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "sum", + "type": "TREE", + "connectors": [ + { + "label": "sum_connector", + "table": "table1", + "selects": { + "type": "SUM", + "name": "sum_select", + "column": "table1.value" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/sum/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + 
"columns": [ + { + "name": "value", + "type": "REAL" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_1.csv new file mode 100644 index 0000000000..463e92bd1c --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_1.csv @@ -0,0 +1,7 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +1,1,2014-01-01,2014-06-30 +1,1,2013-01-01,2014-01-01 +3,0.5,2014-01-01,2014-12-31 +3,0.5,2015-06-01,2015-12-31 +4,1,2013-01-01,2013-01-01 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_2.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_2.csv new file mode 100644 index 0000000000..d86a976578 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/content_2.csv @@ -0,0 +1,5 @@ +pid,datum_start,datum_end,geschlecht +1,2015-01-01,2016-12-31,"f" +2,2010-07-15,2010-07-15,"m" +3,2017-01-01,2017-12-31,"f" +4,2012-12-31,2012-12-31,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/expected.csv new file mode 100644 index 0000000000..b4d0d327fe --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/expected.csv @@ -0,0 +1,4 @@ +pid,dates +1,{} +3,{} +4,{} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/none.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/none.spec.json new file mode 100644 index 0000000000..6ffa2dd89c --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/and/none.spec.json @@ -0,0 +1,149 @@ +{ + "label": "NONE date aggregation of 2 concepts", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "dateAggregationMode": "NONE", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1.0 + } + } + ] + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "startColumn": "table2.datum_start", + "endColumn": "table2.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen 
Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/block/and/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + }, + { + "csv": "tests/sql/selects/validity_date/aggregation/block/and/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/content_1.csv new file mode 100644 index 0000000000..8f5c77f449 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/content_1.csv @@ -0,0 +1,3 @@ +pid,datum_start,datum_end,geschlecht +1,2012-01-01,2012-12-31,"m" +1,2015-01-01,2015-12-31,"m" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/expected.csv new file mode 100644 index 0000000000..901dd909ba --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/expected.csv @@ -0,0 +1,2 @@ +pid,dates +1,{} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/negate.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/negate.spec.json new file mode 100644 index 0000000000..e12b55102c --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/block/not/negate.spec.json @@ -0,0 +1,81 @@ +{ + "label": "BLOCK as default date aggregation of a NEGATION CONCEPT node", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "NEGATION", + "child": { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/block/not/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git 
a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_1.csv new file mode 100644 index 0000000000..9b59b2022a --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_1.csv @@ -0,0 +1,6 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +1,1,2015-01-01,2015-06-30 +1,1,2015-05-05,2016-02-05 +3,0.5,2014-06-01,2014-12-31 +4,1,2013-01-01,2013-01-01 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_2.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_2.csv new file mode 100644 index 0000000000..61dc197095 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/content_2.csv @@ -0,0 +1,4 @@ +pid,datum_start,datum_end,geschlecht +1,2015-01-01,2015-12-31,"f" +3,2013-01-01,2013-06-30,"f" +4,2012-12-31,2012-12-31,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/expected.csv new file mode 100644 index 0000000000..331badd1ac --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/expected.csv @@ -0,0 +1,4 @@ +pid,dates +1,"{[2015-01-01,2016-01-01)}" +3,"{}" +4,"{}" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/intersect.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/intersect.spec.json new file mode 100644 index 0000000000..b5e61ae400 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/intersect/intersect.spec.json @@ -0,0 +1,149 @@ +{ + "label": "INTERSECT date aggregation of 2 concepts", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "dateAggregationMode": "INTERSECT", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1.0 + } + } + ] + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + } + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "startColumn": "table2.datum_start", + "endColumn": "table2.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + } + } + ] + } + ], +
"content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/intersect/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + }, + { + "csv": "tests/sql/selects/validity_date/aggregation/intersect/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_1.csv new file mode 100644 index 0000000000..d1c1c91dcb --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_1.csv @@ -0,0 +1,8 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 +1,1,2014-01-01,2014-06-30 +1,1,2013-01-01,2014-01-01 +3,0.5,2014-01-01,2014-12-31 +3,0.5,2015-06-01,2015-12-31 +4,1,2013-01-01,2013-01-01 +9,1,2013-01-02,2013-01-02 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_2.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_2.csv new file mode 100644 index 0000000000..f9c652b028 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/content_2.csv @@ -0,0 +1,6 @@ +pid,datum_start,datum_end,geschlecht +1,2015-01-01,2016-12-31,"f" +2,2010-07-15,2010-07-15,"m" +3,2017-01-01,2017-12-31,"f" +4,2012-12-31,2012-12-31,"f" +9,2012-12-31,2012-12-31,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/expected.csv new file mode 100644 index 0000000000..e02b5ee9a1 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/expected.csv @@ -0,0 +1,5 @@ +pid,dates +1,"{[2013-01-01,2017-01-01)}",1.00,f +3,"{[2014-01-01,2015-01-01),[2015-06-01,2016-01-01),[2017-01-01,2018-01-01)}",0.50,f +4,"{[2012-12-31,2013-01-02)}",1.00,f +9,"{[2012-12-31,2013-01-01),[2013-01-02,2013-01-03)}",1.00,f diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/merge.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/merge.spec.json new file mode 100644 index 0000000000..47d6d75ece --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/merge/merge.spec.json @@ -0,0 +1,165 @@ +{ + "label": "MERGE date aggregation of 2 concepts", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "dateAggregationMode": "MERGE", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1.0 + } + } + ], + "selects": "number.number_connector.first_value" + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": 
"geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ], + "selects": "geschlecht_select.geschlecht_connector.first_geschlecht" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + }, + "selects": [ + { + "column": "table1.value", + "label": "first_value", + "type": "FIRST" + } + ] + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "validityDates": { + "label": "datum", + "startColumn": "table2.datum_start", + "endColumn": "table2.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + }, + "selects": [ + { + "column": "table2.geschlecht", + "label": "first_geschlecht", + "type": "FIRST" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/merge/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + }, + { + "csv": "tests/sql/selects/validity_date/aggregation/merge/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/content_1.csv new file mode 100644 index 0000000000..8f5c77f449 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/content_1.csv @@ -0,0 +1,3 @@ +pid,datum_start,datum_end,geschlecht +1,2012-01-01,2012-12-31,"m" +1,2015-01-01,2015-12-31,"m" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/expected.csv new file mode 100644 index 0000000000..f9a2624946 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/expected.csv @@ -0,0 +1,2 @@ +pid,dates +1,"{[-∞,2012-01-01),[2013-01-01,2015-01-01),[2016-01-01,∞]}" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/negate.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/negate.spec.json new file mode 100644 index 0000000000..ff3262a892 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/negate/negate.spec.json @@ -0,0 +1,82 @@ +{ + "label": "LOGICAL date aggregation of a NEGATION CONCEPT node", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "dateAggregationMode": "LOGICAL", + "root": { + "type": "NEGATION", + "child": 
{ + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/negate/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_1.csv new file mode 100644 index 0000000000..2cc6ce5d2e --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_1.csv @@ -0,0 +1,2 @@ +pid,value +1,1 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_2.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_2.csv new file mode 100644 index 0000000000..33e0f95e29 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/content_2.csv @@ -0,0 +1,2 @@ +pid,geschlecht +1,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/expected.csv new file mode 100644 index 0000000000..2bfda778e7 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/expected.csv @@ -0,0 +1,2 @@ +pid,dates,value,geschlecht +1,{},1.00,f diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/no_validity_date.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/no_validity_date.spec.json new file mode 100644 index 0000000000..46ff81977e --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/no_validity_date/no_validity_date.spec.json @@ -0,0 +1,139 @@ +{ + "label": "MERGE date aggregation of 2 concepts but no one has a validity date", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + "dateAggregationMode": "MERGE", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1.0 + } + } + ], + "selects": "number.number_connector.first_value" + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": 
"Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ], + "selects": "geschlecht_select.geschlecht_connector.first_geschlecht" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + }, + "selects": [ + { + "column": "table1.value", + "label": "first_value", + "type": "FIRST" + } + ] + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + }, + "selects": [ + { + "column": "table2.geschlecht", + "label": "first_geschlecht", + "type": "FIRST" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/no_validity_date/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + } + ] + }, + { + "csv": "tests/sql/selects/validity_date/aggregation/no_validity_date/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_1.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_1.csv new file mode 100644 index 0000000000..a74c45c89b --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_1.csv @@ -0,0 +1,2 @@ +pid,value,datum_start,datum_end +1,1,2014-06-30,2015-06-30 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_2.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_2.csv new file mode 100644 index 0000000000..33e0f95e29 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/content_2.csv @@ -0,0 +1,2 @@ +pid,geschlecht +1,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/expected.csv new file mode 100644 index 0000000000..a217653a41 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/expected.csv @@ -0,0 +1,2 @@ +pid,dates +1,"{[2014-06-30,2015-07-01)}",1.00,f diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/only_1_date.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/only_1_date.spec.json new file mode 100644 index 0000000000..d2aa30dc10 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/aggregation/only_1_date/only_1_date.spec.json @@ -0,0 +1,152 @@ +{ + "label": "MERGE date aggregation of 2 concepts but only 1 has a validity date", + "expectedCsv": "expected.csv", + "type": "SQL_TEST", + "query": { + "type": "CONCEPT_QUERY", + 
"dateAggregationMode": "MERGE", + "root": { + "type": "AND", + "children": [ + { + "type": "CONCEPT", + "label": "vs", + "ids": [ + "number" + ], + "tables": [ + { + "id": "number.number_connector", + "filters": [ + { + "filter": "number.number_connector.value", + "type": "REAL_RANGE", + "value": { + "min": 0, + "max": 1.0 + } + } + ], + "selects": "number.number_connector.first_value" + } + ] + }, + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ], + "selects": "geschlecht_select.geschlecht_connector.first_geschlecht" + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "number", + "type": "TREE", + "connectors": [ + { + "label": "number_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "value", + "description": "xy", + "column": "table1.value", + "type": "NUMBER" + }, + "selects": [ + { + "column": "table1.value", + "label": "first_value", + "type": "FIRST" + } + ] + } + ] + }, + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table2", + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table2.geschlecht", + "type": "SELECT" + }, + "selects": [ + { + "column": "table2.geschlecht", + "label": "first_geschlecht", + "type": "FIRST" + } + ] + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/aggregation/only_1_date/content_1.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "value", + "type": "REAL" + }, + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + } + ] + }, + { + "csv": "tests/sql/selects/validity_date/aggregation/only_1_date/content_2.csv", + "name": "table2", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "geschlecht", + "type": "STRING" + } + ] + } + ] + } +} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv b/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv index db93b08bd4..6af477b0c4 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2013-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3, +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2012-11-11 +8,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv index a09d9ed8b4..2bf14a4736 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/expected.csv @@ -1,3 +1,9 @@ -pid,datum -1,"[2012-01-01,2012-01-02)" -3,"[2013-11-10,2013-11-11)" +pid,dates +2,"{[2010-07-15,2010-07-16)}" +4,"{[2012-11-11,2012-11-12)}" 
+7,"{[2012-11-11,2012-11-12)}" +3,"{[-∞,∞]}" +6,"{[2012-11-11,2012-11-12)}" +5,"{[2007-11-11,2007-11-12)}" +1,"{[2012-01-01,2012-01-02)}" +8,"{[2012-11-11,2012-11-12)}" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json b/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json index 705aeb28f5..b32515eef0 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json +++ b/backend/src/test/resources/tests/sql/selects/validity_date/default/validity_date_default.json @@ -16,16 +16,7 @@ "label":"Geschlecht SELECT", "tables":[ { - "id":"geschlecht_select.geschlecht_connector", - "filters":[ - { - "filter":"geschlecht_select.geschlecht_connector.geschlecht", - "type":"BIG_MULTI_SELECT", - "value":[ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } @@ -43,12 +34,6 @@ "validityDates":{ "label":"datum", "column":"table1.datum" - }, - "filters":{ - "label":"geschlecht", - "description":"Geschlecht zur gegebenen Datumseinschränkung", - "column":"table1.geschlecht", - "type":"SELECT" } } ] @@ -57,7 +42,7 @@ "content":{ "tables":[ { - "csv":"tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv", + "csv": "tests/sql/selects/validity_date/default/content.csv", "name":"table1", "primaryColumn":{ "name":"pid", @@ -67,10 +52,6 @@ { "name":"datum", "type":"DATE" - }, - { - "name":"geschlecht", - "type":"STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv index db93b08bd4..3ef844914c 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv +++ b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/content.csv @@ -1,9 +1,9 @@ -pid,datum,geschlecht -1,2012-01-01,"f" -2,2010-07-15,"m" -3,2013-11-10,"f" -4,2012-11-11,"m" -5,2007-11-11,"" -6,2012-11-11,"" -7,2012-11-11,"mf" -8,2012-11-11,"fm" +pid,datum +1,2012-01-01 +2,2010-07-15 +3,2013-11-10 +4,2012-11-11 +5,2007-11-11 +6,2012-11-11 +7,2012-11-11 +8,2012-11-11 diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv index b6a85aedc6..560413d16b 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv +++ b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/expected.csv @@ -1,3 +1,9 @@ -pid -1 -3 +pid,dates +2,{} +4,{} +7,{} +3,{} +6,{} +5,{} +1,{} +8,{} diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json index 152f6f6c7d..bcd29f0b63 100644 --- a/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json +++ b/backend/src/test/resources/tests/sql/selects/validity_date/excluded_from_time_aggregation/validity_date_excluded.json @@ -17,16 +17,7 @@ "excludeFromTimeAggregation": true, "tables": [ { - "id": "geschlecht_select.geschlecht_connector", - "filters": [ - { - "filter": 
"geschlecht_select.geschlecht_connector.geschlecht", - "type": "BIG_MULTI_SELECT", - "value": [ - "f" - ] - } - ] + "id": "geschlecht_select.geschlecht_connector" } ] } @@ -44,12 +35,6 @@ "validityDates": { "label": "datum", "column": "table1.datum" - }, - "filters": { - "label": "geschlecht", - "description": "Geschlecht zur gegebenen Datumseinschränkung", - "column": "table1.geschlecht", - "type": "SELECT" } } ] @@ -68,10 +53,6 @@ { "name": "datum", "type": "DATE" - }, - { - "name": "geschlecht", - "type": "STRING" } ] } diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/multirange/content.csv b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/content.csv new file mode 100644 index 0000000000..5190a64216 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/content.csv @@ -0,0 +1,14 @@ +pid,datum_start,datum_end,geschlecht +1,2012-01-01,2012-07-01,"f" +1,2012-06-01,2012-12-31,"f" +1,2014-01-01,2014-12-31,"f" +2,2010-07-15,2010-07-15,"m" +3,2013-11-10,2013-11-10,"f" +3,2013-11-11,2013-11-11,"f" +4,2012-11-11,2012-11-11,"m" +5,2007-11-11,2007-11-11,"" +6,2012-11-11,2012-11-11,"" +7,2012-11-11,2012-11-11,"mf" +8,2012-11-11,2012-11-11,"fm" +9,,2012-12-31,"f" +9,2013-01-02,,"f" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/multirange/expected.csv b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/expected.csv new file mode 100644 index 0000000000..30492873f8 --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/expected.csv @@ -0,0 +1,4 @@ +pid,dates +1,"{[2012-01-01,2013-01-01),[2014-01-01,2015-01-01)}" +3,"{[2013-11-10,2013-11-12)}" +9,"{[-∞,2013-01-01),[2013-01-02,∞]}" diff --git a/backend/src/test/resources/tests/sql/selects/validity_date/multirange/multi_range.spec.json b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/multi_range.spec.json new file mode 100644 index 0000000000..01da6111ba --- /dev/null +++ b/backend/src/test/resources/tests/sql/selects/validity_date/multirange/multi_range.spec.json @@ -0,0 +1,84 @@ +{ + "type": "SQL_TEST", + "label": "Validity date with multiple ranges for subjects", + "description": "If multiple ranges of a concepts' validity date exist, that they are aggregated properly.", + "expectedCsv": "expected.csv", + "query": { + "type": "CONCEPT_QUERY", + "root": { + "type": "AND", + "children": [ + { + "ids": [ + "geschlecht_select" + ], + "type": "CONCEPT", + "label": "Geschlecht SELECT", + "tables": [ + { + "id": "geschlecht_select.geschlecht_connector", + "filters": [ + { + "filter": "geschlecht_select.geschlecht_connector.geschlecht", + "type": "BIG_MULTI_SELECT", + "value": [ + "f" + ] + } + ] + } + ] + } + ] + } + }, + "concepts": [ + { + "label": "geschlecht_select", + "type": "TREE", + "connectors": [ + { + "label": "geschlecht_connector", + "table": "table1", + "validityDates": { + "label": "datum", + "startColumn": "table1.datum_start", + "endColumn": "table1.datum_end" + }, + "filters": { + "label": "geschlecht", + "description": "Geschlecht zur gegebenen Datumseinschränkung", + "column": "table1.geschlecht", + "type": "SELECT" + } + } + ] + } + ], + "content": { + "tables": [ + { + "csv": "tests/sql/selects/validity_date/multirange/content.csv", + "name": "table1", + "primaryColumn": { + "name": "pid", + "type": "STRING" + }, + "columns": [ + { + "name": "datum_start", + "type": "DATE" + }, + { + "name": "datum_end", + "type": "DATE" + }, + { + "name": "geschlecht", + 
"type": "STRING" + } + ] + } + ] + } +} diff --git a/frontend/mock-api/mockApi.ts b/frontend/mock-api/mockApi.ts index 590bb65514..d6c42fc344 100644 --- a/frontend/mock-api/mockApi.ts +++ b/frontend/mock-api/mockApi.ts @@ -494,6 +494,8 @@ export default function mockApi(app: Application) { datasetAbilities: { imdb: { canUpload: true, + canViewEntityPreview: true, + canViewQueryPreview: true, }, }, groups: [], diff --git a/frontend/package.json b/frontend/package.json index 742c47ff5f..5bef82954a 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -69,6 +69,7 @@ "react-i18next": "^12.2.0", "react-list": "^0.8.16", "react-markdown": "^8.0.0", + "react-merge-refs": "^2.0.2", "react-number-format": "^5.1.4", "react-redux": "^8.0.5", "react-router-dom": "^6.9.0", diff --git a/frontend/src/js/api/types.ts b/frontend/src/js/api/types.ts index a9f45f45f8..83e76d48c2 100644 --- a/frontend/src/js/api/types.ts +++ b/frontend/src/js/api/types.ts @@ -414,6 +414,7 @@ export interface GetQueryResponseDoneT { columnDescriptions: ColumnDescription[] | null; queryType: "CONCEPT_QUERY" | "SECONDARY_ID_QUERY"; requiredTime: number; // In ms, unused at the moment + containsDates: boolean; } export interface GetQueryRunningResponseT { diff --git a/frontend/src/js/entity-history/timeline/util.ts b/frontend/src/js/entity-history/timeline/util.ts index 83a5c1b94f..59d327654e 100644 --- a/frontend/src/js/entity-history/timeline/util.ts +++ b/frontend/src/js/entity-history/timeline/util.ts @@ -25,12 +25,11 @@ export const isMoneyColumn = (columnDescription: ColumnDescription) => export const isSecondaryIdColumn = (columnDescription: ColumnDescription) => columnDescription.semantics.some((s) => s.type === "SECONDARY_ID"); -export const formatCurrency = (value: number) => +export const formatCurrency = (value: number, digits?: number) => value.toLocaleString(navigator.language, { style: "currency", - currency: "EUR", unitDisplay: "short", - minimumFractionDigits: 0, - maximumFractionDigits: 0, + minimumFractionDigits: digits, + maximumFractionDigits: digits, }); diff --git a/frontend/src/js/external-forms/form-components/DropzoneList.tsx b/frontend/src/js/external-forms/form-components/DropzoneList.tsx index 953d16e1e2..0b423035a1 100644 --- a/frontend/src/js/external-forms/form-components/DropzoneList.tsx +++ b/frontend/src/js/external-forms/form-components/DropzoneList.tsx @@ -13,7 +13,6 @@ import DropzoneWithFileInput, { DragItemFile, } from "../../ui-components/DropzoneWithFileInput"; import Label from "../../ui-components/Label"; -import Optional from "../../ui-components/Optional"; import DropzoneBetweenElements from "./DropzoneBetweenElements"; @@ -56,7 +55,6 @@ interface PropsT { className?: string; label?: ReactNode; tooltip?: string; - optional?: boolean; dropzoneChildren: (args: ChildArgs) => ReactNode; items: ReactNode[]; acceptedDropTypes: string[]; @@ -67,7 +65,7 @@ interface PropsT { monitor: DropTargetMonitor, ) => void; onDropFile: (file: File) => void; - onImportLines: (lines: string[]) => void; + onImportLines: (lines: string[], filename?: string) => void; dropBetween: ( i: number, ) => (item: PossibleDroppableObject, monitor: DropTargetMonitor) => void; @@ -78,7 +76,6 @@ const DropzoneList = ( className, label, tooltip, - optional, dropzoneChildren, items, acceptedDropTypes, @@ -97,12 +94,7 @@ const DropzoneList = ( return (
- {label && ( - - )} + {label && } {tooltip && } {items && items.length > 0 && ( diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx index 8821c30203..e351408f6f 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptGroup.tsx @@ -70,7 +70,6 @@ interface Props { tooltip?: string; newValue: FormConceptGroupT; isSingle?: boolean; - optional?: boolean; disallowMultipleColumns?: boolean; blocklistedTables?: string[]; allowlistedTables?: string[]; @@ -88,6 +87,7 @@ interface Props { row: FormConceptGroupT; i: number; }) => ReactNode; + rowPrefixFieldname?: string; } const DropzoneListItem = styled("div")``; @@ -189,7 +189,6 @@ const FormConceptGroup = (props: Props) => { */ ref={dropzoneRef} tooltip={props.tooltip} - optional={props.optional} label={ <> {props.label} @@ -218,27 +217,38 @@ const FormConceptGroup = (props: Props) => { const concept = isMovedObject(item) ? copyConcept(item) : initializeConcept(item, defaults, tableConfig); - let newPropsValue = props.value; + let insertIndex = i; + let newPropsValue = props.value; + let newValue = JSON.parse(JSON.stringify(props.newValue)); + if (isMovedObject(item)) { const { movedFromFieldName, movedFromAndIdx, movedFromOrIdx } = item.dragContext; if (movedFromFieldName === props.fieldName) { - const willConceptMoveDown = - i > movedFromAndIdx && + const movedConceptWasLast = props.value[movedFromAndIdx].concepts.length === 1; + const willConceptMoveDown = + i > movedFromAndIdx && movedConceptWasLast; + if (willConceptMoveDown) { insertIndex = i - 1; } - newPropsValue = - props.value[movedFromAndIdx].concepts.length === 1 - ? removeValue(props.value, movedFromAndIdx) - : removeConcept( - props.value, - movedFromAndIdx, - movedFromOrIdx, - ); + newPropsValue = movedConceptWasLast + ? removeValue(props.value, movedFromAndIdx) + : removeConcept(props.value, movedFromAndIdx, movedFromOrIdx); + + // rowPrefixField is a special property that is only used in an edge case form, + // used for tagging concepts. We only need to pass it back into the value + // if the concept is moved to a different position in the same field. 
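// Editor's sketch (not part of the diff): the index correction computed a few
// lines above, in isolation. `computeInsertIndex` is a hypothetical name; the
// semantics are taken from the surrounding dropBetween handler.
const computeInsertIndex = (
  dropIndex: number,
  movedFromAndIdx: number,
  movedConceptWasLast: boolean,
): number =>
  // If the dragged concept was the only entry in its row and that row sits
  // above the drop target, removing the emptied row shifts the rows below it
  // up by one, so the drop index must be decremented to compensate.
  dropIndex > movedFromAndIdx && movedConceptWasLast
    ? dropIndex - 1
    : dropIndex;
// End of sketch.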
+ if (props.rowPrefixFieldname) { + newValue[props.rowPrefixFieldname] = + // since rowPrefixFieldname is dynamic, and since it's an edge case, + // we're not typing this + // @ts-ignore + props.value[movedFromAndIdx][props.rowPrefixFieldname]; + } } else { if (exists(item.dragContext.deleteFromOtherField)) { item.dragContext.deleteFromOtherField(); @@ -261,8 +271,8 @@ const FormConceptGroup = (props: Props) => { onDropFile={(file) => onDropFile(file, { valueIdx: props.value.length }) } - onImportLines={(lines) => - onImportLines(lines, { valueIdx: props.value.length }) + onImportLines={(lines, filename) => + onImportLines({ lines, filename }, { valueIdx: props.value.length }) } onDrop={(item: DragItemFile | DragItemConceptTreeNode) => { setScrollToDropzone(true); @@ -274,6 +284,23 @@ const FormConceptGroup = (props: Props) => { if (props.isValidConcept && !props.isValidConcept(item)) return; + let newValue = JSON.parse(JSON.stringify(props.newValue)); + + // rowPrefixField is a special property that is only used in an edge case form, + // for a detailed explanation see the comment in the dropBetween function + if (isMovedObject(item)) { + const { movedFromFieldName, movedFromAndIdx } = item.dragContext; + + if ( + movedFromFieldName === props.fieldName && + props.rowPrefixFieldname + ) { + newValue[props.rowPrefixFieldname] = + // @ts-ignore + props.value[movedFromAndIdx][props.rowPrefixFieldname]; + } + } + const concept = isMovedObject(item) ? copyConcept(item) : initializeConcept(item, defaults, tableConfig); @@ -356,6 +383,10 @@ const FormConceptGroup = (props: Props) => { : removeConcept(props.value, i, j), ); }} + // row_prefix is a special property that is only used in an edge case form. + // To support reordering of concepts this property needs + // to be passed to the concept node + rowPrefixFieldname={props.rowPrefixFieldname} expand={{ onClick: () => props.onChange( @@ -376,8 +407,11 @@ const FormConceptGroup = (props: Props) => { ) : ( */ acceptedDropTypes={DROP_TYPES} - onImportLines={(lines) => - onImportLines(lines, { valueIdx: i, conceptIdx: j }) + onImportLines={(lines, filename) => + onImportLines( + { lines, filename }, + { valueIdx: i, conceptIdx: j }, + ) } onDrop={(item: DragItemConceptTreeNode | DragItemFile) => { if (item.type === "__NATIVE_FILE__") { diff --git a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx index 7f4dd09473..49a5ec3584 100644 --- a/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx +++ b/frontend/src/js/external-forms/form-concept-group/FormConceptNode.tsx @@ -83,6 +83,7 @@ interface PropsT { }; deleteFromOtherField: () => void; fieldName: string; + rowPrefixFieldname?: string; } // generalized node to handle concepts queried in forms @@ -96,6 +97,7 @@ const FormConceptNode: FC = ({ expand, deleteFromOtherField, fieldName, + rowPrefixFieldname, }) => { const { t } = useTranslation(); const rootNodeLabel = getRootNodeLabel(conceptNode); @@ -108,6 +110,7 @@ const FormConceptNode: FC = ({ movedFromOrIdx: conceptIdx, width: 0, height: 0, + rowPrefixFieldname: rowPrefixFieldname, }, }; const [, drag] = useDrag({ @@ -119,6 +122,7 @@ const FormConceptNode: FC = ({ ...getWidthAndHeight(ref), deleteFromOtherField, movedFromFieldName: fieldName, + rowPrefixFieldname: rowPrefixFieldname, }, }), }); diff --git a/frontend/src/js/external-forms/form-concept-group/useUploadConceptListModal.ts 
b/frontend/src/js/external-forms/form-concept-group/useUploadConceptListModal.ts index cf8b472145..32fe6242d7 100644 --- a/frontend/src/js/external-forms/form-concept-group/useUploadConceptListModal.ts +++ b/frontend/src/js/external-forms/form-concept-group/useUploadConceptListModal.ts @@ -75,14 +75,14 @@ export const useUploadConceptListModal = ({ }; const onImportLines = ( - lines: string[], + { lines, filename }: { lines: string[]; filename?: string }, { valueIdx, conceptIdx }: UploadConceptListModalContext, ) => { setModalContext({ valueIdx, conceptIdx }); dispatch( initUploadConceptListModal({ rows: lines, - filename: t("importModal.pasted"), + filename: filename || t("importModal.pasted"), }), ); diff --git a/frontend/src/js/external-forms/form-query-dropzone/FormQueryDropzone.tsx b/frontend/src/js/external-forms/form-query-dropzone/FormQueryDropzone.tsx index 831017a533..67a3dfbbe5 100644 --- a/frontend/src/js/external-forms/form-query-dropzone/FormQueryDropzone.tsx +++ b/frontend/src/js/external-forms/form-query-dropzone/FormQueryDropzone.tsx @@ -7,7 +7,6 @@ import type { DragItemQuery } from "../../standard-query-editor/types"; import InfoTooltip from "../../tooltip/InfoTooltip"; import Dropzone from "../../ui-components/Dropzone"; import Label from "../../ui-components/Label"; -import Optional from "../../ui-components/Optional"; import ValidatedFormQueryResult from "./ValidatedFormQueryResult"; @@ -23,7 +22,6 @@ const DROP_TYPES = [ interface PropsT { label: string; tooltip?: string; - optional?: boolean; dropzoneText: string; className?: string; value: DragItemQuery | null; @@ -33,7 +31,6 @@ interface PropsT { const FormQueryDropzone: FC = ({ label, tooltip, - optional, dropzoneText, className, value, @@ -60,7 +57,6 @@ const FormQueryDropzone: FC = ({ return (
diff --git a/frontend/src/js/external-forms/form/Field.tsx b/frontend/src/js/external-forms/form/Field.tsx index ad657d4e2f..b05bf13c4c 100644 --- a/frontend/src/js/external-forms/form/Field.tsx +++ b/frontend/src/js/external-forms/form/Field.tsx @@ -33,12 +33,7 @@ import FormConceptGroup from "../form-concept-group/FormConceptGroup"; import type { FormConceptGroupT } from "../form-concept-group/formConceptGroupState"; import FormQueryDropzone from "../form-query-dropzone/FormQueryDropzone"; import FormTabNavigation from "../form-tab-navigation/FormTabNavigation"; -import { - getFieldKey, - getInitialValue, - isFormField, - isOptionalField, -} from "../helper"; +import { getFieldKey, getInitialValue, isFormField } from "../helper"; import { getErrorForField } from "../validators"; import type { DynamicFormValues } from "./Form"; @@ -73,7 +68,11 @@ type Props = T & { noContainer?: boolean; noLabel?: boolean; }; -const FieldContainer = styled("div")<{ noLabel?: boolean; hasError?: boolean }>` +const FieldContainer = styled("div")<{ + noLabel?: boolean; + hasError?: boolean; + red?: boolean; +}>` display: flex; flex-direction: column; gap: 5px; @@ -81,12 +80,16 @@ const FieldContainer = styled("div")<{ noLabel?: boolean; hasError?: boolean }>` background-color: white; border-radius: ${({ theme }) => theme.borderRadius}; border: 1px solid - ${({ theme, hasError }) => - hasError ? theme.col.blueGrayDark : theme.col.grayLight}; + ${({ theme, hasError, red }) => + hasError + ? red + ? theme.col.red + : theme.col.blueGrayDark + : theme.col.grayLight}; `; -const ErrorContainer = styled("div")` - color: ${({ theme }) => theme.col.blueGrayDark}; +const ErrorContainer = styled("div")<{ red?: boolean }>` + color: ${({ theme, red }) => (red ? theme.col.red : theme.col.blueGrayDark)}; font-weight: 700; font-size: ${({ theme }) => theme.font.sm}; `; @@ -114,12 +117,21 @@ const ConnectedField = ({ // TODO: REFINE COLORS // const color = useColorByField(formField.type); + const requiredMsg = t("externalForms.formValidation.isRequired"); + const isRedError = fieldState.error?.message !== requiredMsg; + return noContainer ? (
{children({ ...field, ...props })}
) : ( - + {children({ ...field, ...props })} - {fieldState.error?.message} + + {fieldState.error?.message} + ); }; @@ -148,35 +160,28 @@ const NestedFields = styled("div")` border-radius: ${({ theme }) => theme.borderRadius}; `; -interface PropsT { +const setValueConfig = { + shouldValidate: true, + shouldDirty: true, + shouldTouch: true, +}; + +const Field = ({ + field, + ...commonProps +}: { formType: string; h1Index?: number; field: GeneralField; locale: Language; availableDatasets: SelectOptionT[]; - optional?: boolean; register: UseFormRegister; setValue: UseFormSetValue; control: Control; -} - -const setValueConfig = { - shouldValidate: true, - shouldDirty: true, - shouldTouch: true, -}; - -const Field = ({ field, ...commonProps }: PropsT) => { +}) => { const datasetId = useDatasetId(); - const { - formType, - h1Index, - optional, - locale, - availableDatasets, - setValue, - control, - } = commonProps; + const { formType, h1Index, locale, availableDatasets, setValue, control } = + commonProps; const { t } = useTranslation(); const defaultValue = @@ -221,7 +226,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { value={fieldProps.value as string} onChange={(value) => setValue(field.name, value, setValueConfig)} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} /> )} @@ -246,7 +250,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { setValue(field.name, value, setValueConfig); }} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} /> )} @@ -275,7 +278,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { max: field.max, }} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} /> )} @@ -293,7 +295,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { inline={true} label={field.label[locale]} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} value={fieldProps.value as DateStringMinMax} onChange={(value) => setValue(field.name, value, setValueConfig) @@ -315,7 +316,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { label={field.label[locale] || ""} dropzoneText={field.dropzoneLabel[locale] || ""} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} value={fieldProps.value as DragItemQuery} onChange={(value) => setValue(field.name, value, setValueConfig)} /> @@ -357,7 +357,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { label={field.label[locale]} options={options} tooltip={field.tooltip ? field.tooltip[locale] : undefined} - optional={optional} value={fieldProps.value as SelectOptionT | null} onChange={(value) => setValue(field.name, value, setValueConfig)} /> @@ -383,7 +382,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { label={field.label[locale]} options={availableDatasets} tooltip={field.tooltip ? 
field.tooltip[locale] : undefined} - optional={optional} value={fieldProps.value as SelectOptionT | null} onChange={(value) => setValue(field.name, value, setValueConfig) @@ -411,16 +409,8 @@ const Field = ({ field, ...commonProps }: PropsT) => { > {field.fields.map((f, i) => { const key = getFieldKey(formType, f, i); - const nestedFieldOptional = isOptionalField(f); - return ( - - ); + return ; })} @@ -455,16 +445,8 @@ const Field = ({ field, ...commonProps }: PropsT) => { {tabToShow.fields.map((f, i) => { const key = getFieldKey(formType, f, i); - const nestedFieldOptional = isOptionalField(f); - return ( - - ); + return ; })} ) : ( @@ -507,7 +489,6 @@ const Field = ({ field, ...commonProps }: PropsT) => { blocklistedSelects={field.blocklistedSelects} allowlistedSelects={field.allowlistedSelects} defaults={field.defaults} - optional={optional} isValidConcept={(item) => !nodeIsInvalid( item, @@ -530,6 +511,7 @@ const Field = ({ field, ...commonProps }: PropsT) => { } : { concepts: [], connector: "OR" } } + rowPrefixFieldname={field.rowPrefixField?.name} renderRowPrefix={ exists(field.rowPrefixField) ? ({ value: fieldValue, onChange, row, i }) => ( diff --git a/frontend/src/js/external-forms/form/Form.tsx b/frontend/src/js/external-forms/form/Form.tsx index 28be70f607..05d8a16c30 100644 --- a/frontend/src/js/external-forms/form/Form.tsx +++ b/frontend/src/js/external-forms/form/Form.tsx @@ -6,7 +6,7 @@ import type { SelectOptionT } from "../../api/types"; import { useActiveLang } from "../../localization/useActiveLang"; import FormHeader from "../FormHeader"; import type { Form as FormType } from "../config-types"; -import { getFieldKey, getH1Index, isOptionalField } from "../helper"; +import { getFieldKey, getH1Index } from "../helper"; import Field from "./Field"; @@ -44,7 +44,6 @@ const Form = memo(({ config, datasetOptions, methods }: Props) => { )} {config.fields.map((field, i) => { const key = getFieldKey(config.type, field, i); - const optional = isOptionalField(field); const h1Index = getH1Index(config.fields, field); return ( @@ -58,7 +57,6 @@ const Form = memo(({ config, datasetOptions, methods }: Props) => { setValue={methods.setValue} availableDatasets={datasetOptions} locale={activeLang} - optional={optional} /> ); })} diff --git a/frontend/src/js/external-forms/helper.ts b/frontend/src/js/external-forms/helper.ts index 0efdfc901a..c1ff843e84 100644 --- a/frontend/src/js/external-forms/helper.ts +++ b/frontend/src/js/external-forms/helper.ts @@ -42,15 +42,6 @@ export const getH1Index = (fields: GeneralField[], field: GeneralField) => { return h1Fields.indexOf(field); }; -export const isOptionalField = (field: GeneralField) => { - return ( - isFormField(field) && - (!("validations" in field) || - ("validations" in field && - (!field.validations || !field.validations.includes("NOT_EMPTY")))) - ); -}; - export const isFormField = (field: GeneralField): field is FormField => { return !nonFormFieldTypes.has(field.type); }; diff --git a/frontend/src/js/header/Header.tsx b/frontend/src/js/header/Header.tsx index eeef5a2fee..6fbaa852d9 100644 --- a/frontend/src/js/header/Header.tsx +++ b/frontend/src/js/header/Header.tsx @@ -6,7 +6,7 @@ import { useSelector } from "react-redux"; import type { StateT } from "../app/reducers"; import { HistoryButton } from "../button/HistoryButton"; import DatasetSelector from "../dataset/DatasetSelector"; -import { canUploadResult, useHideLogoutButton } from "../user/selectors"; +import { canViewEntityPreview, useHideLogoutButton } from 
"../user/selectors"; import { HelpMenu } from "./HelpMenu"; import LogoutButton from "./LogoutButton"; @@ -70,7 +70,7 @@ const Headline = styled("h1")` const Header: FC = () => { const { t } = useTranslation(); - const canUpload = useSelector(canUploadResult); + const canViewHistory = useSelector(canViewEntityPreview); const hideLogoutButton = useHideLogoutButton(); const { manualUrl, contactEmail } = useSelector< StateT, @@ -86,7 +86,7 @@ const Header: FC = () => { - {canUpload && } + {canViewHistory && } {(manualUrl || contactEmail) && ( )} diff --git a/frontend/src/js/preview/ColumnStats.tsx b/frontend/src/js/preview/ColumnStats.tsx index c6f1dd1e21..451c8f4ceb 100644 --- a/frontend/src/js/preview/ColumnStats.tsx +++ b/frontend/src/js/preview/ColumnStats.tsx @@ -2,31 +2,28 @@ import styled from "@emotion/styled"; import { FC } from "react"; import { useTranslation } from "react-i18next"; +import { formatCurrency } from "../entity-history/timeline/util"; + import { ColumnDescriptionType } from "./Preview"; -const Stats = styled("div")` - padding: 0 10px 10px 0; -`; -const Stat = styled("code")` - display: block; - margin: 0; -`; const Name = styled("code")` display: block; font-weight: 700; font-size: ${({ theme }) => theme.font.xs}; - max-width: 130px; - margin-bottom: 8px; + max-width: 200px; `; const Label = styled("span")` font-style: italic; - padding-right: 10px; `; const Values = styled("div")` + display: grid; + grid-template-columns: auto 1fr; + gap: 0px 10px; font-size: ${({ theme }) => theme.font.xs}; `; const Value = styled("span")` font-weight: 700; + text-align: right; `; interface Props { colName: string; @@ -71,10 +68,6 @@ function toRoundedDecimalsString(num: number, decimals: number) { return rounded.toFixed(decimals).replace(".", ","); } -function toLocalizedNumberString(num: number) { - return num.toString().replace(".", ","); -} - const ColumnStats: FC = ({ colName, columnType, rawColumnData }) => { const { t } = useTranslation(); @@ -107,40 +100,35 @@ const ColumnStats: FC = ({ colName, columnType, rawColumnData }) => { const decimals = 2; // Might come in handy at some point // const variance = getVarianceFromAvg(cleanSortedData, avg); - const toMoneyMaybe = (num: number) => { - return columnType === "MONEY" ? num / 100 : num; + const formatValue = ( + num: number, + { alwaysDecimals }: { alwaysDecimals?: boolean } = {}, + ) => { + return columnType === "MONEY" + ? formatCurrency(num / 100, decimals) + : alwaysDecimals + ? 
toRoundedDecimalsString(num, decimals) + : num; }; return ( - + <> {colName} - - - - {toRoundedDecimalsString(toMoneyMaybe(avg), decimals)} - - - - - {toLocalizedNumberString(toMoneyMaybe(median))} - - - - {toLocalizedNumberString(toMoneyMaybe(min))} - - - - {toLocalizedNumberString(toMoneyMaybe(max))} - - - - - {toRoundedDecimalsString(toMoneyMaybe(std), decimals)} - - + + {formatValue(avg, { alwaysDecimals: true })} + + {formatValue(median)} + + {formatValue(min)} + + {formatValue(max)} + + {formatValue(std, { alwaysDecimals: true })} + + {formatValue(sum)} - + ); } default: diff --git a/frontend/src/js/preview/PreviewInfo.tsx b/frontend/src/js/preview/PreviewInfo.tsx index c3899919bc..1b7e9a4066 100644 --- a/frontend/src/js/preview/PreviewInfo.tsx +++ b/frontend/src/js/preview/PreviewInfo.tsx @@ -55,13 +55,19 @@ const SxIconButton = styled(IconButton)` background-color: white; `; -const StatsContainer = styled("div")` - display: flex; - flex-wrap: wrap; +const StatsCard = styled("div")` padding: 10px; box-shadow: 0 0 10px 0 rgba(0, 0, 0, 0.2); background-color: white; `; +const StatsContainer = styled("div")` + display: grid; + grid-template-columns: minmax(120px, 200px); + grid-template-rows: 1fr auto; + grid-auto-flow: column; + gap: 5px 20px; + overflow-x: auto; +`; const COLUMN_TYPES_WITH_SUPPORTED_STATS = new Set([ "MONEY", @@ -147,16 +153,18 @@ const PreviewInfo: FC = ({
{t("preview.statisticsHeadline")} {t("preview.statisticsSubline")} - - {rawPreviewData[0].map((col, j) => ( - row[j])} - /> - ))} - + + + {rawPreviewData[0].map((col, j) => ( + row[j])} + /> + ))} + +
)}
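A note on the statistics refactor above: ColumnStats' new formatValue routes MONEY columns through formatCurrency after converting cents to units. Below is a minimal sketch of that path, assuming EUR as the display currency. The hunk in entity-history/timeline/util.ts removes the hard-coded currency: "EUR" while keeping style: "currency", and toLocaleString throws when the currency style is used without a currency code, so a real build still has to supply one.

// Sketch only (editor's addition); mirrors formatCurrency from the diff, with
// the currency code re-added as an assumption, since Intl requires it
// whenever style is "currency".
const formatCurrencySketch = (value: number, digits?: number) =>
  value.toLocaleString(navigator.language, {
    style: "currency",
    currency: "EUR", // assumption: not present in the hunk above
    minimumFractionDigits: digits,
    maximumFractionDigits: digits,
  });

// MONEY values arrive in cents, hence the division by 100 in formatValue:
formatCurrencySketch(123456 / 100, 2); // "1.234,56 €" under a de-DE locale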
diff --git a/frontend/src/js/previous-queries/list/ProjectItem.tsx b/frontend/src/js/previous-queries/list/ProjectItem.tsx index b586985b12..a8bb14e9be 100644 --- a/frontend/src/js/previous-queries/list/ProjectItem.tsx +++ b/frontend/src/js/previous-queries/list/ProjectItem.tsx @@ -1,5 +1,8 @@ import styled from "@emotion/styled"; -import { faUser as faUserRegular } from "@fortawesome/free-regular-svg-icons"; +import { + faCalendar, + faUser as faUserRegular, +} from "@fortawesome/free-regular-svg-icons"; import { faFolder as faFolderRegular } from "@fortawesome/free-regular-svg-icons"; import { faFolder, @@ -18,6 +21,7 @@ import IconButton from "../../button/IconButton"; import { formatDate } from "../../common/helpers/dateHelper"; import { exists } from "../../common/helpers/exists"; import { useFormLabelByType } from "../../external-forms/stateSelectors"; +import FaIcon from "../../icon/FaIcon"; import FormSymbol from "../../symbols/FormSymbol"; import QuerySymbol from "../../symbols/QuerySymbol"; import WithTooltip from "../../tooltip/WithTooltip"; @@ -129,6 +133,16 @@ const SxDownloadButton = styled(DownloadButton)` } `; +const Row = styled("div")` + display: flex; + align-items: flex-start; + gap: 5px; +`; + +const SxFaIcon = styled(FaIcon)` + opacity: 0.7; +`; + const FoldersButton = styled(IconButton)` margin-right: 10px; `; @@ -220,17 +234,24 @@ const ProjectItem = forwardRef< /> {!isFormConfig(item) && item.resultUrls.length > 0 ? ( - - - {topLeftLabel} - - + + + + {topLeftLabel} + + + {!item.containsDates && ( + + + + )} + ) : ( {topLeftLabel} )} diff --git a/frontend/src/js/previous-queries/list/ProjectItemDragContainer.tsx b/frontend/src/js/previous-queries/list/ProjectItemDragContainer.tsx index 5e0d23b550..89bdbb8f8a 100644 --- a/frontend/src/js/previous-queries/list/ProjectItemDragContainer.tsx +++ b/frontend/src/js/previous-queries/list/ProjectItemDragContainer.tsx @@ -1,4 +1,4 @@ -import { FC, useRef } from "react"; +import { useRef } from "react"; import { useDrag } from "react-dnd"; import { getWidthAndHeight } from "../../app/DndProvider"; @@ -10,19 +10,20 @@ import ProjectItem, { ProjectItemT } from "./ProjectItem"; import { isFormConfig } from "./helpers"; import { PreviousQueryT } from "./reducer"; -interface PropsT { - item: ProjectItemT; - onIndicateShare: () => void; - onIndicateEditFolders: () => void; -} - const getDragType = (item: PreviousQueryT) => { return item.queryType === "CONCEPT_QUERY" ? 
DNDType.PREVIOUS_QUERY : DNDType.PREVIOUS_SECONDARY_ID_QUERY; }; -const ProjectItemDragContainer: FC = ({ item, ...props }) => { +const ProjectItemDragContainer = ({ + item, + ...props +}: { + item: ProjectItemT; + onIndicateShare: () => void; + onIndicateEditFolders: () => void; +}) => { const ref = useRef(null); const dragItemBase = { diff --git a/frontend/src/js/previous-queries/list/ProjectItems.tsx b/frontend/src/js/previous-queries/list/ProjectItems.tsx index 3e3652d0b7..64db88bce8 100644 --- a/frontend/src/js/previous-queries/list/ProjectItems.tsx +++ b/frontend/src/js/previous-queries/list/ProjectItems.tsx @@ -4,7 +4,6 @@ import { useState, useCallback, useLayoutEffect, - FC, useEffect, } from "react"; import { FixedSizeList } from "react-window"; @@ -17,11 +16,6 @@ import type { ProjectItemT } from "./ProjectItem"; import ProjectItemDragContainer from "./ProjectItemDragContainer"; import ShareProjectItemModal from "./ShareProjectItemModal"; -interface PropsT { - datasetId: DatasetT["id"] | null; - items: ProjectItemT[]; -} - const ROW_SIZE = 62; const ROOT_PADDING_Y = 4; @@ -31,7 +25,13 @@ const Root = styled("div")` padding: ${ROOT_PADDING_Y}px 0; `; -const ProjectItems: FC = ({ datasetId, items }) => { +export const ProjectItems = ({ + datasetId, + items, +}: { + items: ProjectItemT[]; + datasetId: DatasetT["id"] | null; +}) => { const [itemToShare, setItemToShare] = useState(null); const [itemToEditFolders, setItemToEditFolders] = useState(null); @@ -124,5 +124,3 @@ const ProjectItems: FC = ({ datasetId, items }) => { ); }; - -export default ProjectItems; diff --git a/frontend/src/js/previous-queries/list/ProjectItemsTab.tsx b/frontend/src/js/previous-queries/list/ProjectItemsTab.tsx index d7d5ffa657..737a530f42 100644 --- a/frontend/src/js/previous-queries/list/ProjectItemsTab.tsx +++ b/frontend/src/js/previous-queries/list/ProjectItemsTab.tsx @@ -22,7 +22,7 @@ import UploadQueryResults from "../upload/UploadQueryResults"; import Folders from "./Folders"; import FoldersToggleButton from "./FoldersToggleButton"; import { ProjectItemT } from "./ProjectItem"; -import PreviousQueries from "./ProjectItems"; +import { ProjectItems } from "./ProjectItems"; import { useLoadFormConfigs, useLoadQueries } from "./actions"; import type { FormConfigT, PreviousQueryT } from "./reducer"; import { selectPreviousQueries } from "./selector"; @@ -148,7 +148,7 @@ const ProjectItemsTab = ({ datasetId }: PropsT) => { )} - + diff --git a/frontend/src/js/previous-queries/list/reducer.ts b/frontend/src/js/previous-queries/list/reducer.ts index b7799584c4..a934f1e9ea 100644 --- a/frontend/src/js/previous-queries/list/reducer.ts +++ b/frontend/src/js/previous-queries/list/reducer.ts @@ -53,6 +53,7 @@ export interface PreviousQueryT { groups?: UserGroupIdT[]; queryType: "CONCEPT_QUERY" | "SECONDARY_ID_QUERY"; secondaryId?: string | null; + containsDates: boolean; } export interface PreviousQueriesStateT { diff --git a/frontend/src/js/query-runner/QueryResults.tsx b/frontend/src/js/query-runner/QueryResults.tsx index 008185ce81..e3ebd0df85 100644 --- a/frontend/src/js/query-runner/QueryResults.tsx +++ b/frontend/src/js/query-runner/QueryResults.tsx @@ -11,7 +11,7 @@ import { QueryResultHistoryButton } from "../button/QueryResultHistoryButton"; import { isEmpty } from "../common/helpers/commonHelper"; import { exists } from "../common/helpers/exists"; import FaIcon from "../icon/FaIcon"; -import { canUploadResult } from "../user/selectors"; +import { canViewEntityPreview, canViewQueryPreview } from 
"../user/selectors"; import DownloadResultsDropdownButton from "./DownloadResultsDropdownButton"; @@ -54,7 +54,8 @@ const QueryResults: FC = ({ }) => { const { t } = useTranslation(); const csvUrl = resultUrls.find((ru) => ru.url.endsWith("csv")); - const canUpload = useSelector(canUploadResult); + const canViewHistory = useSelector(canViewEntityPreview); + const canViewPreview = useSelector(canViewQueryPreview); return ( @@ -73,8 +74,10 @@ const QueryResults: FC = ({ )} {!!csvUrl && exists(resultColumns) && ( <> - - {canUpload && ( + {canViewPreview && ( + + )} + {canViewHistory && ( { }, []); const onImportLines = useCallback( - (lines: string[], andIdx?: number) => { + (lines: string[], filename?: string, andIdx?: number) => { dispatch( initUploadConceptListModal({ rows: lines, - filename: t("importModal.pasted"), + filename: filename || t("importModal.pasted"), }), ); diff --git a/frontend/src/js/standard-query-editor/QueryEditorDropzone.tsx b/frontend/src/js/standard-query-editor/QueryEditorDropzone.tsx index 895c263b8a..d0eb909c16 100644 --- a/frontend/src/js/standard-query-editor/QueryEditorDropzone.tsx +++ b/frontend/src/js/standard-query-editor/QueryEditorDropzone.tsx @@ -50,7 +50,7 @@ interface Props { onDropNode: (node: StandardQueryNodeT) => void; onDropFile: (file: File) => void; onLoadPreviousQuery: (id: QueryIdT) => void; - onImportLines?: (lines: string[]) => void; + onImportLines?: (lines: string[], filename?: string) => void; } const QueryEditorDropzone = forwardRef( diff --git a/frontend/src/js/standard-query-editor/QueryGroup.tsx b/frontend/src/js/standard-query-editor/QueryGroup.tsx index ae8aeb4160..0b8a6ec7cb 100644 --- a/frontend/src/js/standard-query-editor/QueryGroup.tsx +++ b/frontend/src/js/standard-query-editor/QueryGroup.tsx @@ -53,7 +53,7 @@ interface PropsT { andIdx: number; onDropOrNode: (node: StandardQueryNodeT, andIdx: number) => void; onDropFile: (file: File, andIdx: number) => void; - onImportLines: (lines: string[], andIdx?: number) => void; + onImportLines: (lines: string[], filename?: string, andIdx?: number) => void; onDeleteNode: (andIdx: number, orIdx: number) => void; onEditClick: (andIdx: number, orIdx: number) => void; onExpandClick: (q: QueryT) => void; @@ -106,7 +106,8 @@ const QueryGroup = ({ [andIdx, onDropFile], ); const importLines = useCallback( - (lines: string[]) => onImportLines(lines, andIdx), + (lines: string[], filename?: string) => + onImportLines(lines, filename, andIdx), [andIdx, onImportLines], ); diff --git a/frontend/src/js/standard-query-editor/types.ts b/frontend/src/js/standard-query-editor/types.ts index 184fe511a9..60c4c96fc3 100644 --- a/frontend/src/js/standard-query-editor/types.ts +++ b/frontend/src/js/standard-query-editor/types.ts @@ -58,6 +58,7 @@ export interface DragContext { movedFromOrIdx?: number; deleteFromOtherField?: () => void; movedFromFieldName?: string; + rowPrefixFieldname?: string; } export interface DragItemQuery extends PreviousQueryQueryNodeType { diff --git a/frontend/src/js/ui-components/DropzoneWithFileInput.tsx b/frontend/src/js/ui-components/DropzoneWithFileInput.tsx index 76fe4ee9af..86d541dc73 100644 --- a/frontend/src/js/ui-components/DropzoneWithFileInput.tsx +++ b/frontend/src/js/ui-components/DropzoneWithFileInput.tsx @@ -70,7 +70,7 @@ interface PropsT { showImportButton?: boolean; importButtonOutside?: boolean; - onImportLines?: (lines: string[]) => void; + onImportLines?: (lines: string[], filename?: string) => void; importPlaceholder?: string; importDescription?: string; } @@ -111,8 
+111,8 @@ const DropzoneWithFileInput = < const [importModalOpen, setImportModalOpen] = useState(false); - function onSubmitImport(lines: string[]) { - onImportLines?.(lines); + function onSubmitImport(lines: string[], filename?: string) { + onImportLines?.(lines, filename); } function onOpenFileDialog() { diff --git a/frontend/src/js/ui-components/ImportModal.tsx b/frontend/src/js/ui-components/ImportModal.tsx index 26cfe32d64..71aa081293 100644 --- a/frontend/src/js/ui-components/ImportModal.tsx +++ b/frontend/src/js/ui-components/ImportModal.tsx @@ -69,10 +69,11 @@ export const ImportModal = ({ description?: string; placeholder?: string; onClose: () => void; - onSubmit: (lines: string[]) => void; + onSubmit: (lines: string[], filename?: string) => void; }) => { const { t } = useTranslation(); const [textInput, setTextInput] = useState(""); + const [droppedFilename, setDroppedFilename] = useState(); const canReadClipboard = useCanReadClipboard(); const fileInputRef = useRef(null); @@ -84,7 +85,7 @@ export const ImportModal = ({ const lines = textInput.split("\n").map((line) => line.trim()); - onSubmit(lines); + onSubmit(lines, droppedFilename); onClose(); }; @@ -114,6 +115,7 @@ export const ImportModal = ({ const onSelectFile = async (file: File) => { const rows = await getUniqueFileRows(file); + setDroppedFilename(file.name); autoFormatAndSet(rows.join("\n")); }; diff --git a/frontend/src/js/ui-components/InputDate/CustomHeader.tsx b/frontend/src/js/ui-components/InputDate/CustomHeader.tsx index 83b0b594c0..8c3e45420b 100644 --- a/frontend/src/js/ui-components/InputDate/CustomHeader.tsx +++ b/frontend/src/js/ui-components/InputDate/CustomHeader.tsx @@ -5,10 +5,8 @@ import { } from "@fortawesome/free-solid-svg-icons"; import { useState } from "react"; import { ReactDatePickerCustomHeaderProps } from "react-datepicker"; -import { useSelector } from "react-redux"; import { SelectOptionT } from "../../api/types"; -import { StateT } from "../../app/reducers"; import IconButton from "../../button/IconButton"; import { TransparentButton } from "../../button/TransparentButton"; import { useMonthName, useMonthNames } from "../../common/helpers/dateHelper"; @@ -27,12 +25,20 @@ export const SelectMenuContainer = styled("div")` width: 100%; `; -export const OptionList = styled(List)` +export const MonthListContainer = styled(List)` display: grid; grid-template-columns: auto auto; gap: 5px; `; +export const YearListContainer = styled(List)` + display: flex; + flex-direction: column-reverse; + gap: 5px; + height: 200px; + overflow: auto; +`; + export const MonthYearLabel = styled("div")` font-weight: bold; cursor: pointer; @@ -46,32 +52,38 @@ export const MonthYearLabel = styled("div")` const SelectMenu = ({ date, + layout, options, onSelect, }: Pick & { options: SelectOptionT[]; + layout: "oneColumn" | "twoColumns"; onSelect: (n: number) => void; -}) => ( - - - - {options.map((option) => ( - onSelect(option.value as number)} - > - {option.label} - - ))} - - - -); +}) => { + const OptionList = + layout === "twoColumns" ? 
MonthListContainer : YearListContainer; + return ( + + + + {options.map((option) => ( + onSelect(option.value as number)} + > + {option.label} + + ))} + + + + ); +}; const YearMonthSelect = ({ date, @@ -81,20 +93,9 @@ const YearMonthSelect = ({ ReactDatePickerCustomHeaderProps, "date" | "changeYear" | "changeMonth" >) => { - const numLastYearsToShow = useSelector((state) => { - if (state.startup.config.observationPeriodStart) { - return ( - new Date().getFullYear() - - new Date(state.startup.config.observationPeriodStart).getFullYear() - ); - } else { - return 10; - } - }); - const yearOptions: SelectOptionT[] = [...Array(numLastYearsToShow).keys()] + const yearOptions: SelectOptionT[] = [...Array(100).keys()] .map((n) => new Date().getFullYear() - n) - .map((year) => ({ label: String(year), value: year })) - .reverse(); + .map((year) => ({ label: String(year), value: year })); const monthNames = useMonthNames(); const monthOptions: SelectOptionT[] = monthNames.map((month, i) => ({ @@ -102,7 +103,7 @@ const YearMonthSelect = ({ value: i, })); - const [yearSelectOpen, setYearSelectOpen] = useState(false); + const [yearSelectOpen, setYearSelectOpen] = useState(true); const [monthSelectOpen, setMonthSelectOpen] = useState(false); const handleClick = () => { if (yearSelectOpen || monthSelectOpen) { @@ -121,6 +122,7 @@ const YearMonthSelect = ({ {yearSelectOpen && ( { changeYear(year); @@ -132,6 +134,7 @@ const YearMonthSelect = ({ {monthSelectOpen && ( { changeMonth(month); diff --git a/frontend/src/js/ui-components/InputDate/InputDate.tsx b/frontend/src/js/ui-components/InputDate/InputDate.tsx index 00e57bc56b..6d035fc919 100644 --- a/frontend/src/js/ui-components/InputDate/InputDate.tsx +++ b/frontend/src/js/ui-components/InputDate/InputDate.tsx @@ -1,8 +1,11 @@ import styled from "@emotion/styled"; -import { createElement, forwardRef, useRef, useState } from "react"; +import { faCalendar } from "@fortawesome/free-regular-svg-icons"; +import { createElement, forwardRef, useRef } from "react"; import ReactDatePicker from "react-datepicker"; import "react-datepicker/dist/react-datepicker.css"; +import { mergeRefs } from "react-merge-refs"; +import IconButton from "../../button/IconButton"; import { formatDate, parseDate } from "../../common/helpers/dateHelper"; import BaseInput, { Props as BaseInputProps } from "../BaseInput"; @@ -19,11 +22,22 @@ const Root = styled("div")` } .react-datepicker-popper[data-placement^="bottom"] { padding-top: 4px; - transform: translate3d(0, 32px, 0) !important; } .react-datepicker-popper[data-placement^="top"] { padding-bottom: 0; - transform: translate3d(0, 32px, 0) !important; + } +`; + +const CalendarButton = styled(IconButton)` + position: absolute; + left: 0; + top: 0; + padding: 8px 10px; +`; + +const StyledBaseInput = styled(BaseInput)` + input { + padding-left: 28px; } `; @@ -45,28 +59,12 @@ type Props = Omit & { onCalendarSelect?: (val: string) => void; }; -// TODO: Remove this once we have solved -// - that the date picker overlays other fields in forms -const TEMPORARILY_DISABLED_DATE_PICKER = true; - -const InputDate = forwardRef( +const InputDate = forwardRef( ( - { - className, - value, - dateFormat, - onChange, - onCalendarSelect, - onFocus, - onBlur, - onClick, - ...props - }, + { className, value, dateFormat, onChange, onCalendarSelect, ...props }, ref, ) => { const datePickerRef = useRef(null); - const [hasFocus, setHasFocus] = useState(false); - const [focusBlocked, setFocusBlocked] = useState(false); return ( ( if (e.key === "Escape") 
datePickerRef.current?.setOpen(false); }} > - { onChange(val as string); }} - onFocus={(e) => { - if (focusBlocked) { - e.currentTarget.blur(); - setFocusBlocked(false); - } else { - onFocus?.(e); - setHasFocus(true); - datePickerRef.current?.setOpen(true); - } - }} - onBlur={(e) => { - onBlur?.(e); - setHasFocus(false); - }} - onClick={(e) => { - onClick?.(e); - if (hasFocus) { - datePickerRef.current?.setOpen(true); - } - }} inputProps={{ ...props?.inputProps, onKeyPress: (e) => { @@ -111,8 +88,12 @@ const InputDate = forwardRef( }, }} /> + datePickerRef.current?.setOpen(true)} + /> { if (!val) { @@ -122,7 +103,6 @@ const InputDate = forwardRef( const selectedDate = formatDate(val, dateFormat); onChange(selectedDate); onCalendarSelect?.(selectedDate); - setFocusBlocked(true); datePickerRef.current?.setOpen(false); }} onClickOutside={() => datePickerRef.current?.setOpen(false)} @@ -130,7 +110,16 @@ const InputDate = forwardRef( customInput={createElement(HiddenInput)} calendarContainer={StyledCalendar} calendarStartDay={1} - disabled={TEMPORARILY_DISABLED_DATE_PICKER} + popperProps={{ + modifiers: [ + { + name: "preventOverflow", + options: { + mainAxis: false, + }, + }, + ], + }} /> ); diff --git a/frontend/src/js/ui-components/InputDateRange.tsx b/frontend/src/js/ui-components/InputDateRange.tsx index e58a00745e..9327482ad1 100644 --- a/frontend/src/js/ui-components/InputDateRange.tsx +++ b/frontend/src/js/ui-components/InputDateRange.tsx @@ -2,6 +2,7 @@ import { css } from "@emotion/react"; import styled from "@emotion/styled"; import { faCalendar } from "@fortawesome/free-regular-svg-icons"; import { FC, ReactNode, createRef, useMemo } from "react"; +import ReactDatePicker from "react-datepicker"; import { useTranslation } from "react-i18next"; import { IndexPrefix } from "../common/components/IndexPrefix"; @@ -19,7 +20,6 @@ import InfoTooltip from "../tooltip/InfoTooltip"; import InputDate from "./InputDate/InputDate"; import Label from "./Label"; import Labeled from "./Labeled"; -import Optional from "./Optional"; const Root = styled("div")<{ center?: boolean }>` text-align: ${({ center }) => (center ? 
"center" : "left")}; @@ -87,7 +87,6 @@ interface PropsT { center?: boolean; autoFocus?: boolean; tooltip?: string; - optional?: boolean; value: DateStringMinMax; onChange: (value: DateStringMinMax) => void; } @@ -114,7 +113,6 @@ const InputDateRange: FC = ({ labelSuffix, value, onChange, - optional, tooltip, }) => { const { t } = useTranslation(); @@ -166,7 +164,7 @@ const InputDateRange: FC = ({ const min = getDisplayDate("min", value, displayDateFormat); const max = getDisplayDate("max", value, displayDateFormat); - const maxRef = createRef(); + const maxRef = createRef(); const isMinValid = exists(value.min && parseDate(min, displayDateFormat)); const isMaxValid = exists(value.max && parseDate(max, displayDateFormat)); @@ -178,7 +176,6 @@ const InputDateRange: FC = ({ {exists(indexPrefix) && # {indexPrefix}} - {optional && } {label} = ({ {labelSuffix && labelSuffix} ); - }, [t, label, labelSuffix, large, optional, tooltip, indexPrefix]); + }, [t, label, labelSuffix, large, tooltip, indexPrefix]); return ( @@ -213,7 +210,7 @@ const InputDateRange: FC = ({ onChange={(val) => onChangeRaw("min", val as string, displayDateFormat) } - onCalendarSelect={() => maxRef.current?.focus()} + onCalendarSelect={() => maxRef.current?.setOpen(true)} onBlur={(e) => applyDate("min", e.target.value, displayDateFormat)} inputProps={{ autoFocus, diff --git a/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.tsx b/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.tsx index 4fefe99913..463358b7c6 100644 --- a/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.tsx +++ b/frontend/src/js/ui-components/InputMultiSelect/InputMultiSelect.tsx @@ -10,6 +10,7 @@ import { useTranslation } from "react-i18next"; import type { SelectOptionT } from "../../api/types"; import { exists } from "../../common/helpers/exists"; +import { getFileRows } from "../../common/helpers/fileHelper"; import { useDebounce } from "../../common/helpers/useDebounce"; import FaIcon from "../../icon/FaIcon"; import InfoTooltip from "../../tooltip/InfoTooltip"; @@ -363,7 +364,11 @@ const InputMultiSelect = ({ ...selectedItems, ...optionsWithoutCreatable, ]); - setInputValue(""); + setTimeout(() => { + // To let the above state change propagage + // before triggering another "load more" request + setInputValue(""); + }, 100); } }} /> @@ -410,7 +415,12 @@ const InputMultiSelect = ({ {!hasTooManyValues && !onResolve && Select} {!hasTooManyValues && !!onResolve && ( {}} + onDrop={async (item) => { + if (item.files) { + const rows = await getFileRows(item.files[0]); + onResolve(rows); + } + }} disableClick tight importButtonOutside diff --git a/frontend/src/js/ui-components/InputPlain/InputPlain.tsx b/frontend/src/js/ui-components/InputPlain/InputPlain.tsx index 2fb4df3375..7bedb969b8 100644 --- a/frontend/src/js/ui-components/InputPlain/InputPlain.tsx +++ b/frontend/src/js/ui-components/InputPlain/InputPlain.tsx @@ -17,7 +17,6 @@ const SxBaseInput = styled(BaseInput)<{ fullWidth?: boolean }>` interface Props { label: string; indexPrefix?: number; - optional?: boolean; inputType?: string; money?: boolean; className?: string; @@ -44,7 +43,6 @@ const InputPlain = forwardRef( large, indexPrefix, tooltip, - optional, inputType = "text", money, placeholder, @@ -65,7 +63,6 @@ const InputPlain = forwardRef( largeLabel={large} indexPrefix={indexPrefix} tooltip={tooltip} - optional={optional} > void; sortOptions?: (a: SelectOptionT, b: SelectOptionT, query: string) => number; }) => { @@ -347,7 +345,6 @@ const InputSelect 
= ({ } indexPrefix={indexPrefix} className={className} - optional={optional} > {Select} diff --git a/frontend/src/js/ui-components/InputTextarea/InputTextarea.tsx b/frontend/src/js/ui-components/InputTextarea/InputTextarea.tsx index 80980be2c6..ba97a43f0c 100644 --- a/frontend/src/js/ui-components/InputTextarea/InputTextarea.tsx +++ b/frontend/src/js/ui-components/InputTextarea/InputTextarea.tsx @@ -35,7 +35,6 @@ interface OtherProps { fullWidth?: boolean; indexPrefix?: number; tooltip?: string; - optional?: boolean; onChange: (value: string | null) => void; } @@ -49,16 +48,7 @@ export const InputTextarea = forwardRef< InputTextareaProps & OtherProps >( ( - { - label, - className, - fullWidth, - indexPrefix, - tooltip, - optional, - onChange, - ...props - }, + { label, className, fullWidth, indexPrefix, tooltip, onChange, ...props }, ref, ) => { const { t } = useTranslation(); @@ -70,7 +60,6 @@ export const InputTextarea = forwardRef< className={className} fullWidth tooltip={tooltip} - optional={optional} >
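Two closing notes on the InputDate rework above. First, react-merge-refs is added to package.json and imported in InputDate.tsx, but its call site is not visible in the flattened hunk. A typical use, sketched here with a hypothetical component; only the mergeRefs API (an array of refs merged into a single ref callback) is taken as given:

import { ComponentPropsWithoutRef, forwardRef, useRef } from "react";
import { mergeRefs } from "react-merge-refs";

// Sketch (editor's addition): let a forwarded ref and a local ref track the
// same underlying input element.
const SketchInput = forwardRef<
  HTMLInputElement,
  ComponentPropsWithoutRef<"input">
>((props, outerRef) => {
  const localRef = useRef<HTMLInputElement>(null);
  return <input {...props} ref={mergeRefs([outerRef, localRef])} />;
});

Second, the popperProps now passed to ReactDatePicker disable the preventOverflow modifier along the main axis (Popper.js v2 modifier shape), which keeps the calendar anchored to its input; that appears to be what makes the removed transform: translate3d(...) overrides in the popper CSS unnecessary.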