diff --git a/autodoc/src/main/java/com/bakdata/conquery/Constants.java b/autodoc/src/main/java/com/bakdata/conquery/Constants.java index 405dc6d19c..30fc07cd0a 100644 --- a/autodoc/src/main/java/com/bakdata/conquery/Constants.java +++ b/autodoc/src/main/java/com/bakdata/conquery/Constants.java @@ -31,10 +31,6 @@ import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; import com.bakdata.conquery.apiv1.query.concept.filter.ValidityDateContainer; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRefCollection; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.model.Base; import com.bakdata.conquery.model.Group; import com.bakdata.conquery.models.common.Range; @@ -182,8 +178,6 @@ public class Constants { public static final String JSON_CREATOR = JsonCreator.class.getName(); public static final String CPS_TYPE = CPSType.class.getName(); - public static final Set ID_REF = Set.of(NsIdRef.class.getName(), MetaIdRef.class.getName()); - public static final Set ID_REF_COL = Set.of(NsIdRefCollection.class.getName(), MetaIdRefCollection.class.getName()); public static final String JSON_IGNORE = JsonIgnore.class.getName(); public static final String JSON_BACK_REFERENCE = JsonBackReference.class.getName(); public static final String PATH = Path.class.getName(); diff --git a/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java b/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java index ea99912a73..7a36c1973b 100644 --- a/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java +++ b/autodoc/src/main/java/com/bakdata/conquery/handler/GroupHandler.java @@ -267,16 +267,7 @@ private void handleField(ClassInfo currentType, FieldInfo field) throws IOExcept final TypeSignature typeSignature = 
field.getTypeSignatureOrTypeDescriptor(); final Ctx ctx = new Ctx().withField(field); - final String type; - if (ID_REF.stream().anyMatch(field::hasAnnotation)) { - type = ID_OF + printType(ctx.withIdOf(true), typeSignature); - } - else if (ID_REF_COL.stream().anyMatch(field::hasAnnotation)) { - type = LIST_OF + ID_OF + StringUtils.removeStart(printType(ctx.withIdOf(true), typeSignature), LIST_OF); - } - else { - type = printType(ctx, typeSignature); - } + final String type = printType(ctx, typeSignature); out.table( editLink(introspec), diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java b/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java index 96e6205e84..3c034c2d1e 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/FilterTemplate.java @@ -6,11 +6,10 @@ import com.bakdata.conquery.apiv1.frontend.FrontendValue; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.config.IndexConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.SearchIndexId; import com.bakdata.conquery.models.index.FrontendValueIndex; import com.bakdata.conquery.models.index.FrontendValueIndexKey; @@ -41,9 +40,10 @@ @CPSType(id = "CSV_TEMPLATE", base = SearchIndex.class) public class FilterTemplate extends IdentifiableImpl implements Searchable, SearchIndex { + private static final long serialVersionUID = 1L; + @NotNull - @NsIdRef - private Dataset dataset; + private DatasetId dataset; @NotEmpty private final String name; @@ -106,6 +106,6 @@ public TrieSearch createTrieSearch(IndexConfig config) throws Ind 
@Override public SearchIndexId createId() { - return new SearchIndexId(dataset.getId(), name); + return new SearchIndexId(dataset, name); } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java index 069a2ddfe6..52a007fd55 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java @@ -19,6 +19,12 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; +import jakarta.inject.Inject; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.validation.Validator; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.apiv1.execution.ExecutionStatus; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; @@ -52,16 +58,16 @@ import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import 
com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; @@ -82,12 +88,6 @@ import com.bakdata.conquery.util.QueryUtils; import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.validation.Validator; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.UriBuilder; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -108,17 +108,17 @@ public class QueryProcessor { public Stream getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) { - final Collection allQueries = storage.getAllExecutions(); + final Stream allQueries = storage.getAllExecutions(); return getQueriesFiltered(dataset.getId(), RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders); } - public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Collection allQueries, boolean allProviders) { + public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream allQueries, boolean allProviders) { - return allQueries.stream() + return allQueries // The following only checks the dataset, under which the query was submitted, but a query can target more that // one dataset. 
- .filter(q -> q.getDataset().getId().equals(datasetId)) + .filter(q -> q.getDataset().equals(datasetId)) // to exclude subtypes from somewhere else .filter(QueryProcessor::canFrontendRender) .filter(Predicate.not(ManagedExecution::isSystem)) @@ -200,10 +200,10 @@ public static boolean isFrontendStructure(CQElement root) { /** * Cancel a running query: Sending cancellation to shards, which will cause them to stop executing them, results are not sent back, and incoming results will be discarded. */ - public void cancel(Subject subject, Dataset dataset, ManagedExecution query) { + public void cancel(Subject subject, ManagedExecution query) { // Does not make sense to cancel a query that isn't running. - ExecutionManager executionManager = datasetRegistry.get(dataset.getId()).getExecutionManager(); + ExecutionManager executionManager = datasetRegistry.get(query.getDataset()).getExecutionManager(); if (!query.getState().equals(ExecutionState.RUNNING)) { return; } @@ -221,20 +221,19 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc if (patch.getGroups() != null && !patch.getGroups().isEmpty()) { - for (ManagedExecutionId managedExecutionId : execution.getSubmitted().collectRequiredQueries()) { - final ManagedExecution subQuery = storage.getExecution(managedExecutionId); + for (ManagedExecutionId subExecutionId : execution.getSubmitted().collectRequiredQueries()) { - if (!subject.isPermitted(subQuery, Ability.READ)) { - log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subQuery.getId(), subject); + if (!subject.isPermitted(subExecutionId, Ability.READ)) { + log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subExecutionId, subject); continue; } - final ConqueryPermission canReadQuery = subQuery.createPermission(Set.of(Ability.READ)); + final ConqueryPermission canReadQuery = subExecutionId.createPermission(Set.of(Ability.READ)); final Set groupsToShareWith = new 
HashSet<>(patch.getGroups()); // Find all groups the query is already shared with, so we do not remove them, as patch is absolute - for (Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { if (groupsToShareWith.contains(group.getId())) { continue; } @@ -250,7 +249,7 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc .groups(new ArrayList<>(groupsToShareWith)) .build(); - patchQuery(subject, subQuery, sharePatch); + patchQuery(subject, subExecutionId.resolve(), sharePatch); } } @@ -261,18 +260,17 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatc public void reexecute(Subject subject, ManagedExecution query) { log.info("User[{}] reexecuted Query[{}]", subject.getId(), query); - ExecutionManager executionManager = datasetRegistry.get(query.getDataset().getId()).getExecutionManager(); if (!query.getState().equals(ExecutionState.RUNNING)) { final Namespace namespace = query.getNamespace(); - namespace.getExecutionManager().execute(namespace, query, config); + namespace.getExecutionManager().execute(query, config); } } public void deleteQuery(Subject subject, ManagedExecution execution) { log.info("User[{}] deleted Query[{}]", subject.getId(), execution.getId()); - datasetRegistry.get(execution.getDataset().getId()) + datasetRegistry.get(execution.getDataset()) .getExecutionManager() // Don't go over execution#getExecutionManager() as that's only set when query is initialized .clearQueryResults(execution); @@ -280,12 +278,12 @@ public void deleteQuery(Subject subject, ManagedExecution execution) { } public ExecutionState awaitDone(ManagedExecution query, int time, TimeUnit unit) { - final Namespace namespace = datasetRegistry.get(query.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(query.getDataset()); return namespace.getExecutionManager().awaitDone(query, time, unit); } public FullExecutionStatus getQueryFullStatus(ManagedExecution 
query, Subject subject, UriBuilder url, Boolean allProviders) { - final Namespace namespace = datasetRegistry.get(query.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(query.getDataset()); query.initExecutable(config); @@ -326,7 +324,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext execution = ((ManagedQuery) namespace .getExecutionManager() - .createExecution(query, subject.getUser(), namespace, false)); + .createExecution(query, subject.getId(), namespace, false)); execution.setLastResultCount((long) statistic.getResolved().size()); @@ -342,7 +340,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext /** * Create and submit {@link EntityPreviewForm} for subject on to extract sources for entity, and extract some additional infos to be used as infocard. */ - public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) { + public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List sources, Dataset dataset, Range dateRange) { subject.authorize(dataset, Ability.ENTITY_PREVIEW); subject.authorize(dataset, Ability.PRESERVE_ID); @@ -438,7 +436,7 @@ public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Subje } // Execute the query - return executionManager.runQuery(namespace, query, subject.getUser(), config, system); + return executionManager.runQuery(namespace, query, subject.getId(), config, system); } /** @@ -460,8 +458,8 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe // If SecondaryIds differ from selected and prior, we cannot reuse them. 
if (query instanceof SecondaryIdQuery) { - final SecondaryIdDescription selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId(); - final SecondaryIdDescription reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId(); + final SecondaryIdDescriptionId selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId(); + final SecondaryIdDescriptionId reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId(); if (!selectedSecondaryId.equals(reusedSecondaryId)) { return null; @@ -472,7 +470,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe if (!user.isOwner(execution)) { final ManagedExecution newExecution = - executionManager.createExecution(execution.getSubmitted(), user, namespace, false); + executionManager.createExecution(execution.getSubmitted(), user.getId(), namespace, false); newExecution.setLabel(execution.getLabel()); newExecution.setTags(execution.getTags().clone()); storage.updateExecution(newExecution); @@ -487,7 +485,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe log.trace("Re-executing Query {}", execution); - executionManager.execute(namespace, execution, config); + executionManager.execute(execution, config); return execution; diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java index 2984e904e0..bcdcfa1b1b 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/execution/FullExecutionStatus.java @@ -3,14 +3,13 @@ import java.util.Collection; import java.util.List; import java.util.Set; - import javax.annotation.Nullable; import com.bakdata.conquery.apiv1.query.QueryDescription; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import 
com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.error.ConqueryErrorInfo; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.ColumnDescriptor; import lombok.Data; import lombok.EqualsAndHashCode; @@ -65,6 +64,5 @@ public class FullExecutionStatus extends ExecutionStatus { /** * Possible {@link SecondaryIdDescription}s available, of {@link com.bakdata.conquery.models.datasets.concepts.Concept}s used in this Query. */ - @NsIdRefCollection - private Set availableSecondaryIds; + private Set availableSecondaryIds; } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java index e2e0141abf..13232326a8 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/ExternalForm.java @@ -16,14 +16,14 @@ import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.cps.SubTyped; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner; import com.bakdata.conquery.models.forms.frontendconfiguration.FormType; import com.bakdata.conquery.models.forms.managed.ExternalExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryResolveContext; import 
com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -133,7 +133,7 @@ public String getFormType() { } @Override - public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new ExternalExecution(this, user, submittedDataset, storage, datasetRegistry); } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java index 71fbff1ada..d82687fc10 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/FormConfigAPI.java @@ -5,9 +5,9 @@ import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.forms.configs.FormConfig; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.util.VariableDefaultValue; import com.fasterxml.jackson.databind.JsonNode; import lombok.AllArgsConstructor; @@ -37,7 +37,7 @@ public class FormConfigAPI { @VariableDefaultValue @Builder.Default private LocalDateTime creationTime = LocalDateTime.now(); - public FormConfig intern(User owner, DatasetId dataset) { + public FormConfig intern(UserId owner, DatasetId dataset) { FormConfig intern = new FormConfig(); intern.setFormId(formId); intern.setFormType(formType); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java index 4187562315..22b42a922c 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/ExportForm.java @@ -24,15 +24,14 @@ import com.bakdata.conquery.internationalization.ExportFormC10n; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.forms.util.ResolutionShortNames; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; @@ -92,6 +91,30 @@ public class ExportForm extends Form implements InternalForm { @JsonIgnore @EqualsAndHashCode.Exclude private List resolvedResolutions; + + /** + * Maps the given resolution to a fitting alignment. It tries to use the alignment which was given as a hint. + * If the alignment does not fit to a resolution (resolution is finer than the alignment), the first alignment that + * this resolution supports is chosen (see the alignment order in {@link Resolution}) + * @param resolutions The temporal resolutions for which sub queries should be generated per entity + * @param alignmentHint The preferred calendar alignment on which the sub queries of each resolution should be aligned. + * Note that this alignment is chosen when a resolution is equal or coarser. 
+ * @return The given resolutions mapped to a fitting calendar alignment. + */ + public static List getResolutionAlignmentMap(List resolutions, Alignment alignmentHint) { + + return resolutions.stream() + .map(r -> ResolutionAndAlignment.of(r, getFittingAlignment(alignmentHint, r))) + .collect(Collectors.toList()); + } + + private static Alignment getFittingAlignment(Alignment alignmentHint, Resolution resolution) { + if(resolution.isAlignmentSupported(alignmentHint) ) { + return alignmentHint; + } + return resolution.getDefaultAlignment(); + } + @Override public void visit(Consumer visitor) { visitor.accept(this); @@ -99,7 +122,6 @@ public void visit(Consumer visitor) { features.forEach(visitor); } - @Override public Map createSubQueries() { return Map.of( @@ -108,6 +130,16 @@ public Map createSubQueries() { ); } + @Override + public String getLocalizedTypeLabel() { + return C10N.get(ExportFormC10n.class, I18n.LOCALE.get()).getType(); + } + + @Override + public ManagedInternalForm toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry); + } + @Override public Set collectRequiredQueries() { if (queryGroupId == null) { @@ -120,7 +152,7 @@ public Set collectRequiredQueries() { @Override public void resolve(QueryResolveContext context) { if(queryGroupId != null) { - queryGroup = (ManagedQuery) context.getStorage().getExecution(queryGroupId); + queryGroup = (ManagedQuery) queryGroupId.resolve(); prerequisite = queryGroup.getQuery(); } else { @@ -150,33 +182,19 @@ public void resolve(QueryResolveContext context) { } } - @Override - public String getLocalizedTypeLabel() { - return C10N.get(ExportFormC10n.class, I18n.LOCALE.get()).getType(); - } - - /** - * Maps the given resolution to a fitting alignment. It tries to use the alignment which was given as a hint. 
- * If the alignment does not fit to a resolution (resolution is finer than the alignment), the first alignment that - * this resolution supports is chosen (see the alignment order in {@link Resolution}) - * @param resolutions The temporal resolutions for which sub queries should be generated per entity - * @param alignmentHint The preferred calendar alignment on which the sub queries of each resolution should be aligned. - * Note that this alignment is chosen when a resolution is equal or coarser. - * @return The given resolutions mapped to a fitting calendar alignment. + * Classes that can be used as Features in ExportForm, having default-exists, are triggered this way. */ - public static List getResolutionAlignmentMap(List resolutions, Alignment alignmentHint) { - - return resolutions.stream() - .map(r -> ResolutionAndAlignment.of(r, getFittingAlignment(alignmentHint, r))) - .collect(Collectors.toList()); - } - - private static Alignment getFittingAlignment(Alignment alignmentHint, Resolution resolution) { - if(resolution.isAlignmentSupported(alignmentHint) ) { - return alignmentHint; + public static interface DefaultSelectSettable { + public static void enable(List features) { + for (CQElement feature : features) { + if(feature instanceof DefaultSelectSettable){ + ((DefaultSelectSettable) feature).setDefaultExists(); + } + } } - return resolution.getDefaultAlignment(); + + void setDefaultExists(); } /** @@ -197,25 +215,4 @@ public static ResolutionAndAlignment of(Resolution resolution, Alignment alignme return new ResolutionAndAlignment(resolution, alignment); } } - - /** - * Classes that can be used as Features in ExportForm, having default-exists, are triggered this way. 
- */ - public static interface DefaultSelectSettable { - public static void enable(List features) { - for (CQElement feature : features) { - if(feature instanceof DefaultSelectSettable){ - ((DefaultSelectSettable) feature).setDefaultExists(); - } - } - } - - void setDefaultExists(); - } - - - @Override - public ManagedForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { - return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java index 168087f4b3..83a033cc72 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/forms/export_form/FullExportForm.java @@ -23,12 +23,12 @@ import com.bakdata.conquery.internationalization.ExportFormC10n; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; @@ -126,7 +126,7 @@ public String getLocalizedTypeLabel() { @Override - public ManagedInternalForm toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry 
datasetRegistry) { + public ManagedInternalForm toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry); } } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java index 9769140747..0ee8f35b54 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/frontend/FrontendPreviewConfig.java @@ -3,10 +3,8 @@ import java.util.Collection; import java.util.List; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Data; @@ -22,12 +20,10 @@ public static class Labelled { @JsonProperty("default") private final Collection defaultConnectors; - @NsIdRefCollection - private final List> searchFilters; + private final List searchFilters; /** * Search concept needs to be parent of searchFilters, so frontend can resolve the filters. 
*/ - @NsIdRef - private final Concept searchConcept; + private final ConceptId searchConcept; } diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java index e81cd67dc9..7255213f29 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/Query.java @@ -6,11 +6,12 @@ import java.util.stream.Stream; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.QueryResolveContext; @@ -41,7 +42,7 @@ public Set collectRequiredQueries() { public abstract List getResultInfos(); @Override - public ManagedQuery toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedQuery toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new ManagedQuery(this, user, submittedDataset, storage, datasetRegistry); } @@ -58,7 +59,7 @@ public CQElement getReusableComponents() { * * @param results * @return the number of results in the result List. - * @see ManagedExecution#finish(ExecutionState) for how it's used. + * @see ManagedExecution#finish(ExecutionState, ExecutionManager) for how it's used. 
*/ public long countResults(Stream results) { return results.map(EntityResult::listResultLines) diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java index 3255482131..c457bde3da 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/QueryDescription.java @@ -9,14 +9,16 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.RequiredEntities; @@ -37,7 +39,7 @@ public interface QueryDescription extends Visitable { * Transforms the submitted query to an {@link ManagedExecution}. * In this step some external dependencies are resolve (such as {@link CQExternal}). * However, steps that require add or manipulates queries programmatically based on the submitted query - * should be done in an extra init procedure (see {@link ManagedExecution#doInitExecutable()}. 
+ * should be done in an extra init procedure (see {@link ManagedExecution#doInitExecutable(Namespace)}. * These steps are executed right before the execution of the query and not necessary in this creation phase. * * @param user @@ -45,7 +47,7 @@ public interface QueryDescription extends Visitable { * @param storage * @return */ - ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry); + ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry); Set collectRequiredQueries(); @@ -81,15 +83,16 @@ static void authorizeQuery(QueryDescription queryDescription, Subject subject, D // Generate DatasetPermissions final Set datasets = nsIdCollector.getIdentifiables().stream() .map(NamespacedIdentifiable::getDataset) + .map(Id::resolve) .collect(Collectors.toSet()); subject.authorize(datasets, Ability.READ); // Generate ConceptPermissions - final Set concepts = nsIdCollector.getIdentifiables().stream() + final Set> concepts = nsIdCollector.getIdentifiables().stream() .filter(ConceptElement.class::isInstance) .map(ConceptElement.class::cast) - .map(ConceptElement::getConcept) + .>map(ConceptElement::getConcept) .collect(Collectors.toSet()); subject.authorize(concepts, Ability.READ); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java index acced1a092..ed18c4234c 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/SecondaryIdQuery.java @@ -5,18 +5,21 @@ import java.util.List; import java.util.Set; import java.util.function.Consumer; +import java.util.stream.Collectors; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import 
com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -28,7 +31,6 @@ import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.resultinfo.SecondaryIdResultInfo; import com.fasterxml.jackson.annotation.JsonView; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -43,29 +45,31 @@ public class SecondaryIdQuery extends Query { protected DateAggregationMode dateAggregationMode = DateAggregationMode.MERGE; @NotNull private CQElement root; - @NsIdRef @NotNull - private SecondaryIdDescription secondaryId; + private SecondaryIdDescriptionId secondaryId; /** * @apiNote not using {@link ConceptQuery} directly in the API-spec simplifies the API. 
*/ @JsonView(View.InternalCommunication.class) private ConceptQuery query; - @NsIdRefCollection @JsonView(View.InternalCommunication.class) - private Set withSecondaryId; + private Set withSecondaryId; - @NsIdRefCollection @JsonView(View.InternalCommunication.class) - private Set withoutSecondaryId; + private Set withoutSecondaryId; @Override public SecondaryIdQueryPlan createQueryPlan(QueryPlanContext context) { - final ConceptQueryPlan queryPlan = query.createQueryPlan(context.withSelectedSecondaryId(secondaryId)); - - return new SecondaryIdQueryPlan(query, context, secondaryId, withSecondaryId, withoutSecondaryId, queryPlan, context.getSecondaryIdSubPlanRetention()); + final SecondaryIdDescription secondaryIdDescription = secondaryId.resolve(); + final ConceptQueryPlan queryPlan = query.createQueryPlan(context.withSelectedSecondaryId(secondaryIdDescription)); + + return new SecondaryIdQueryPlan(query, context, secondaryIdDescription, withSecondaryId.stream() + .map(ColumnId::resolve) + .collect(Collectors.toSet()), withoutSecondaryId.stream() + .map(TableId::resolve) + .collect(Collectors.toSet()), queryPlan, context.getSecondaryIdSubPlanRetention()); } @Override @@ -98,21 +102,19 @@ public void resolve(final QueryResolveContext context) { query.visit(queryElement -> { // We cannot check for CQExternal here and add the ALL_IDS Table because it is not serializable at the moment - if (!(queryElement instanceof CQConcept)) { + if (!(queryElement instanceof CQConcept concept)) { return; } - final CQConcept concept = (CQConcept) queryElement; - for (CQTable connector : concept.getTables()) { - final Table table = connector.getConnector().getTable(); + final Table table = connector.getConnector().resolve().getResolvedTable(); final Column secondaryIdColumn = table.findSecondaryIdColumn(secondaryId); if (secondaryIdColumn != null && !concept.isExcludeFromSecondaryId()) { - withSecondaryId.add(secondaryIdColumn); + withSecondaryId.add(secondaryIdColumn.getId()); } else 
{ - withoutSecondaryId.add(table); + withoutSecondaryId.add(table.getId()); } } }); @@ -127,7 +129,7 @@ public void resolve(final QueryResolveContext context) { public List getResultInfos() { final List resultInfos = new ArrayList<>(); - resultInfos.add(new SecondaryIdResultInfo(secondaryId)); + resultInfos.add(new SecondaryIdResultInfo(secondaryId.resolve())); resultInfos.addAll(query.getResultInfos()); diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java index 18176a428d..236e762860 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java @@ -8,10 +8,14 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.OptionalInt; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.stream.Collectors; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.ResultHeaders; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; @@ -19,7 +23,6 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefKeys; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.common.daterange.CDateRange; @@ -28,7 +31,10 @@ import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.DateAggregationMode; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -45,9 +51,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonView; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; @@ -96,14 +99,13 @@ public class TableExportQuery extends Query { * - SecondaryIds are collected into a Column per SecondaryId * - The remaining columns are arbitrarily ordered, but usually grouped by their source table. 
*/ - @NsIdRefKeys @JsonView(View.InternalCommunication.class) - private Map positions; + private Map positions; @JsonIgnore - private Set conceptColumns; + private Set conceptColumns; @JsonIgnore - private Map secondaryIdPositions; + private Map secondaryIdPositions; @Override @@ -150,68 +152,72 @@ public void resolve(QueryResolveContext context) { .filter(Objects::nonNull) .collect(Collectors.toSet()); - // We need to know if a column is a concept column so we can prioritize it if it is also a SecondaryId + // We need to know if a column is a concept column, so we can prioritize it, if it is also a SecondaryId conceptColumns = tables.stream() - .map(CQConcept::getTables) - .flatMap(Collection::stream) - .map(CQTable::getConnector) - .map(Connector::getColumn) - .filter(Objects::nonNull) - .collect(Collectors.toSet()); + .map(CQConcept::getTables) + .flatMap(Collection::stream) + .map(CQTable::getConnector) + .map(ConnectorId::resolve) + .map(Connector::getColumn) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); positions = calculateColumnPositions(currentPosition, tables, secondaryIdPositions, conceptColumns, validityDates); } - @Override - public List getResultInfos() { - return createResultInfos(conceptColumns); - } - - private Map calculateSecondaryIdPositions(AtomicInteger currentPosition) { - final Map secondaryIdPositions = new HashMap<>(); + private Map calculateSecondaryIdPositions(AtomicInteger currentPosition) { + final Map secondaryIdPositions = new HashMap<>(); // SecondaryIds are pulled to the front and grouped over all tables tables.stream() .flatMap(con -> con.getTables().stream()) - .flatMap(table -> Arrays.stream(table.getConnector().getTable().getColumns())) + .flatMap(table -> Arrays.stream(table.getConnector().resolve().getResolvedTable().getColumns())) .map(Column::getSecondaryId) .filter(Objects::nonNull) + .map(SecondaryIdDescriptionId::resolve) .distinct() .sorted(Comparator.comparing(SecondaryIdDescription::getLabel)) // Using 
for each and not a collector allows us to guarantee sorted insertion. - .forEach(secondaryId -> secondaryIdPositions.put(secondaryId, currentPosition.getAndIncrement())); + .forEach(secondaryId -> secondaryIdPositions.put(secondaryId.getId(), currentPosition.getAndIncrement())); return secondaryIdPositions; } - private static Map calculateColumnPositions(AtomicInteger currentPosition, List tables, Map secondaryIdPositions, Set conceptColumns, Set validityDates) { - final Map positions = new HashMap<>(); + private static Map calculateColumnPositions( + AtomicInteger currentPosition, + List tables, + Map secondaryIdPositions, + Collection conceptColumns, + Collection validityDates + ) { + final Map positions = new HashMap<>(); for (CQConcept concept : tables) { for (CQTable table : concept.getTables()) { // Set column positions, set SecondaryId positions to precomputed ones. - for (Column column : table.getConnector().getTable().getColumns()) { + for (Column column : table.getConnector().resolve().getResolvedTable().getColumns()) { // ValidityDates are handled separately in column=0 if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column))) { continue; } - if (positions.containsKey(column)) { + final ColumnId columnId = column.getId(); + if (positions.containsKey(columnId)) { continue; } // We want to have ConceptColumns separate here. 
- if (column.getSecondaryId() != null && !conceptColumns.contains(column)) { - positions.putIfAbsent(column, secondaryIdPositions.get(column.getSecondaryId())); + if (column.getSecondaryId() != null && !conceptColumns.contains(column.getId())) { + positions.putIfAbsent(columnId, secondaryIdPositions.get(column.getSecondaryId())); continue; } - positions.put(column, currentPosition.getAndIncrement()); + positions.put(columnId, currentPosition.getAndIncrement()); } } } @@ -219,9 +225,18 @@ private static Map calculateColumnPositions(AtomicInteger curre return positions; } - private List createResultInfos(Set conceptColumns) { + @Override + public List getResultInfos() { + return createResultInfos(conceptColumns); + } + + private List createResultInfos(Set conceptColumns) { - final int size = positions.values().stream().mapToInt(i -> i).max().getAsInt() + 1; + OptionalInt max = positions.values().stream().mapToInt(i -> i).max(); + if (max.isEmpty()) { + throw new IllegalStateException("Unable to determine maximum position"); + } + final int size = max.getAsInt() + 1; final ResultInfo[] infos = new ResultInfo[size]; @@ -229,34 +244,37 @@ private List createResultInfos(Set conceptColumns) { infos[1] = ResultHeaders.sourceInfo(); - for (Map.Entry e : secondaryIdPositions.entrySet()) { - final SecondaryIdDescription desc = e.getKey(); + for (Map.Entry e : secondaryIdPositions.entrySet()) { + final SecondaryIdDescriptionId desc = e.getKey(); final Integer pos = e.getValue(); - infos[pos] = new SecondaryIdResultInfo(desc); + infos[pos] = new SecondaryIdResultInfo(desc.resolve()); } final Map> connectorColumns = tables.stream() .flatMap(con -> con.getTables().stream()) - .filter(tbl -> tbl.getConnector().getColumn() != null) - .collect(Collectors.toMap(tbl -> tbl.getConnector().getColumn(), tbl -> tbl.getConnector().getConcept())); + .map(CQTable::getConnector) + .map(ConnectorId::resolve) + .filter(con -> con.getColumn() != null) + .collect(Collectors.toMap(con -> 
con.getColumn().resolve(), Connector::getConcept)); - for (Map.Entry entry : positions.entrySet()) { + for (Map.Entry entry : positions.entrySet()) { final int position = entry.getValue(); - final Column column = entry.getKey(); + ColumnId columnId = entry.getKey(); + final Column column = columnId.resolve(); - if(position == 0) { + if (position == 0) { continue; } // SecondaryIds and date columns are pulled to the front, thus already covered. - if (column.getSecondaryId() != null && !conceptColumns.contains(column)) { - infos[secondaryIdPositions.get(column.getSecondaryId())].addSemantics(new SemanticType.ColumnT(column)); + if (column.getSecondaryId() != null && !conceptColumns.contains(columnId)) { + infos[secondaryIdPositions.get(column.getSecondaryId())].addSemantics(new SemanticType.ColumnT(columnId)); continue; } @@ -268,14 +286,14 @@ private List createResultInfos(Set conceptColumns) { columnResultInfo = new ColumnResultInfo(column, ResultType.Primitive.STRING, column.getDescription(), isRawConceptValues() ? null : concept); // Columns that are used to build concepts are marked as ConceptColumn. - columnResultInfo.addSemantics(new SemanticType.ConceptColumnT(concept)); + columnResultInfo.addSemantics(new SemanticType.ConceptColumnT(concept.getId())); infos[position] = columnResultInfo; } else { // If it's not a connector column, we just link to the source column. 
columnResultInfo = new ColumnResultInfo(column, ResultType.resolveResultType(column.getType()), column.getDescription(), null); - columnResultInfo.addSemantics(new SemanticType.ColumnT(column)); + columnResultInfo.addSemantics(new SemanticType.ColumnT(column.getId())); } infos[position] = columnResultInfo; diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java index f426a8c8e8..8da877e7ad 100644 --- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java +++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/filter/CQTable.java @@ -4,24 +4,22 @@ import java.util.Collections; import java.util.List; import java.util.Objects; - import javax.annotation.CheckForNull; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorSelectId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import 
lombok.Getter; import lombok.Setter; @@ -37,23 +35,21 @@ public class CQTable { private List> filters = Collections.emptyList(); @NotNull - @NsIdRefCollection - private List selects = new ArrayList<>(); + private List selects = new ArrayList<>(); private boolean excludeFromTimeAggregation; @@ -90,15 +93,16 @@ public class CQConcept extends CQElement implements NamespacedIdentifiableHoldin public static CQConcept forSelect(Select select) { final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(select.getHolder().findConcept())); + // TODO transform to use only ids here + cqConcept.setElements(List.of(select.getHolder().findConcept().getId())); if (select.getHolder() instanceof Connector) { final CQTable table = new CQTable(); cqConcept.setTables(List.of(table)); - table.setConnector(((Connector) select.getHolder())); + table.setConnector(((Connector) select.getHolder()).getId()); - table.setSelects(List.of(select)); + table.setSelects(List.of((ConnectorSelectId) select.getId())); table.setConcept(cqConcept); } else { @@ -106,11 +110,11 @@ public static CQConcept forSelect(Select select) { .getConnectors().stream() .map(conn -> { final CQTable table = new CQTable(); - table.setConnector(conn); + table.setConnector(conn.getId()); return table; }).toList()); - cqConcept.setSelects(List.of(select)); + cqConcept.setSelects(List.of(select.getId())); } return cqConcept; @@ -118,10 +122,11 @@ public static CQConcept forSelect(Select select) { public static CQConcept forConnector(Connector source) { final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(source.getConcept())); + // TODO transform to use only ids here + cqConcept.setElements(List.of(source.getConcept().getId())); final CQTable cqTable = new CQTable(); cqTable.setConcept(cqConcept); - cqTable.setConnector(source); + cqTable.setConnector(source.getId()); cqConcept.setTables(List.of(cqTable)); return cqConcept; @@ -133,7 +138,7 @@ public String defaultLabel(Locale 
locale) { return null; } - if (elements.size() == 1 && elements.get(0).equals(getConcept())) { + if (elements.size() == 1 && elements.get(0).equals(getConceptId())) { return getConcept().getLabel(); } @@ -142,11 +147,12 @@ public String defaultLabel(Locale locale) { builder.append(getConcept().getLabel()); builder.append(" "); - for (ConceptElement id : elements) { - if (id.equals(getConcept())) { + for (ConceptElementId id : elements) { + ConceptElement conceptElement = id.resolve(); + if (conceptElement.equals(getConcept())) { continue; } - builder.append(id.getLabel()).append("+"); + builder.append(conceptElement.getLabel()).append("+"); } builder.deleteCharAt(builder.length() - 1); @@ -155,34 +161,19 @@ public String defaultLabel(Locale locale) { } @JsonIgnore - public Concept getConcept() { - return elements.get(0).getConcept(); + public ConceptId getConceptId() { + return elements.get(0).findConcept(); } @JsonIgnore - @ValidationMethod(message = "Not all Selects belong to the Concept.") - public boolean isAllSelectsForConcept() { - final Concept concept = getConcept(); - - if (!getSelects().stream().map(Select::getHolder).allMatch(concept::equals)) { - log.error("Not all selects belong to Concept[{}]", concept); - return false; - } - - return true; + public Concept getConcept() { + return getConceptId().resolve(); } - @JsonIgnore - @ValidationMethod(message = "Not all elements belong to the same Concept.") - public boolean isAllElementsForConcept() { - final Concept concept = getConcept(); - - if (!getElements().stream().map(ConceptElement::getConcept).allMatch(concept::equals)) { - log.error("Not all elements belong to Concept[{}]", concept); - return false; - } - - return true; + @Override + public void resolve(QueryResolveContext context) { + aggregateEventDates = !(excludeFromTimeAggregation || DateAggregationMode.NONE.equals(context.getDateAggregationMode())); + tables.forEach(t -> t.resolve(context)); } @Override @@ -198,9 +189,8 @@ public QPNode 
createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { .collect(Collectors.toList()); //add filter to children - final List> aggregators = new ArrayList<>(); - aggregators.addAll(conceptAggregators); + final List> aggregators = new ArrayList<>(conceptAggregators); final List> connectorAggregators = createAggregators(plan, table.getSelects()); @@ -218,7 +208,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { final List> eventDateUnionAggregators = - aggregateEventDates ? List.of(new EventDateUnionAggregator(Set.of(table.getConnector().getTable()))) + aggregateEventDates ? List.of(new EventDateUnionAggregator(Set.of(table.getConnector().resolve().getResolvedTable()))) : Collections.emptyList(); aggregators.addAll(eventDateUnionAggregators); @@ -235,7 +225,7 @@ public QPNode createQueryPlan(QueryPlanContext context, ConceptQueryPlan plan) { final ConceptNode node = new ConceptNode( conceptSpecificNode, - elements, + elements.stream().>map(ConceptElementId::resolve).toList(), table, // if the node is excluded, don't pass it into the Node. !excludeFromSecondaryId && hasSelectedSecondaryId ? 
context.getSelectedSecondaryId() : null @@ -260,12 +250,43 @@ public void collectRequiredQueries(Set requiredQueries) { } + @Override + public List getResultInfos() { + final List resultInfos = new ArrayList<>(); + + for (SelectId select : selects) { + Select resolved = select.resolve(); + resultInfos.add(resolved.getResultInfo(this)); + } + + for (CQTable table : tables) { + for (SelectId sel : table.getSelects()) { + Select resolved = sel.resolve(); + resultInfos.add(resolved.getResultInfo(this)); + } + } + + return resultInfos; + } + + @Override + public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { + final Set connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet()); + + return new RequiredEntities(context.getBucketManager() + .getEntitiesWithConcepts(getElements().stream() + .>map(ConceptElementId::resolve) + .toList(), + connectors, context.getDateRestriction())); + } + /** * Generates Aggregators from Selects. These are collected and also appended to the list of aggregators in the * query plan that contribute to columns the result. 
*/ - private static List> createAggregators(ConceptQueryPlan plan, Listresolve) .map(Select::createAggregator) .peek(plan::registerAggregator) .collect(Collectors.toList()); @@ -273,45 +294,51 @@ private static List> createAggregators(ConceptQueryPlan plan, List private ValidityDate selectValidityDate(CQTable table) { if (table.getDateColumn() != null) { - return table.getDateColumn().getValue(); + return table.getDateColumn().getValue().resolve(); } //else use this first defined validity date column - if (!table.getConnector().getValidityDates().isEmpty()) { - return table.getConnector().getValidityDates().get(0); + final Connector connector = table.getConnector().resolve(); + if (!connector.getValidityDates().isEmpty()) { + return connector.getValidityDates().get(0); } return null; } - @Override - public List getResultInfos() { - final List resultInfos = new ArrayList<>(); - - for (Select select : selects) { - resultInfos.add(select.getResultInfo(this)); - } + @JsonIgnore + @ValidationMethod(message = "Not all Selects belong to the Concept.") + public boolean isAllSelectsForConcept() { + final ConceptId concept = getConceptId(); - for (CQTable table : tables) { - for (Select sel : table.getSelects()) { - resultInfos.add(sel.getResultInfo(this)); - } + if (!getSelects().stream().map(SelectId::findConcept).allMatch(concept::equals)) { + log.error("Not all selects belong to Concept[{}]", concept); + return false; } - return resultInfos; + return true; } - @Override - public void collectNamespacedObjects(Set> identifiables) { - identifiables.addAll(elements); - identifiables.addAll(selects); - tables.forEach(table -> identifiables.add(table.getConnector())); + @JsonIgnore + @ValidationMethod(message = "Not all elements belong to the same Concept.") + public boolean isAllElementsForConcept() { + + final ConceptId concept = getConceptId(); + + if (!getElements().stream().map(ConceptElementId::findConcept).allMatch(concept::equals)) { + log.error("Not all elements 
belong to Concept[{}]", concept); + return false; + } + + return true; } @Override - public void resolve(QueryResolveContext context) { - aggregateEventDates = !(excludeFromTimeAggregation || DateAggregationMode.NONE.equals(context.getDateAggregationMode())); - tables.forEach(t -> t.resolve(context)); + public void collectNamespacedObjects(Set> identifiables) { + final List> list = elements.stream().>map(ConceptElementId::resolve).toList(); + identifiables.addAll(list); + identifiables.addAll(selects.stream().map(Id::resolve).toList()); + tables.forEach(table -> identifiables.add(table.getConnector().resolve())); } @Override @@ -325,22 +352,15 @@ public void setDefaultExists() { return; } - final List conSelects = new ArrayList<>(t.getSelects()); - conSelects.addAll(t.getConnector().getDefaultSelects()); + final List conSelects = new ArrayList<>(t.getSelects()); + conSelects.addAll(t.getConnector().resolve().getDefaultSelects().stream().map(Select::getId).map(ConnectorSelectId.class::cast).toList()); t.setSelects(conSelects); } } - - @Override - public RequiredEntities collectRequiredEntities(QueryExecutionContext context) { - final Set connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet()); - - return new RequiredEntities(context.getBucketManager().getEntitiesWithConcepts(getElements(), connectors, context.getDateRestriction())); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java index 3ddcb57894..f4cb95a37c 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/ManagerNode.java @@ -32,7 +32,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; import io.dropwizard.core.setup.Environment; -import io.dropwizard.jersey.DropwizardResourceConfig; import io.dropwizard.lifecycle.Managed; import 
lombok.Getter; import lombok.NonNull; @@ -40,6 +39,7 @@ import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.glassfish.jersey.internal.inject.AbstractBinder; +import org.glassfish.jersey.server.ResourceConfig; /** * Central node of Conquery. Hosts the frontend, api, metadata and takes care of query distribution to @@ -94,7 +94,7 @@ public void run(Manager manager) throws InterruptedException { // Initialization of internationalization I18n.init(); - configureApiServlet(config, environment.jersey().getResourceConfig()); + configureApiServlet(config, environment); maintenanceService = environment.lifecycle() .scheduledExecutorService("Maintenance Service") @@ -155,7 +155,8 @@ private void registerTasks(Manager manager, Environment environment, ConqueryCon environment.lifecycle().addServerLifecycleListener(shutdown); } - private void configureApiServlet(ConqueryConfig config, DropwizardResourceConfig jerseyConfig) { + private void configureApiServlet(ConqueryConfig config, Environment environment) { + ResourceConfig jerseyConfig = environment.jersey().getResourceConfig(); RESTServer.configure(config, jerseyConfig); jerseyConfig.register(new AbstractBinder() { @Override @@ -165,12 +166,14 @@ protected void configure() { } }); + getInternalMapperFactory().customizeApiObjectMapper(environment.getObjectMapper(), getDatasetRegistry(), getMetaStorage()); + jerseyConfig.register(PathParamInjector.class); } private void loadMetaStorage() { log.info("Opening MetaStorage"); - getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage())); + getMetaStorage().openStores(getInternalMapperFactory().createManagerPersistenceMapper(getDatasetRegistry(), getMetaStorage()), getEnvironment().metrics()); log.info("Loading MetaStorage"); getMetaStorage().loadData(); log.info("MetaStorage loaded {}", getMetaStorage()); diff --git a/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java 
b/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java index a1e084264b..a69e598fc0 100644 --- a/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java +++ b/backend/src/main/java/com/bakdata/conquery/commands/ShardNode.java @@ -65,7 +65,7 @@ public void run(ConqueryConfig config, Environment environment) throws Exception lifecycle.manage(clusterConnection); - final Collection workerStorages = config.getStorage().discoverWorkerStorages(); + final Collection workerStorages = config.getStorage().discoverWorkerStorages(); ExecutorService loaders = config.getQueries().getExecutionPool().createService("Worker loader"); @@ -74,7 +74,7 @@ public void run(ConqueryConfig config, Environment environment) throws Exception for (WorkerStorage workerStorage : workerStorages) { loaders.submit(() -> { try { - workersDone.add(workers.createWorker(workerStorage, config.isFailOnError())); + workersDone.add(workers.createWorker(workerStorage, config.isFailOnError(), environment)); } catch (Exception e) { log.error("Failed reading Storage", e); diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java index c1d942f3ee..0925b0efe9 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/IdRefPathParamConverterProvider.java @@ -2,13 +2,11 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Type; - import jakarta.inject.Inject; import jakarta.ws.rs.ext.ParamConverter; import jakarta.ws.rs.ext.ParamConverterProvider; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; 
@@ -16,7 +14,6 @@ import com.bakdata.conquery.models.worker.DatasetRegistry; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; -import lombok.RequiredArgsConstructor; @NoArgsConstructor @AllArgsConstructor @@ -46,6 +43,6 @@ public ParamConverter getConverter(Class rawType, Type genericType, An return new NamespacedIdRefParamConverter(parser, datasetRegistry); } - return new MetaIdRefParamConverter(parser, metaStorage.getCentralRegistry()); + return new MetaIdRefParamConverter(parser, metaStorage); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java index e70ceda844..ff01332b7a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/Initializing.java @@ -1,6 +1,7 @@ package com.bakdata.conquery.io.jackson; import com.fasterxml.jackson.databind.util.StdConverter; +import com.google.common.base.Throwables; /** * Interface for class instances that need initialization after deserialization and value injection. 
@@ -16,14 +17,20 @@ */ public interface Initializing { - void init(); + void init() throws Exception; class Converter extends StdConverter { @Override public T convert(T value) { - value.init(); - return value; + try { + value.init(); + return value; + } + catch (Exception e) { + Throwables.throwIfUnchecked(e); + throw new IllegalStateException("Could not initialize %s".formatted(value), e); + } } } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java index eb6e7a7c95..72c1d231a6 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/MetaIdRefParamConverter.java @@ -2,25 +2,26 @@ import jakarta.ws.rs.ext.ParamConverter; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.NonNull; import lombok.RequiredArgsConstructor; @RequiredArgsConstructor -public class MetaIdRefParamConverter, VALUE extends Identifiable> implements ParamConverter { +public class MetaIdRefParamConverter & MetaId, VALUE extends Identifiable> implements ParamConverter { private final IdUtil.Parser idParser; @NonNull - private final CentralRegistry registry; + private final MetaStorage storage; @Override public VALUE fromString(String value) { final ID id = idParser.parse(value); - return registry.resolve(id); + return (VALUE) id.get(storage); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java index 
c0c8fd7be8..5b849d36bc 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/MutableInjectableValues.java @@ -11,8 +11,8 @@ public class MutableInjectableValues extends InjectableValues { private final ConcurrentHashMap values = new ConcurrentHashMap<>(); - - public MutableInjectableValues add(Class type, T value) { + + public MutableInjectableValues add(Class type, T value) { if(!type.isInstance(value)) { throw new IllegalArgumentException(value+" is not of type "+type); } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java index 2d07631c49..92530d5f9e 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/NamespacedIdRefParamConverter.java @@ -21,7 +21,7 @@ public class NamespacedIdRefParamConverter & NamespacedId, public VALUE fromString(String value) { final ID id = idParser.parse(value); - return registry.resolve(id); + return (VALUE) id.get(registry.getStorage(id.getDataset())); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java index 8e06f2954c..5cd563227a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/CBlockDeserializer.java @@ -26,13 +26,31 @@ public class CBlockDeserializer extends JsonDeserializer implements Cont private JsonDeserializer beanDeserializer; + @SuppressWarnings({ "rawtypes", "unchecked" }) + @Override + public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException 
{ + JavaType type = Optional + .ofNullable(ctxt.getContextualType()) + .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); + + while(type.isContainerType()) { + type = type.getContentType(); + } + BeanDescription descr = ctxt.getConfig().introspect(type); + JsonDeserializer deser = ctxt.getFactory().createBeanDeserializer(ctxt, type, descr); + if(deser instanceof ResolvableDeserializer) { + ((ResolvableDeserializer) deser).resolve(ctxt); + } + return new CBlockDeserializer((JsonDeserializer)deser); + } + @Override public CBlock deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { CBlock block = beanDeserializer.deserialize(p, ctxt); - TreeConcept concept = block.getConnector().getConcept(); if(block.getMostSpecificChildren() != null) { + TreeConcept concept = (TreeConcept) block.getConnector().getConcept().resolve(); // deduplicate concrete paths after loading from disk. for (int event = 0; event < block.getMostSpecificChildren().length; event++) { @@ -55,21 +73,5 @@ public CBlock deserializeWithType(JsonParser p, DeserializationContext ctxt, Typ return this.deserialize(p, ctxt); } - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional - .ofNullable(ctxt.getContextualType()) - .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); - while(type.isContainerType()) { - type = type.getContentType(); - } - BeanDescription descr = ctxt.getConfig().introspect(type); - JsonDeserializer deser = ctxt.getFactory().createBeanDeserializer(ctxt, type, descr); - if(deser instanceof ResolvableDeserializer) { - ((ResolvableDeserializer) deser).resolve(ctxt); - } - return new CBlockDeserializer((JsonDeserializer)deser); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java 
b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java index 7307d1e0a7..f7604b1d79 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/IdDeserializer.java @@ -1,13 +1,19 @@ package com.bakdata.conquery.io.jackson.serializer; import java.io.IOException; +import java.util.HashSet; +import java.util.List; import java.util.Optional; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.IIdInterner; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.fasterxml.jackson.core.JsonParser; @@ -28,35 +34,99 @@ public class IdDeserializer> extends JsonDeserializer imple private Class idClass; private IdUtil.Parser idParser; - private boolean checkForInjectedPrefix; + private boolean isNamespacedId; + + @SuppressWarnings({"rawtypes", "unchecked"}) + @Override + public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) { + JavaType type = Optional.ofNullable(ctxt.getContextualType()) + .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); + + while (type.isContainerType()) { + type = type.getContentType(); + } + Class> idClass = (Class>) type.getRawClass(); + IdUtil.Parser>> parser = IdUtil.createParser((Class) idClass); + + return new IdDeserializer( + idClass, + parser, + //we only need to check for the dataset prefix if 
the id requires it + NamespacedId.class.isAssignableFrom(idClass) + ); + } @SuppressWarnings("unchecked") @Override public ID deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (ID) ctxt.handleUnexpectedToken(Id.class, parser.getCurrentToken(), parser, "name references should be strings"); + JsonToken currentToken = parser.getCurrentToken(); + if (currentToken != JsonToken.VALUE_STRING) { + return (ID) ctxt.handleUnexpectedToken(Id.class, currentToken, parser, "name references should be strings. Was: " + currentToken); } String text = parser.getText(); + // We need to assign resolvers for namespaced and meta ids because meta-objects might reference namespaced objects (e.g. ExecutionsId) + NamespacedStorageProvider namespacedStorageProvider = NamespacedStorageProvider.getResolver(ctxt); + MetaStorage metaStorage = MetaStorage.get(ctxt); + try { - return deserializeId(text, idParser, checkForInjectedPrefix, ctxt); + final ID id = deserializeId(text, idParser, isNamespacedId, ctxt); + + setResolver(id, metaStorage, namespacedStorageProvider); + + return id; } catch (Exception e) { return (ID) ctxt.handleWeirdStringValue(idClass, text, "Could not parse `" + idClass.getSimpleName() + "` from `" + text + "`: " + e.getMessage()); } } + public static void setResolver(Id id, MetaStorage metaStorage, NamespacedStorageProvider namespacedStorageProvider) { + // Set resolvers in this id and subIds + final HashSet> ids = new HashSet<>(); + id.collectIds(ids); + for (Id subId : ids) { + if (subId.getNamespacedStorageProvider() != null || subId.getMetaStorage() != null) { + // Ids are constructed of other ids that might already have a resolver set + continue; + } + if (subId instanceof NamespacedId) { + subId.setNamespacedStorageProvider(namespacedStorageProvider); + } + else if (subId instanceof MetaId) { + subId.setMetaStorage(metaStorage); + } + } + } + public static > ID 
deserializeId(String text, IdUtil.Parser idParser, boolean checkForInjectedPrefix, DeserializationContext ctx) throws JsonMappingException { - if (checkForInjectedPrefix) { - //check if there was a dataset injected and if it is already a prefix - String datasetName = findDatasetName(ctx); - if (datasetName != null) { - return idParser.parsePrefixed(datasetName, text); - } + List components = checkForInjectedPrefix ? + IdUtil.Parser.asComponents(findDatasetName(ctx), text) : + IdUtil.Parser.asComponents(text); + + + IIdInterner iIdInterner = IIdInterner.get(ctx); + + if (iIdInterner == null) { + // Parse directly, as no interner is available + return idParser.parse(components); + } + + IIdInterner.ParserIIdInterner idParserIIdInterner = iIdInterner.forParser(idParser); + ID id = idParserIIdInterner.get(components); + + if (id != null) { + // Return cached id + return id; } - return idParser.parse(text); + + // Parse and cache + id = idParser.parse(components); + idParserIIdInterner.putIfAbsent(components, id); + + return id; } private static String findDatasetName(DeserializationContext ctx) throws JsonMappingException { @@ -81,23 +151,5 @@ public ID deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDes return this.deserialize(p, ctxt); } - @SuppressWarnings({"rawtypes", "unchecked"}) - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional.ofNullable(ctxt.getContextualType()) - .orElseGet(Optional.ofNullable(property).map(BeanProperty::getType)::get); - while (type.isContainerType()) { - type = type.getContentType(); - } - Class> idClass = (Class>) type.getRawClass(); - IdUtil.Parser>> parser = IdUtil.createParser((Class) idClass); - - return new IdDeserializer( - idClass, - parser, - //we only need to check for the dataset prefix if the id requires it - NamespacedId.class.isAssignableFrom(idClass) - ); - } } diff --git 
a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java deleted file mode 100644 index ae70528480..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRef.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link Id} instead of the object content itself. - * - * @implNote You cannot expect MetaIdRefs to work beyond the ManagerNode! So resolve the content you need on the Manager (Or implement the necessary logic). 
- */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(using = IdReferenceSerializer.class) -@JsonDeserialize(using = MetaIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface MetaIdRef { -} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java deleted file mode 100644 index 6711fab58a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdRefCollection.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link Id} instead of the object content itself. 
- */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(contentUsing=IdReferenceSerializer.class) -@JsonDeserialize(contentUsing=MetaIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface MetaIdRefCollection {} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java deleted file mode 100644 index e08b026eea..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/MetaIdReferenceDeserializer.java +++ /dev/null @@ -1,93 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; -import java.util.InputMismatchException; -import java.util.Optional; - -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.*; -import com.fasterxml.jackson.databind.deser.ContextualDeserializer; -import com.fasterxml.jackson.databind.deser.SettableBeanProperty; -import com.fasterxml.jackson.databind.jsontype.TypeDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@AllArgsConstructor -@NoArgsConstructor -public class MetaIdReferenceDeserializer, T extends Identifiable> extends JsonDeserializer implements ContextualDeserializer { - - private Class type; - private JsonDeserializer beanDeserializer; - private Class idClass; - - @SuppressWarnings("unchecked") - @Override - public T 
deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (T) ctxt.handleUnexpectedToken(type, parser.getCurrentToken(), parser, "name references should be strings"); - } - - ID id = ctxt.readValue(parser, idClass); - - try { - final CentralRegistry centralRegistry = MetaStorage.get(ctxt).getCentralRegistry(); - - // Not all Components have registries, we leave it up to the validator to be angry. - if (centralRegistry == null) { - return null; - } - - Optional result = centralRegistry.getOptional(id); - - if (result.isEmpty()) { - throw new IdReferenceResolvingException(parser, "Could not find entry `" + id + "` of type " + type.getName(), id.toString(), type); - } - - if (!type.isAssignableFrom(result.get().getClass())) { - throw new InputMismatchException(String.format("Cannot assign type %s to %s ", result.get().getClass(), type)); - } - - return result.get(); - } - catch (Exception e) { - log.error("Error while resolving entry {} of type {}", id, type, e); - throw e; - } - } - - @Override - public T deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDeserializer typeDeserializer) throws IOException { - return this.deserialize(p, ctxt); - } - - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - - JavaType type = Optional.ofNullable(ctxt.getContextualType()) - .orElseGet(property::getType); - - BeanDescription descr = ctxt.getConfig().introspect(type); - - while (type.isContainerType()) { - type = type.getContentType(); - } - - Class cl = type.getRawClass(); - Class idClass = IdUtil.findIdClass(cl); - - return new MetaIdReferenceDeserializer<>(cl, ctxt.getFactory().createBeanDeserializer(ctxt, type, descr), idClass); - } - - @Override - public SettableBeanProperty findBackReference(String refName) { - return beanDeserializer.findBackReference(refName); - } -} diff --git 
a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java deleted file mode 100644 index 155d5224fc..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRef.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link NamespacedId} instead of the object content itself. 
- */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(using = IdReferenceSerializer.class) -@JsonDeserialize(using = NsIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface NsIdRef { -} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java deleted file mode 100644 index cb3f0a3d15..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefCollection.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - -/** - * An annotation that guides Jackson to serialize/deserialize the field as a {@link NamespacedId} instead of the object content itself. 
- * - */ -@Retention(RetentionPolicy.RUNTIME) -@JacksonAnnotationsInside -@JsonProperty -@JsonSerialize(contentUsing=IdReferenceSerializer.class) -@JsonDeserialize(contentUsing=NsIdReferenceDeserializer.class) -@Target({ElementType.FIELD, ElementType.TYPE_PARAMETER, ElementType.TYPE_USE}) -public @interface NsIdRefCollection {} \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java deleted file mode 100644 index 08bb4ddc2a..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdRefKeys.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.fasterxml.jackson.annotation.JacksonAnnotationsInside; -import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import com.fasterxml.jackson.databind.annotation.JsonSerialize; - - -/** - * Annotation for Maps where the keys are supposed to be NsIdRefs - */ -@JacksonAnnotationsInside -@JsonDeserialize(keyUsing = NsIdReferenceKeyDeserializer.class) -@JsonSerialize(keyUsing = IdReferenceKeySerializer.class) -@Target({ElementType.FIELD, ElementType.PARAMETER}) -@Retention(RetentionPolicy.RUNTIME) -public @interface NsIdRefKeys { -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java deleted file mode 100644 index 59414d51b8..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceDeserializer.java +++ /dev/null @@ -1,95 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; -import 
java.util.InputMismatchException; -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.worker.IdResolveContext; -import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonToken; -import com.fasterxml.jackson.databind.BeanDescription; -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JavaType; -import com.fasterxml.jackson.databind.JsonDeserializer; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.deser.ContextualDeserializer; -import com.fasterxml.jackson.databind.deser.SettableBeanProperty; -import com.fasterxml.jackson.databind.jsontype.TypeDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@AllArgsConstructor -@NoArgsConstructor -public class NsIdReferenceDeserializer & NamespacedId, T extends Identifiable> extends JsonDeserializer implements ContextualDeserializer { - - private Class type; - private JsonDeserializer beanDeserializer; - private Class idClass; - - @Override - public T deserializeWithType(JsonParser p, DeserializationContext ctxt, TypeDeserializer typeDeserializer) throws IOException { - return this.deserialize(p, ctxt); - } - - @SuppressWarnings("unchecked") - @Override - public T deserialize(JsonParser parser, DeserializationContext ctxt) throws IOException { - if (parser.getCurrentToken() != JsonToken.VALUE_STRING) { - return (T) ctxt.handleUnexpectedToken(type, parser.getCurrentToken(), parser, "name references should be strings"); - } - - ID id = ctxt.readValue(parser, idClass); - - try { - - final IdResolveContext 
idResolveContext = IdResolveContext.get(ctxt); - Optional result = idResolveContext.getOptional(id); - - if (result.isEmpty()) { - throw new IdReferenceResolvingException(parser, "Could not find entry `" + id + "` of type " + type.getName(), id.toString(), type); - } - - if (!type.isAssignableFrom(result.get().getClass())) { - throw new InputMismatchException(String.format("Cannot assign %s of type %s to %s ", id, result.get().getClass(), type)); - } - - return result.get(); - } - catch (Exception e) { - throw new RuntimeException("Error while resolving entry " + id + " of type " + type, e); - } - } - - @Override - public JsonDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - JavaType type = Optional - .ofNullable(ctxt.getContextualType()) - .orElseGet(property::getType); - - BeanDescription descr = ctxt.getConfig().introspect(type); - - while (type.isContainerType()) { - type = type.getContentType(); - } - Class cl = (Class) type.getRawClass(); - Class idClass = IdUtil.findIdClass(cl); - - return new NsIdReferenceDeserializer<>( - cl, - ctxt.getFactory().createBeanDeserializer(ctxt, type, descr), - idClass - ); - } - - @Override - public SettableBeanProperty findBackReference(String refName) { - return beanDeserializer.findBackReference(refName); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java b/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java deleted file mode 100644 index 1fd0fde25e..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/jackson/serializer/NsIdReferenceKeyDeserializer.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import java.io.IOException; - -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; -import 
com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; -import com.bakdata.conquery.models.worker.IdResolveContext; -import com.fasterxml.jackson.databind.BeanProperty; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import com.fasterxml.jackson.databind.KeyDeserializer; -import com.fasterxml.jackson.databind.deser.ContextualKeyDeserializer; -import lombok.AllArgsConstructor; -import lombok.NoArgsConstructor; - -/** - * Deserializer for Map keys. See {@link NsIdRef} for details. - */ -@AllArgsConstructor -@NoArgsConstructor -public class NsIdReferenceKeyDeserializer & NamespacedId, VALUE extends NamespacedIdentifiable> extends KeyDeserializer implements ContextualKeyDeserializer { - - private IdUtil.Parser parser; - - @Override - public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException { - final ID id = parser.parse(key); - - return IdResolveContext.get(ctxt).resolve(id); - } - - @Override - public KeyDeserializer createContextual(DeserializationContext ctxt, BeanProperty property) throws JsonMappingException { - - final Class idClass = IdUtil.findIdClass(property.getType().getKeyType().getRawClass()); - final IdUtil.Parser parser = IdUtil.createParser(idClass); - - - return new NsIdReferenceKeyDeserializer<>(parser); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java index 2bfdb36dce..a60d742f96 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java +++ b/backend/src/main/java/com/bakdata/conquery/io/mina/BinaryJacksonCoder.java @@ -1,14 +1,15 @@ package com.bakdata.conquery.io.mina; +import jakarta.validation.Validator; + import com.bakdata.conquery.models.exceptions.ValidatorHelper; +import 
com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.messages.network.NetworkMessage; -import com.bakdata.conquery.models.worker.IdResolveContext; import com.bakdata.conquery.util.io.EndCheckableInputStream; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; -import jakarta.validation.Validator; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -18,17 +19,10 @@ public class BinaryJacksonCoder implements CQCoder> { private final ObjectWriter writer; private final ObjectReader reader; - public BinaryJacksonCoder(IdResolveContext datasets, Validator validator, ObjectMapper objectMapper) { + public BinaryJacksonCoder(NamespacedStorageProvider namespacedStorageProvider, Validator validator, ObjectMapper objectMapper) { this.validator = validator; writer = objectMapper.writerFor(NetworkMessage.class); - reader = datasets.injectIntoNew(objectMapper.readerFor(NetworkMessage.class)).without(Feature.AUTO_CLOSE_SOURCE); - } - - @Override - public Chunkable encode(NetworkMessage message) throws Exception { - ValidatorHelper.failOnError(log, validator.validate(message)); - - return new Chunkable(message.getMessageId(), writer, message); + reader = namespacedStorageProvider.injectIntoNew(objectMapper.readerFor(NetworkMessage.class)).without(Feature.AUTO_CLOSE_SOURCE); } @Override @@ -42,4 +36,11 @@ public NetworkMessage decode(ChunkedMessage message) throws Exception { return (NetworkMessage) obj; } } + + @Override + public Chunkable encode(NetworkMessage message) throws Exception { + ValidatorHelper.failOnError(log, validator.validate(message)); + + return new Chunkable(message.getMessageId(), writer, message); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java index 
e49a9554ba..53a354b7ba 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/ResultUtil.java @@ -5,18 +5,18 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Locale; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.core.HttpHeaders; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.auth.permissions.Ability; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.util.io.FileUtil; import com.google.common.base.Strings; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.core.HttpHeaders; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -77,9 +77,9 @@ public static void checkSingleTableResult(ManagedExecution exec) { public static void authorizeExecutable(Subject subject, ManagedExecution exec) { - final Dataset dataset = exec.getDataset(); - subject.authorize(dataset, Ability.READ); - subject.authorize(dataset, Ability.DOWNLOAD); + final DatasetId datasetId = exec.getDataset(); + subject.authorize(datasetId, Ability.READ); + subject.authorize(datasetId, Ability.DOWNLOAD); subject.authorize(exec, Ability.READ); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java index 376d1dcb17..68d0f4b245 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/arrow/ResultArrowProcessor.java 
@@ -11,14 +11,18 @@ import java.util.Locale; import java.util.OptionalLong; import java.util.function.Function; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ArrowConfig; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -28,10 +32,6 @@ import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.arrow.vector.VectorSchemaRoot; @@ -84,15 +84,15 @@ public static Response getArrow ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); - log.info("Downloading results for {}", exec.getId()); + log.info("Downloading results for {}", datasetId); ResultUtil.authorizeExecutable(subject, exec); // Get the locale extracted by the LocaleFilter - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, config.getIdColumns().getIds()); final Locale locale = 
I18n.LOCALE.get(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java index 261ca45f1e..3492cee595 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/csv/ResultCsvProcessor.java @@ -8,13 +8,18 @@ import java.nio.charset.Charset; import java.util.Locale; import java.util.OptionalLong; +import jakarta.inject.Inject; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -23,11 +28,6 @@ import com.bakdata.conquery.resources.ResourceConstants; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.io.EofException; @@ -41,9 +41,9 @@ public class ResultCsvProcessor { public Response createResult(Subject subject, E exec, boolean pretty, Charset charset, OptionalLong limit) { - final Dataset dataset = exec.getDataset(); + final 
DatasetId datasetId = exec.getDataset(); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); ConqueryMDC.setLocation(subject.getName()); log.info("Downloading results for {}", exec.getId()); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java index ec29adb030..3169c321be 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java @@ -12,11 +12,13 @@ import c10n.C10N; import com.bakdata.conquery.internationalization.ExcelSheetNameC10n; +import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ExcelConfig; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.mapping.PrintIdMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; @@ -93,8 +95,8 @@ private void setMetaData(E exec final POIXMLProperties.CoreProperties coreProperties = workbook.getXSSFWorkbook().getProperties().getCoreProperties(); coreProperties.setTitle(exec.getLabelWithoutAutoLabelSuffix()); - final User owner = exec.getOwner(); - coreProperties.setCreator(owner != null ? owner.getLabel() : config.getApplicationName()); + final UserId owner = exec.getOwner(); + coreProperties.setCreator(owner != null ? 
owner.resolve().getLabel() : config.getApplicationName()); coreProperties.setKeywords(String.join(" ", exec.getTags())); final POIXMLProperties.ExtendedProperties extendedProperties = workbook.getXSSFWorkbook().getProperties().getExtendedProperties(); extendedProperties.setApplication(config.getApplicationName()); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java index cb0c79d25c..18a0205c2b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java @@ -4,14 +4,18 @@ import java.util.Locale; import java.util.OptionalLong; +import jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.ExcelConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -20,10 +24,6 @@ import com.bakdata.conquery.resources.ResourceConstants; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -42,13 +42,13 
@@ public Response createResult(Su ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); log.info("Downloading results for {}", exec.getId()); ResultUtil.authorizeExecutable(subject, exec); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = datasetRegistry.get(datasetId); final IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, conqueryConfig.getIdColumns().getIds()); final Locale locale = I18n.LOCALE.get(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java index c8ed12696c..ef14d4f10b 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/external/ExternalResultProcessor.java @@ -20,7 +20,7 @@ public Response getResult(Subject subject, ExternalExecution execution, String f ResultUtil.authorizeExecutable(subject, execution); - ExecutionManager executionManager = datasetRegistry.get(execution.getDataset().getId()).getExecutionManager(); + ExecutionManager executionManager = datasetRegistry.get(execution.getDataset()).getExecutionManager(); ExternalState externalResult = executionManager.getResult(execution.getId()); return externalResult.fetchExternalResult(fileName); diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java index 33ce54324a..f6c75099c1 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/io/result/parquet/ResultParquetProcessor.java @@ -4,13 +4,17 @@ import java.util.Locale; import java.util.OptionalLong; +import 
jakarta.inject.Inject; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.StreamingOutput; import com.bakdata.conquery.io.result.ResultUtil; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; @@ -20,10 +24,6 @@ import com.bakdata.conquery.resources.api.ResultParquetResource; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.io.IdColumnUtil; -import jakarta.inject.Inject; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; -import jakarta.ws.rs.core.StreamingOutput; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -32,14 +32,14 @@ public class ResultParquetProcessor { public static final MediaType PARQUET_MEDIA_TYPE = MediaType.valueOf(ResultParquetResource.PARQUET_MEDIA_TYPE_STRING); - private final DatasetRegistry datasetRegistry; + private final DatasetRegistry datasetRegistry; private final ConqueryConfig config; public Response createResultFile(Subject subject, ManagedExecution exec, boolean pretty, OptionalLong limit) { ConqueryMDC.setLocation(subject.getName()); - final Dataset dataset = exec.getDataset(); + final DatasetId datasetId = exec.getDataset(); log.info("Downloading results for {}", exec.getId()); @@ -47,7 +47,7 @@ public Response createResultFile(Subject subject, ManagedExecution exec, boolean ResultUtil.checkSingleTableResult(exec); - final Namespace namespace = datasetRegistry.get(dataset.getId()); + final Namespace namespace = 
datasetRegistry.get(datasetId); final IdPrinter idPrinter = IdColumnUtil.getIdPrinter(subject, exec, namespace, config.getIdColumns().getIds()); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java index b9e9ba69ff..d23ef17c6f 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/ConqueryStorage.java @@ -3,7 +3,7 @@ import java.io.Closeable; import java.io.IOException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import lombok.extern.slf4j.Slf4j; @@ -11,16 +11,21 @@ @Slf4j public abstract class ConqueryStorage implements Closeable { - public abstract CentralRegistry getCentralRegistry(); - /** * @implSpec The order defines the order of loading. Dependencies should be modeled here. * @implNote If you implement this method, please do it always from scratch and not using calls to super, it can be quite annoying. */ public abstract ImmutableList getStores(); - public abstract void openStores(ObjectMapper objectMapper); - + /** + * Initializes the internal stores. + * Injects this storage into the provided object mapper. 
+ * + * @param objectMapper (optional) needed when the {@link com.bakdata.conquery.models.config.StoreFactory} deserializes objects + * @param metricRegistry + */ + public abstract void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry); + public final void loadData(){ for (ManagedStore store : getStores()) { store.loadData(); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java deleted file mode 100644 index 45d30b56b6..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/DirectIdentifiableStore.java +++ /dev/null @@ -1,72 +0,0 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; - -/** - * Registered items are directly referenced. 
Compare to {@link IdentifiableCachedStore} - */ -public class DirectIdentifiableStore> extends IdentifiableStore { - - public DirectIdentifiableStore(CentralRegistry centralRegistry, Store, VALUE> store) { - super(store, centralRegistry); - } - - @Override - protected Id extractKey(VALUE value) { - return (Id) value.getId(); - } - - @Override - protected void removed(VALUE value) { - try { - if (value == null) { - return; - } - - onRemove.accept(value); - centralRegistry.remove(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to remove " + value, e); - } - } - - @Override - protected void added(VALUE value) { - try { - if (value == null) { - return; - } - - centralRegistry.register(value); - onAdd.accept(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to add " + value, e); - } - } - - @Override - protected void updated(VALUE value) { - try { - if (value == null) { - return; - } - final Optional> old = centralRegistry.getOptional(value.getId()); - - if (old.isPresent()) { - onRemove.accept((VALUE) old.get()); - } - - centralRegistry.update(value); - onAdd.accept(value); - } - catch (Exception e) { - throw new RuntimeException("Failed to add " + value, e); - } - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java b/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java new file mode 100644 index 0000000000..a7a6d30b07 --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/FailingProvider.java @@ -0,0 +1,24 @@ +package com.bakdata.conquery.io.storage; + +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class FailingProvider implements NamespacedStorageProvider 
{ + + public final static FailingProvider INSTANCE = new FailingProvider(); + public static final String ERROR_MSG = "Cannot be used in this environment. This id '%s' cannot be resolved on this node."; + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this); + } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + throw new UnsupportedOperationException(ERROR_MSG.formatted(datasetId)); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java deleted file mode 100644 index a53be0d30e..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableCachedStore.java +++ /dev/null @@ -1,77 +0,0 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Optional; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import lombok.Getter; -import lombok.Setter; -import lombok.experimental.Accessors; - -/** - * Registers accessors of values instead of the value itself to the central registry. - * Might be useful if the object are very large and should only be loaded on demand. 
- */ -@Accessors(fluent=true) @Setter @Getter -public class IdentifiableCachedStore> extends IdentifiableStore { - - public IdentifiableCachedStore(CentralRegistry centralRegistry, Store, VALUE> store) { - super(store, centralRegistry); - } - - @Override - protected Id extractKey(VALUE value) { - return (Id) value.getId(); - } - - @Override - protected void removed(VALUE value) { - try { - if(value != null) { - onRemove.accept(value); - centralRegistry.remove(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to remove "+value, e); - } - } - - @Override - protected void added(VALUE value) { - try { - if(value != null) { - final Id key = extractKey(value); - centralRegistry.registerCacheable(key, this::get); - onAdd.accept(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to add "+value, e); - } - } - - @Override - protected void updated(VALUE value) { - try { - if(value != null) { - final Id key = extractKey(value); - final Optional oldOpt = centralRegistry.updateCacheable(key, this::get); - if (oldOpt.isPresent()) { - final VALUE old = (VALUE) oldOpt.get(); - onRemove.accept(old); - } - onAdd.accept(value); - } - } catch(Exception e) { - throw new RuntimeException("Failed to add "+value, e); - } - } - - @Override - public void loadData() { - store.loadData(); - for (Id key : getAllKeys()) { - centralRegistry.registerCacheable(key, this::get); - } - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java index 7fe75b7766..08076fef95 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/IdentifiableStore.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.io.storage; import com.bakdata.conquery.io.storage.xodus.stores.KeyIncludingStore; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import 
com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.util.functions.ThrowingConsumer; @@ -11,16 +10,14 @@ import lombok.experimental.Accessors; /** - * Store for Identifiable values. Directly delegates all changes to the supplied {@link CentralRegistry}. + * Store for Identifiable values. *

- * The {@link ThrowingConsumer}s can be used to reflect/model dependencies of the identifiable values inside the store. For example {@link com.bakdata.conquery.models.datasets.concepts.Concept} holds multiple {@link com.bakdata.conquery.models.datasets.concepts.Connector}s where a deletion of a concept requires the deletion of the Conncetors as well. {@link NamespacedStorage} is the main user of those two methods and should be looked at if desired. + * The {@link ThrowingConsumer}s can be used to reflect/model dependencies of the identifiable values inside the store. For example {@link com.bakdata.conquery.models.datasets.concepts.Concept} holds multiple {@link com.bakdata.conquery.models.datasets.concepts.Connector}s where a deletion of a concept requires the deletion of the Connectors as well. {@link NamespacedStorageImpl} is the main user of those two methods and should be looked at if desired.
@NonNull @@ -31,17 +28,60 @@ public abstract class IdentifiableStore> extends K protected ThrowingConsumer onRemove = (v) -> { }; - public IdentifiableStore(Store, VALUE> store, CentralRegistry centralRegistry) { + public IdentifiableStore(Store, VALUE> store) { super(store); - this.centralRegistry = centralRegistry; + } + + + @Override + protected Id extractKey(VALUE value) { + return (Id) value.getId(); + } + + @Override + protected void removed(VALUE value) { + try { + if (value == null) { + return; + } + + onRemove.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to remove " + value, e); + } } @Override - protected abstract Id extractKey(VALUE value); + protected void added(VALUE value) { + try { + if (value == null) { + return; + } - @Override - protected abstract void removed(VALUE value); + onAdd.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to add " + value, e); + } + } - @Override - protected abstract void added(VALUE value); + @Override + protected void updated(VALUE value) { + try { + if (value == null) { + return; + } + final VALUE old = store.get((Id) value.getId()); + + if (old != null) { + onRemove.accept(old); + } + + onAdd.accept(value); + } + catch (Exception e) { + throw new RuntimeException("Failed to add " + value, e); + } + } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java index d0cf222cef..a7e4285a53 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/MetaStorage.java @@ -1,6 +1,6 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; +import java.util.stream.Stream; import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.MutableInjectableValues; @@ -10,40 +10,42 @@ import com.bakdata.conquery.models.config.StoreFactory; import 
com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.*; +import com.bakdata.conquery.models.identifiable.IdResolvingException; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.MetaId; +import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.RoleId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +/** + * Access to persisted entities that are not namespace/dataset crucial (see {@link NamespacedStorageImpl}). + * All entities are loaded through a cache. The cache can be configured through the StoreFactory. 
+ */ @Slf4j @RequiredArgsConstructor public class MetaStorage extends ConqueryStorage implements Injectable { - @Getter - protected final CentralRegistry centralRegistry = new CentralRegistry(); private final StoreFactory storageFactory; - private IdentifiableStore executions; private IdentifiableStore formConfigs; private IdentifiableStore authUser; private IdentifiableStore authRole; private IdentifiableStore authGroup; - public void openStores(ObjectMapper mapper) { - authUser = storageFactory.createUserStore(centralRegistry, "meta", this, mapper); - authRole = storageFactory.createRoleStore(centralRegistry, "meta", this, mapper); - authGroup = storageFactory.createGroupStore(centralRegistry, "meta", this, mapper); - // Executions depend on users - executions = storageFactory.createExecutionsStore(centralRegistry, "meta", mapper); - formConfigs = storageFactory.createFormConfigStore(centralRegistry, "meta", mapper); - + public static MetaStorage get(DeserializationContext ctxt) throws JsonMappingException { + return (MetaStorage) ctxt + .findInjectableValue(MetaStorage.class.getName(), null, null); } @Override @@ -63,12 +65,21 @@ public ImmutableList getStores() { ); } - @Override - public void clear() { - super.clear(); - centralRegistry.clear(); + public void openStores(ObjectMapper mapper, MetricRegistry metricRegistry) { + if (mapper != null) { + this.injectInto(mapper); + } + authUser = storageFactory.createUserStore("meta", mapper); + authRole = storageFactory.createRoleStore("meta", mapper); + authGroup = storageFactory.createGroupStore("meta", mapper); + // Executions depend on users + executions = storageFactory.createExecutionsStore("meta", mapper); + formConfigs = storageFactory.createFormConfigStore("meta", mapper); + } + // Executions + public void addExecution(ManagedExecution query) { executions.add(query); } @@ -77,19 +88,21 @@ public ManagedExecution getExecution(ManagedExecutionId id) { return executions.get(id); } - public Collection 
getAllExecutions() { - return executions.getAll(); + public Stream getAllExecutions() { + return executions.getAllKeys().map(executions::get); } - public void updateExecution(ManagedExecution query) { + public synchronized void updateExecution(ManagedExecution query) { executions.update(query); } - public void removeExecution(ManagedExecutionId id) { + public synchronized void removeExecution(ManagedExecutionId id) { executions.remove(id); } - public void addGroup(Group group) { + // Groups + + public synchronized void addGroup(Group group) { log.info("Adding group = {}", group.getId()); authGroup.add(group); } @@ -100,8 +113,8 @@ public Group getGroup(GroupId groupId) { return group; } - public Collection getAllGroups() { - return authGroup.getAll(); + public Stream getAllGroups() { + return authGroup.getAllKeys().map(authGroup::get); } public void removeGroup(GroupId id) { @@ -109,12 +122,14 @@ public void removeGroup(GroupId id) { authGroup.remove(id); } - public void updateGroup(Group group) { + public synchronized void updateGroup(Group group) { log.info("Updating group = {}", group.getId()); authGroup.update(group); } - public void addUser(User user) { + // User + + public synchronized void addUser(User user) { log.info("Adding user = {}", user.getId()); authUser.add(user); } @@ -125,21 +140,23 @@ public User getUser(UserId userId) { return user; } - public Collection getAllUsers() { - return authUser.getAll(); + public Stream getAllUsers() { + return authUser.getAllKeys().map(authUser::get); } - public void removeUser(UserId userId) { + public synchronized void removeUser(UserId userId) { log.info("Removing user = {}", userId); authUser.remove(userId); } - public void updateUser(User user) { + public synchronized void updateUser(User user) { log.info("Updating user = {}", user.getId()); authUser.update(user); } - public void addRole(Role role) { + // Roles + + public synchronized void addRole(Role role) { authRole.add(role); } @@ -149,49 +166,72 @@ public 
Role getRole(RoleId roleId) { return role; } - public Collection getAllRoles() { - return authRole.getAll(); + public Stream getAllRoles() { + return authRole.getAllKeys().map(authRole::get); } - public void removeRole(RoleId roleId) { + public synchronized void removeRole(RoleId roleId) { log.info("Removing role = {}", roleId); authRole.remove(roleId); } - public void updateRole(Role role) { + public synchronized void updateRole(Role role) { log.info("Updating role = {}", role.getId()); authRole.update(role); } + // FormConfigs + public FormConfig getFormConfig(FormConfigId id) { return formConfigs.get(id); } - public Collection getAllFormConfigs() { - return formConfigs.getAll(); + public Stream getAllFormConfigs() { + return formConfigs.getAllKeys().map(formConfigs::get); } - public void removeFormConfig(FormConfigId id) { + public synchronized void removeFormConfig(FormConfigId id) { formConfigs.remove(id); } @SneakyThrows - public void updateFormConfig(FormConfig formConfig) { + public synchronized void updateFormConfig(FormConfig formConfig) { formConfigs.update(formConfig); } @SneakyThrows - public void addFormConfig(FormConfig formConfig) { + public synchronized void addFormConfig(FormConfig formConfig) { formConfigs.add(formConfig); } + // Utility @Override public MutableInjectableValues inject(MutableInjectableValues values) { return values.add(MetaStorage.class, this); } - public static MetaStorage get(DeserializationContext ctxt) throws JsonMappingException { - return (MetaStorage) ctxt.findInjectableValue(MetaStorage.class.getName(), null, null); + /** + * Almost identical to {@link MetaStorage#get(Id)}, but throws an IdResolvingException if no object could be resolved. + * @return the object or throws an {@link IdResolvingException} if the Object could not be resolved. 
+ */ + public & MetaId, VALUE> VALUE resolve(ID id) { + try { + VALUE o = get(id); + if (o == null) { + throw new IdResolvingException(id); + } + return o; + } + catch (IdResolvingException e) { + throw e; + } + catch (Exception e) { + throw new IdResolvingException(id, e); + } + } + + public & MetaId, VALUE> VALUE get(ID id) { + return (VALUE) id.get(this); } } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java index 4cd1c80f4a..a761782331 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/ModificationShieldedWorkerStorage.java @@ -1,74 +1,135 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; - +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import 
com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; import lombok.RequiredArgsConstructor; import lombok.ToString; +import lombok.experimental.Delegate; /** * Provides a view on the storage that does not allow modification of the storage (update, delete). */ @RequiredArgsConstructor @ToString(of = "delegate") -public class ModificationShieldedWorkerStorage { +public class ModificationShieldedWorkerStorage implements WorkerStorage, Injectable { + @Delegate private final WorkerStorage delegate; - public CentralRegistry getCentralRegistry() { - return delegate.getCentralRegistry(); + @Override + public void addCBlock(CBlock cBlock) { + throw new UnsupportedOperationException(); } + @Override + public void removeCBlock(CBlockId id) { + throw new UnsupportedOperationException(); + } - public Import getImport(ImportId id) { - return delegate.getImport(id); + @Override + public void addBucket(Bucket bucket) { + throw new UnsupportedOperationException(); } - public Collection getAllImports() { - return delegate.getAllImports(); + @Override + public void removeBucket(BucketId id) { + throw new UnsupportedOperationException(); } + @Override + public void setWorker(WorkerInformation worker) { + throw new UnsupportedOperationException(); + } + @Override + public void updateWorker(WorkerInformation worker) { + throw new UnsupportedOperationException(); + } - public Dataset getDataset() { - return delegate.getDataset(); + @Override + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + throw new UnsupportedOperationException(); } + @Override + public void removeStorage() { + throw new UnsupportedOperationException(); + } - public Collection> getAllConcepts() { - return delegate.getAllConcepts(); + @Override + public void addImport(Import imp) { + throw new UnsupportedOperationException(); } + @Override + public void updateImport(Import imp) { + throw new UnsupportedOperationException(); + } - public Bucket 
getBucket(BucketId id) { - return delegate.getBucket(id); + @Override + public void removeImport(ImportId id) { + throw new UnsupportedOperationException(); } + @Override + public void updateDataset(Dataset dataset) { + throw new UnsupportedOperationException(); + } - public Collection getAllBuckets() { - return delegate.getAllBuckets(); + @Override + public void addTable(Table table) { + throw new UnsupportedOperationException(); } + @Override + public void removeTable(TableId table) { + throw new UnsupportedOperationException(); + } - public Collection getAllCBlocks() { - return delegate.getAllCBlocks(); + @Override + public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { + throw new UnsupportedOperationException(); } - public Table getTable(TableId tableId){ - return delegate.getTable(tableId); + @Override + public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { + throw new UnsupportedOperationException(); } - public Concept getConcept(ConceptId conceptId) { - return delegate.getConcept(conceptId); + @Override + public void updateConcept(Concept concept) { + throw new UnsupportedOperationException(); } + + @Override + public void removeConcept(ConceptId id) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + throw new UnsupportedOperationException(); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(WorkerStorageImpl.class, this); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java index f22cc675e9..7a1b52173a 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespaceStorage.java @@ -1,11 +1,9 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; import java.util.Objects; 
+import java.util.stream.Stream; -import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.datasets.PreviewConfig; @@ -16,14 +14,13 @@ import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; import com.bakdata.conquery.models.worker.WorkerToBucketsMap; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import lombok.ToString; import lombok.extern.slf4j.Slf4j; @Slf4j -@ToString -public class NamespaceStorage extends NamespacedStorage implements Injectable { +public class NamespaceStorage extends NamespacedStorageImpl { protected IdentifiableStore internToExternMappers; protected IdentifiableStore searchIndexes; @@ -32,7 +29,7 @@ public class NamespaceStorage extends NamespacedStorage implements Injectable { protected SingletonStore preview; protected SingletonStore workerToBuckets; - protected CachedStore entity2Bucket; + protected Store entity2Bucket; public NamespaceStorage(StoreFactory storageFactory, String pathName) { super(storageFactory, pathName); @@ -44,18 +41,24 @@ private void decorateIdMapping(SingletonStore idMapping) { .onAdd(mapping -> mapping.setStorage(this)); } + private void decorateInternToExternMappingStore(IdentifiableStore store) { + // We don't call internToExternMapper::init this is done by the first select that needs the mapping + } + + @Override - public void openStores(ObjectMapper objectMapper) { - super.openStores(objectMapper); + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + super.openStores(objectMapper, metricRegistry); - internToExternMappers = 
getStorageFactory().createInternToExternMappingStore(super.getPathName(), getCentralRegistry(), objectMapper); - searchIndexes = getStorageFactory().createSearchIndexStore(super.getPathName(), getCentralRegistry(), objectMapper); + internToExternMappers = getStorageFactory().createInternToExternMappingStore(super.getPathName(), objectMapper); + searchIndexes = getStorageFactory().createSearchIndexStore(super.getPathName(), objectMapper); idMapping = getStorageFactory().createIdMappingStore(super.getPathName(), objectMapper); - structure = getStorageFactory().createStructureStore(super.getPathName(), getCentralRegistry(), objectMapper); + structure = getStorageFactory().createStructureStore(super.getPathName(), objectMapper); workerToBuckets = getStorageFactory().createWorkerToBucketsStore(super.getPathName(), objectMapper); - preview = getStorageFactory().createPreviewStore(super.getPathName(), getCentralRegistry(), objectMapper); + preview = getStorageFactory().createPreviewStore(super.getPathName(), objectMapper); entity2Bucket = getStorageFactory().createEntity2BucketStore(super.getPathName(), objectMapper); + decorateInternToExternMappingStore(internToExternMappers); decorateIdMapping(idMapping); } @@ -83,17 +86,18 @@ public ImmutableList getStores() { } - + // IdMapping public EntityIdMap getIdMapping() { return idMapping.get(); } - public void updateIdMapping(EntityIdMap idMapping) { this.idMapping.update(idMapping); } + // Bucket to Worker Assignment + public void setWorkerToBucketsMap(WorkerToBucketsMap map) { workerToBuckets.update(map); } @@ -115,6 +119,7 @@ public void registerEntity(String entity, int bucket) { entity2Bucket.update(entity, bucket); } + // Structure public StructureNode[] getStructure() { return Objects.requireNonNullElseGet(structure.get(), () -> new StructureNode[0]); @@ -124,7 +129,13 @@ public void updateStructure(StructureNode[] structure) { this.structure.update(structure); } + // InternToExternMappers + public InternToExternMapper 
getInternToExternMapper(InternToExternMapperId id) { + return getInternToExternMapperFromStorage(id); + } + + private InternToExternMapper getInternToExternMapperFromStorage(InternToExternMapperId id) { return internToExternMappers.get(id); } @@ -136,26 +147,34 @@ public void removeInternToExternMapper(InternToExternMapperId id) { internToExternMappers.remove(id); } - public Collection getInternToExternMappers() { + public Stream getInternToExternMappers() { return internToExternMappers.getAll(); } - public void removeSearchIndex(SearchIndexId id) { - searchIndexes.remove(id); - } + // SearchIndices public SearchIndex getSearchIndex(SearchIndexId id) { + return getSearchIndexFromStorage(id); + } + + private SearchIndex getSearchIndexFromStorage(SearchIndexId id) { return searchIndexes.get(id); } + public void removeSearchIndex(SearchIndexId id) { + searchIndexes.remove(id); + } + public void addSearchIndex(SearchIndex searchIndex) { searchIndexes.add(searchIndex); } - public Collection getSearchIndices() { + public Stream getSearchIndices() { return searchIndexes.getAll(); } + // PreviewConfig + public void setPreviewConfig(PreviewConfig previewConfig){ preview.update(previewConfig); } @@ -168,6 +187,7 @@ public void removePreviewConfig() { preview.remove(); } + // Utilities @Override public MutableInjectableValues inject(MutableInjectableValues values) { diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java index 25baf5df05..e934f53621 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorage.java @@ -1,231 +1,68 @@ -package com.bakdata.conquery.io.storage; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import com.bakdata.conquery.io.jackson.Injectable; -import 
com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; -import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.Connector; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; -import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; -import com.bakdata.conquery.models.identifiable.ids.specific.TableId; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import lombok.Getter; -import lombok.SneakyThrows; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - -/** - * Overlapping storage structure for {@link WorkerStorage} and {@link NamespaceStorage}. - * The reason for the overlap ist primarily that all this stored members are necessary in the - * SerDes communication between the manager and the shards/worker for the resolving of ids included in - * messages (see also {@link com.bakdata.conquery.io.jackson.serializer.NsIdRef}). 
- */ -@Slf4j -@ToString(onlyExplicitlyIncluded = true) -public abstract class NamespacedStorage extends ConqueryStorage implements Injectable { - - @Getter - protected final CentralRegistry centralRegistry = new CentralRegistry(); - @Getter - @ToString.Include - private final String pathName; - @Getter - private final StoreFactory storageFactory; - - protected SingletonStore dataset; - protected IdentifiableStore secondaryIds; - protected IdentifiableStore

tables; - protected IdentifiableStore imports; - protected IdentifiableStore> concepts; - - public NamespacedStorage(StoreFactory storageFactory, String pathName) { - this.pathName = pathName; - this.storageFactory = storageFactory; - } - - public void openStores(ObjectMapper objectMapper) { - // Before we start to parse the stores we need to replace the injected value for the IdResolveContext (from DatasetRegistry to this centralRegistry) - new SingletonNamespaceCollection(centralRegistry).injectInto(objectMapper); - this.injectInto(objectMapper); - - dataset = storageFactory.createDatasetStore(pathName, objectMapper); - secondaryIds = storageFactory.createSecondaryIdDescriptionStore(centralRegistry, pathName, objectMapper); - tables = storageFactory.createTableStore(centralRegistry, pathName, objectMapper); - imports = storageFactory.createImportStore(centralRegistry, pathName, objectMapper); - concepts = storageFactory.createConceptStore(centralRegistry, pathName, objectMapper); - - decorateDatasetStore(dataset); - decorateTableStore(tables); - decorateConceptStore(concepts); - } - - @Override - public ImmutableList getStores() { - return ImmutableList.of(dataset, secondaryIds, tables, imports, concepts); - } - - @Override - public void clear() { - super.clear(); - centralRegistry.clear(); - } - - private void decorateDatasetStore(SingletonStore store) { - store.onAdd(centralRegistry::register).onRemove(centralRegistry::remove); - } - - private void decorateTableStore(IdentifiableStore
store) { - store.onAdd(table -> { - for (Column column : table.getColumns()) { - column.init(); - getCentralRegistry().register(column); - } - }) - .onRemove(table -> { - for (Column c : table.getColumns()) { - getCentralRegistry().remove(c); - } - }); - } - - private void decorateConceptStore(IdentifiableStore> store) { - store.onAdd(concept -> { - - if (concept.getDataset() == null) { - throw new IllegalStateException("Concept had no dataset set"); - } - - if (!concept.getDataset().equals(dataset.get())) { - throw new IllegalStateException("Concept is not for this dataset."); - } - - concept.getSelects().forEach(centralRegistry::register); - for (Connector connector : concept.getConnectors()) { - centralRegistry.register(connector); - connector.collectAllFilters().forEach(centralRegistry::register); - connector.getSelects().forEach(centralRegistry::register); - connector.getValidityDates().forEach(centralRegistry::register); - } - - - if (concept instanceof TreeConcept) { - ((TreeConcept) concept).getAllChildren().forEach(centralRegistry::register); - } - }).onRemove(concept -> { - concept.getSelects().forEach(centralRegistry::remove); - //see #146 remove from Dataset.concepts - for (Connector connector : concept.getConnectors()) { - connector.getSelects().forEach(centralRegistry::remove); - connector.collectAllFilters().forEach(centralRegistry::remove); - connector.getValidityDates().forEach(centralRegistry::remove); - centralRegistry.remove(connector); - } - - if (concept instanceof TreeConcept) { - ((TreeConcept) concept).getAllChildren().forEach(centralRegistry::remove); - } - }); - } - - public void addImport(Import imp) { - imports.add(imp); - } - - public Import getImport(ImportId id) { - return imports.get(id); - } - - public Collection getAllImports() { - return imports.getAll(); - } - - public void updateImport(Import imp) { - imports.update(imp); - } - - public void removeImport(ImportId id) { - imports.remove(id); - } - - public Dataset getDataset() { 
- return dataset.get(); - } - - public void updateDataset(Dataset dataset) { - this.dataset.update(dataset); - } - - public List
getTables() { - return new ArrayList<>(tables.getAll()); - } - - public Table getTable(TableId tableId) { - return tables.get(tableId); - } - - public void addTable(Table table) { - tables.add(table); - } - - public void removeTable(TableId table) { - tables.remove(table); - } - - public List getSecondaryIds() { - return new ArrayList<>(secondaryIds.getAll()); - } - - public SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId) { - return secondaryIds.get(descriptionId); - } - - public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { - secondaryIds.add(secondaryIdDescription); - } - - public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { - secondaryIds.remove(secondaryIdDescriptionId); - } - - public Concept getConcept(ConceptId id) { - return concepts.get(id); - } - - public boolean hasConcept(ConceptId id) { - return concepts.get(id) != null; - } - - @SneakyThrows - public void updateConcept(Concept concept) { - concepts.update(concept); - } - - public void removeConcept(ConceptId id) { - concepts.remove(id); - } - - public Collection> getAllConcepts() { - return concepts.getAll(); - } - - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return values.add(NamespacedStorage.class, this); - } -} +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; + +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import lombok.SneakyThrows; + +public interface NamespacedStorage extends NamespacedStorageProvider { + void addImport(Import imp); + + Import getImport(ImportId id); + + Stream getAllImports(); + + void updateImport(Import imp); + + void removeImport(ImportId id); + + void updateDataset(Dataset dataset); + + Table getTable(TableId tableId); + + Stream
getTables(); + + void addTable(Table table); + + void removeTable(TableId table); + + SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId); + + Stream getSecondaryIds(); + + void addSecondaryId(SecondaryIdDescription secondaryIdDescription); + + void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId); + + Concept getConcept(ConceptId id); + + Stream> getAllConcepts(); + + boolean hasConcept(ConceptId id); + + @SneakyThrows + void updateConcept(Concept concept); + + void removeConcept(ConceptId id); + + @Override + default NamespacedStorage getStorage(DatasetId datasetId) { + if (getDataset() == null || datasetId.getName().equals(getDataset().getName())) { + // Storage was empty (new Worker/Namespace) or it matches + return this; + } + return null; + } + + Dataset getDataset(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java new file mode 100644 index 0000000000..0e0555a94c --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/NamespacedStorageImpl.java @@ -0,0 +1,249 @@ +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; +import com.bakdata.conquery.models.config.StoreFactory; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.NamespacedId; 
+import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import lombok.Getter; +import lombok.SneakyThrows; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + +/** + * Overlapping storage structure for {@link WorkerStorageImpl} and {@link NamespaceStorage}. + * The reason for the overlap ist primarily that all this stored members are necessary in the + * SerDes communication between the manager and the shards/worker for the resolving of ids included in + * messages. + */ +@Slf4j +@ToString(onlyExplicitlyIncluded = true) +public abstract class NamespacedStorageImpl extends ConqueryStorage implements Injectable, NamespacedStorage { + + @Getter + @ToString.Include + private final String pathName; + @Getter + private final StoreFactory storageFactory; + + protected SingletonStore dataset; + protected IdentifiableStore secondaryIds; + protected IdentifiableStore
tables; + protected IdentifiableStore imports; + protected IdentifiableStore> concepts; + + public NamespacedStorageImpl(StoreFactory storageFactory, String pathName) { + this.pathName = pathName; + this.storageFactory = storageFactory; + } + + @Override + public ImmutableList getStores() { + return ImmutableList.of(dataset, secondaryIds, tables, imports, concepts); + } + + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + if (objectMapper != null) { + injectInto(objectMapper); + } + + dataset = storageFactory.createDatasetStore(pathName, objectMapper); + secondaryIds = storageFactory.createSecondaryIdDescriptionStore(pathName, objectMapper); + tables = storageFactory.createTableStore(pathName, objectMapper); + imports = storageFactory.createImportStore(pathName, objectMapper); + concepts = storageFactory.createConceptStore(pathName, objectMapper); + + decorateDatasetStore(dataset); + decorateSecondaryIdDescriptionStore(secondaryIds); + decorateTableStore(tables); + decorateImportStore(imports); + decorateConceptStore(concepts); + } + + private void decorateDatasetStore(SingletonStore store) { + } + + private void decorateSecondaryIdDescriptionStore(IdentifiableStore store) { + // Nothing to decorate + } + + private void decorateTableStore(IdentifiableStore
store) { + + } + + private void decorateImportStore(IdentifiableStore store) { + // Intentionally left blank + } + + private void decorateConceptStore(IdentifiableStore> store) { + store.onAdd(concept -> { + + if (concept.getDataset() != null && !concept.getDataset().equals(dataset.get().getId())) { + throw new IllegalStateException("Concept is not for this dataset."); + } + + concept.setDataset(dataset.get().getId()); + + }); + } + + // Imports + + @Override + public void addImport(Import imp) { + imports.add(imp); + } + + @Override + public Import getImport(ImportId id) { + return getImportFromStorage(id); + } + + private Import getImportFromStorage(ImportId id) { + return imports.get(id); + } + + @Override + public Stream getAllImports() { + return imports.getAll(); + } + + @Override + public void updateImport(Import imp) { + imports.update(imp); + } + + @Override + public void removeImport(ImportId id) { + imports.remove(id); + } + + // Datasets + + @Override + public void updateDataset(Dataset dataset) { + this.dataset.update(dataset); + } + public & NamespacedId, VALUE> VALUE get(ID id) { + return (VALUE) id.get(this); + } +@Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this). + add(NamespacedStorage.class, this); + }@Override + public Dataset getDataset() { + return dataset.get(); + } + + + + // Tables + + @Override + public Table getTable(TableId tableId) { + return getTableFromStorage(tableId); + } + + private Table getTableFromStorage(TableId tableId) { + return tables.get(tableId); + } + + @Override + public Stream
getTables() { + return tables.getAllKeys().map(TableId.class::cast).map(this::getTable); + } + + + @Override + public void addTable(Table table) { + tables.add(table); + } + + @Override + public void removeTable(TableId table) { + tables.remove(table); + } + + // SecondaryId + + @Override + public SecondaryIdDescription getSecondaryId(SecondaryIdDescriptionId descriptionId) { + return getSecondaryIdFromStorage(descriptionId); + } + + private SecondaryIdDescription getSecondaryIdFromStorage(SecondaryIdDescriptionId descriptionId) { + return secondaryIds.get(descriptionId); + } + + @Override + public Stream getSecondaryIds() { + return secondaryIds.getAllKeys().map(SecondaryIdDescriptionId.class::cast).map(this::getSecondaryId); + } + + @Override + public void addSecondaryId(SecondaryIdDescription secondaryIdDescription) { + secondaryIds.add(secondaryIdDescription); + } + + @Override + public void removeSecondaryId(SecondaryIdDescriptionId secondaryIdDescriptionId) { + secondaryIds.remove(secondaryIdDescriptionId); + } + + // Concepts + + @Override + public Concept getConcept(ConceptId id) { + return getConceptFromStorage(id); + } + + private Concept getConceptFromStorage(ConceptId id) { + return concepts.get(id); + } + + @Override + public Stream> getAllConcepts() { + return concepts.getAllKeys().map(ConceptId.class::cast).map(this::getConcept); + } + + @Override + public boolean hasConcept(ConceptId id) { + return concepts.get(id) != null; + } + + @Override + @SneakyThrows + public void updateConcept(Concept concept) { + log.debug("Updating Concept[{}]", concept.getId()); + concepts.update(concept); + } + + @Override + public void removeConcept(ConceptId id) { + log.debug("Removing Concept[{}]", id); + concepts.remove(id); + } + + // Utility + + + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java b/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java index 51e92011a6..0458b2f8d5 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/Store.java @@ -1,6 +1,6 @@ package com.bakdata.conquery.io.storage; -import java.util.Collection; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; @@ -14,17 +14,17 @@ public interface Store extends ManagedStore { // TODO: 08.01.2020 fk: Is this still necessary? The implementation in XodusStore uses different methods that in our context don't act differently. public void update(KEY key, VALUE value); - + public void remove(KEY key); public int count(); - public Collection getAll(); + public Stream getAll(); - public Collection getAllKeys(); + public Stream getAllKeys(); - /** + /** * Consumer of key-value pairs stored in this Store. Used in conjunction with for-each. */ @FunctionalInterface diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java b/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java index ef3483f264..f025319b58 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/StoreMappings.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.io.storage; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.io.storage.xodus.stores.StoreInfo; import com.bakdata.conquery.models.auth.entities.Group; @@ -17,7 +16,6 @@ import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import 
com.bakdata.conquery.models.identifiable.ids.specific.BucketId; @@ -50,7 +48,7 @@ */ @RequiredArgsConstructor @Getter -@ToString(of = {"name", "keyType", "valueType"}) +@ToString(of = {"keyType", "valueType"}) public enum StoreMappings { AUTH_GROUP(Group.class, GroupId.class), @@ -79,24 +77,10 @@ public enum StoreMappings { private final Class keyType; /** - * Store for identifiable values, with injectors. Store is also cached. + * Store for identifiable values, with injectors. */ - public static > DirectIdentifiableStore identifiable(Store, T> baseStore, CentralRegistry centralRegistry) { - return new DirectIdentifiableStore<>(centralRegistry, baseStore); - } - - /** - * General Key-Value store with caching. - */ - public static CachedStore cached(Store baseStore) { - return new CachedStore<>(baseStore); - } - - /** - * Identifiable store, that lazy registers items in the central registry. - */ - public static > IdentifiableCachedStore identifiableCachedStore(Store, T> baseStore, CentralRegistry centralRegistry) { - return new IdentifiableCachedStore(centralRegistry, baseStore); + public static > IdentifiableStore identifiable(Store, T> baseStore) { + return new IdentifiableStore<>(baseStore); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java index ac51d97c4b..128a708973 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorage.java @@ -1,121 +1,45 @@ -package com.bakdata.conquery.io.storage; - -import java.util.Collection; - -import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; -import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import 
com.bakdata.conquery.models.events.Bucket; -import com.bakdata.conquery.models.events.CBlock; -import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; -import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.worker.WorkerInformation; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -@ToString(of = "worker") -public class WorkerStorage extends NamespacedStorage implements Injectable { - - private SingletonStore worker; - private IdentifiableStore buckets; - private IdentifiableStore cBlocks; - - public WorkerStorage(StoreFactory storageFactory, String pathName) { - super(storageFactory, pathName); - } - - @Override - public void openStores(ObjectMapper objectMapper) { - super.openStores(objectMapper); - - worker = getStorageFactory().createWorkerInformationStore(getPathName(), objectMapper); - buckets = getStorageFactory().createBucketStore(centralRegistry, getPathName(), objectMapper); - cBlocks = getStorageFactory().createCBlockStore(centralRegistry, getPathName(), objectMapper); - } - - @Override - public ImmutableList getStores() { - return ImmutableList.of( - dataset, - secondaryIds, - tables, - imports, - concepts, - - worker, - buckets, - cBlocks - ); - } - - - public void addCBlock(CBlock cBlock) { - log.trace("Adding CBlock[{}]", cBlock.getId()); - cBlocks.add(cBlock); - } - - public CBlock getCBlock(CBlockId id) { - return cBlocks.get(id); - } - - public void removeCBlock(CBlockId id) { - log.trace("Removing CBlock[{}]", id); - cBlocks.remove(id); - } - - public Collection getAllCBlocks() { - return cBlocks.getAll(); - } - - public void addBucket(Bucket bucket) { - log.trace("Adding Bucket[{}]", bucket.getId()); - buckets.add(bucket); - } - - public Bucket getBucket(BucketId id) { - return 
buckets.get(id); - } - - public void removeBucket(BucketId id) { - log.trace("Removing Bucket[{}]", id); - buckets.remove(id); - } - - public Collection getAllBuckets() { - return buckets.getAll(); - } - - public WorkerInformation getWorker() { - return worker.get(); - } - - public void setWorker(WorkerInformation worker) { - this.worker.add(worker); - } - - public void updateWorker(WorkerInformation worker) { - this.worker.update(worker); - } - - //block manager overrides - public void updateConcept(Concept concept) { - log.debug("Updating Concept[{}]", concept.getId()); - concepts.update(concept); - } - - public void removeConcept(ConceptId id) { - log.debug("Removing Concept[{}]", id); - concepts.remove(id); - } - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return super.inject(values).add(WorkerStorage.class, this); - } -} +package com.bakdata.conquery.io.storage; + +import java.io.Closeable; +import java.util.stream.Stream; + +import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; + +public interface WorkerStorage extends NamespacedStorage, Closeable { + void addCBlock(CBlock cBlock); + + CBlock getCBlock(CBlockId id); + + void removeCBlock(CBlockId id); + + Stream getAllCBlocks(); + + Stream getAllCBlockIds(); + + void addBucket(Bucket bucket); + + Bucket getBucket(BucketId id); + + void removeBucket(BucketId id); + + Stream getAllBuckets(); + + Stream getAllBucketIds(); + + WorkerInformation getWorker(); + + void setWorker(WorkerInformation worker); + + void updateWorker(WorkerInformation worker); + + + void openStores(ObjectMapper objectMapper, MetricRegistry 
metricRegistry); + void loadData(); + void removeStorage(); +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java new file mode 100644 index 0000000000..118ed982eb --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/WorkerStorageImpl.java @@ -0,0 +1,153 @@ +package com.bakdata.conquery.io.storage; + +import java.util.stream.Stream; +import jakarta.validation.Validator; + +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; +import com.bakdata.conquery.models.config.StoreFactory; +import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.worker.WorkerInformation; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import lombok.ToString; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@ToString(of = "worker") +public class WorkerStorageImpl extends NamespacedStorageImpl implements WorkerStorage { + + private SingletonStore worker; + private IdentifiableStore buckets; + private IdentifiableStore cBlocks; + + public WorkerStorageImpl(StoreFactory storageFactory, Validator validator, String pathName) { + super(storageFactory, pathName); + } + + @Override + public ImmutableList getStores() { + return ImmutableList.of( + dataset, + secondaryIds, + tables, + imports, + concepts, + + worker, + buckets, + cBlocks + ); + } + + @Override + public void openStores(ObjectMapper objectMapper, MetricRegistry metricRegistry) { + super.openStores(objectMapper, metricRegistry); + + worker = getStorageFactory().createWorkerInformationStore(getPathName(), 
objectMapper); + buckets = getStorageFactory().createBucketStore(getPathName(), objectMapper); + cBlocks = getStorageFactory().createCBlockStore(getPathName(), objectMapper); + + decorateWorkerStore(worker); + decorateBucketStore(buckets); + decorateCBlockStore(cBlocks); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return super.inject(values).add(WorkerStorage.class, this); + } + + private void decorateWorkerStore(SingletonStore store) { + // Nothing to decorate + } + + private void decorateBucketStore(IdentifiableStore store) { + // Nothing to decorate + } + + // CBlocks + + private void decorateCBlockStore(IdentifiableStore baseStoreCreator) { + // Nothing to decorate + } + + @Override + public void addCBlock(CBlock cBlock) { + log.trace("Adding CBlock[{}]", cBlock.getId()); + cBlocks.add(cBlock); + } @Override + public CBlock getCBlock(CBlockId id) { + return cBlocks.get(id); + } + + @Override + public void removeCBlock(CBlockId id) { + log.trace("Removing CBlock[{}]", id); + cBlocks.remove(id); + } + + @Override + public Stream getAllCBlocks() { + return cBlocks.getAllKeys().map(CBlockId.class::cast).map(this::getCBlock); + } + + @Override + public Stream getAllCBlockIds() { + return cBlocks.getAllKeys().map(CBlockId.class::cast); + } + + // Buckets + + @Override + public void addBucket(Bucket bucket) { + log.trace("Adding Bucket[{}]", bucket.getId()); + buckets.add(bucket); + } + + @Override + public Bucket getBucket(BucketId id) { + return buckets.get(id); + } + + @Override + public void removeBucket(BucketId id) { + log.trace("Removing Bucket[{}]", id); + buckets.remove(id); + } + + @Override + public Stream getAllBuckets() { + return buckets.getAllKeys().map(BucketId.class::cast).map(this::getBucket); + } + + @Override + public Stream getAllBucketIds() { + return buckets.getAllKeys().map(BucketId.class::cast); + } + + // Worker + + @Override + public WorkerInformation getWorker() { + return worker.get(); + } 
+ + @Override + public void setWorker(WorkerInformation worker) { + this.worker.add(worker); + } + + @Override + public void updateWorker(WorkerInformation worker) { + this.worker.update(worker); + } + + // Utilities + + +} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java index 96e2ca06f2..9097885ec4 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/BigStore.java @@ -9,15 +9,15 @@ import java.io.SequenceInputStream; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.UUID; -import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.stream.Stream; +import jakarta.validation.Validator; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.io.mina.ChunkingOutputStream; import com.bakdata.conquery.io.storage.Store; @@ -28,8 +28,6 @@ import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.primitives.Ints; -import jakarta.validation.Validator; -import jakarta.validation.constraints.NotEmpty; import jetbrains.exodus.env.Environment; import lombok.Getter; import lombok.RequiredArgsConstructor; @@ -153,15 +151,13 @@ public int count() { } @Override - public Collection getAll() { + public Stream getAll() { throw new UnsupportedOperationException(); } @Override - public Collection getAllKeys() { - Collection out = new ConcurrentLinkedQueue<>(); // has to be concurrent because forEach is concurrent. 
- metaStore.forEach((key, value, size) -> out.add(key)); - return out; + public Stream getAllKeys() { + return metaStore.getAllKeys(); } private BigStoreMetaKeys writeValue(VALUE value) { diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java index bf6588683f..043058bfaf 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/CachedStore.java @@ -1,9 +1,9 @@ package com.bakdata.conquery.io.storage.xodus.stores; import java.io.IOException; -import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.LongAdder; +import java.util.stream.Stream; import com.bakdata.conquery.io.jackson.serializer.IdReferenceResolvingException; import com.bakdata.conquery.io.storage.Store; @@ -115,8 +115,8 @@ public void loadData() { } @Override - public Collection getAll() { - return cache.values(); + public Stream getAll() { + return cache.values().stream(); } @Override @@ -125,8 +125,8 @@ public String toString() { } @Override - public Collection getAllKeys() { - return cache.keySet(); + public Stream getAllKeys() { + return cache.keySet().stream(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java index d89a3c5f87..e04b05acac 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/KeyIncludingStore.java @@ -2,9 +2,7 @@ import java.io.Closeable; import java.io.IOException; -import java.util.Collection; - -import com.bakdata.conquery.io.storage.Store; +import java.util.stream.Stream; import 
com.bakdata.conquery.io.storage.ManagedStore; import com.bakdata.conquery.io.storage.Store; @@ -12,18 +10,18 @@ public abstract class KeyIncludingStore implements Closeable, ManagedStore { protected final Store store; - + public KeyIncludingStore(Store store) { this.store = store; } - + protected abstract KEY extractKey(VALUE value); - + public void add(VALUE value) { store.add(extractKey(value), value); added(value); } - + public VALUE get(KEY key) { return store.get(key); } @@ -34,34 +32,33 @@ public void update(VALUE value) { updated(value); store.update(extractKey(value), value); } - + public void remove(KEY key) { VALUE old = get(key); store.remove(key); if(old != null) removed(old); } - + public void loadData() { store.loadData(); - for(VALUE value : getAll()) { - added(value); - } + getAll().forEach(this::added); } - - public Collection getAll() { - return store.getAll(); + + public Stream getAll() { + return store.getAllKeys() + .map(store::get); } - - public Collection getAllKeys() { + + public Stream getAllKeys() { return store.getAllKeys(); } - + @Override public String toString() { return store.toString(); } - + protected abstract void removed(VALUE value); protected abstract void added(VALUE value); diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java index 840a154ac7..94d04e5e3d 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStore.java @@ -10,7 +10,6 @@ import java.nio.file.Files; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -25,8 +24,10 @@ import java.util.function.Function; import java.util.function.Predicate; import 
java.util.function.Supplier; +import java.util.stream.Stream; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; +import jakarta.validation.Validator; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.JacksonUtil; @@ -43,7 +44,6 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; -import jakarta.validation.Validator; import jetbrains.exodus.ArrayByteIterable; import jetbrains.exodus.ByteIterable; import lombok.Data; @@ -515,13 +515,13 @@ public int count() { } @Override - public Collection getAll() { - throw new UnsupportedOperationException(); + public Stream getAll() { + return store.getAllKeys().stream().map(store::get).map(this::readValue); } @Override - public Collection getAllKeys() { - throw new UnsupportedOperationException(); + public Stream getAllKeys() { + return store.getAllKeys().stream().map(this::readKey); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java deleted file mode 100644 index 6a66f56d93..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/WeakCachedStore.java +++ /dev/null @@ -1,126 +0,0 @@ -package com.bakdata.conquery.io.storage.xodus.stores; - -import java.io.IOException; -import java.util.Collection; -import java.util.Optional; -import java.util.concurrent.ExecutionException; - -import com.bakdata.conquery.io.storage.Store; -import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import io.dropwizard.util.Duration; -import lombok.extern.slf4j.Slf4j; - -/** - * Weakly cached store, using {@link 
LoadingCache} to maintain values. Is a wrapper around the supplied {@link Store}. - */ -@Slf4j -public class WeakCachedStore implements Store { - - private final LoadingCache> cache; - - private final Store store; - - public WeakCachedStore(Store store, Duration weakCacheDuration) { - this.store = store; - this.cache = CacheBuilder.newBuilder() - .weakValues() - .expireAfterAccess( - weakCacheDuration.getQuantity(), - weakCacheDuration.getUnit() - ) - .build(new CacheLoader>() { - @Override - public Optional load(KEY key) throws Exception { - log.trace("Needing to load entry "+key+" in "+this); - return Optional.ofNullable(store.get(key)); - } - }); - } - - - @Override - public void add(KEY key, VALUE value) { - try { - Optional old = cache.get(key); - if(old.isPresent()) { - throw new IllegalStateException("The id "+key+" is already part of this store"); - } - cache.put(key, Optional.of(value)); - store.add(key, value); - } - catch(ExecutionException e) { - throw new RuntimeException("Failed to load entry for key "+key, e); - } - } - - @Override - public VALUE get(KEY key) { - try { - return cache.get(key).orElse(null); - } - catch (ExecutionException e) { - throw new RuntimeException("Failed to load entry for key "+key, e); - } - } - - @Override - public IterationStatistic forEach(StoreEntryConsumer consumer) { - throw new UnsupportedOperationException(); - } - - @Override - public void update(KEY key, VALUE value) { - cache.put(key, Optional.of(value)); - store.update(key, value); - } - - @Override - public void remove(KEY key) { - cache.invalidate(key); - store.remove(key); - } - - @Override - public int count() { - return store.count(); - } - - @Override - public void loadData() {} - - @Override - public Collection getAll() { - return store.getAll(); - } - - @Override - public Collection getAllKeys() { - return store.getAllKeys(); - } - - @Override - public String toString() { - return "weakcached "+store.toString(); - } - - - @Override - public void clear() { 
- cache.invalidateAll(); - store.clear(); - } - - @Override - public void removeStore() { - cache.invalidateAll(); - store.removeStore(); - } - - @Override - public void close() throws IOException { - store.close(); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java index 95c0777b26..eb9acab3ac 100644 --- a/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java +++ b/backend/src/main/java/com/bakdata/conquery/io/storage/xodus/stores/XodusStore.java @@ -1,5 +1,7 @@ package com.bakdata.conquery.io.storage.xodus.stores; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; @@ -77,6 +79,18 @@ public void forEach(BiConsumer consumer) { } } + public List getAllKeys() { + return environment.computeInReadonlyTransaction(txn -> { + List keys = new ArrayList<>(); + try (Cursor c = store.openCursor(txn)) { + while (c.getNext()) { + keys.add(c.getKey()); + } + return keys; + } + }); + } + public boolean update(ByteIterable key, ByteIterable value) { return environment.computeInTransaction(t -> store.put(t, key, value)); } diff --git a/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java b/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java index 748c92d2d5..0032246cdb 100644 --- a/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java +++ b/backend/src/main/java/com/bakdata/conquery/metrics/ExecutionMetrics.java @@ -4,8 +4,11 @@ import java.util.HashSet; import java.util.Set; +import com.bakdata.conquery.apiv1.query.CQElement; import com.bakdata.conquery.apiv1.query.QueryDescription; -import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; 
+import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; +import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; @@ -15,10 +18,6 @@ import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.Visitable; -import com.bakdata.conquery.apiv1.query.CQElement; -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.query.visitor.QueryVisitor; import com.codahale.metrics.Counter; import com.codahale.metrics.Histogram; @@ -115,23 +114,23 @@ public void accept(Visitable element) { } if (element instanceof CQConcept) { - for (Select select : ((CQConcept) element).getSelects()) { - doReport(CLASSES, select.getClass().getSimpleName()); - doReport(SELECTS, select.getId().toString()); + for (SelectId select : ((CQConcept) element).getSelects()) { + doReport(CLASSES, select.resolve().getClass().getSimpleName()); + doReport(SELECTS, select.toString()); } // Report classes and ids used of filters and selects for (CQTable table : ((CQConcept) element).getTables()) { for (FilterValue filter : table.getFilters()) { - doReport(CLASSES, filter.getFilter().getClass().getSimpleName()); - doReport(FILTERS, filter.getFilter().getId().toString()); + doReport(CLASSES, filter.getFilter().resolve().getClass().getSimpleName()); + doReport(FILTERS, filter.getFilter().toString()); } - for (Select select : table.getSelects()) { - doReport(CLASSES, select.getClass().getSimpleName()); + for (SelectId select : table.getSelects()) { + doReport(CLASSES, 
select.resolve().getClass().getSimpleName()); - doReport(SELECTS, select.getId().toString()); + doReport(SELECTS, select.toString()); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java index c623aa055e..f7b654083f 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/NamespaceHandler.java @@ -24,22 +24,19 @@ */ public interface NamespaceHandler { - N createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment); - - void removeNamespace(DatasetId id, N namespace); - /** * Creates the {@link NamespaceSetupData} that is shared by all {@link Namespace} types. */ - static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final ConqueryConfig config, final InternalMapperFactory internalMapperFactory, DatasetRegistry datasetRegistry) { + static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final ConqueryConfig config, final InternalMapperFactory internalMapperFactory, DatasetRegistry datasetRegistry, Environment environment) { List injectables = new ArrayList<>(); injectables.add(datasetRegistry); injectables.add(storage); - ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetRegistry); - ObjectMapper communicationMapper = internalMapperFactory.createManagerCommunicationMapper(datasetRegistry); - ObjectMapper preprocessMapper = internalMapperFactory.createPreprocessMapper(datasetRegistry); + ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(storage); + ObjectMapper communicationMapper = internalMapperFactory.createNamespaceCommunicationMapper(storage); + ObjectMapper preprocessMapper = internalMapperFactory.createPreprocessMapper(storage); + // Todo remove these injectables.forEach(i -> { 
i.injectInto(persistenceMapper); i.injectInto(communicationMapper); @@ -48,7 +45,7 @@ static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final C // Each store needs its own mapper because each injects its own registry - storage.openStores(Jackson.copyMapperAndInjectables(persistenceMapper)); + storage.openStores(Jackson.copyMapperAndInjectables(persistenceMapper), environment.metrics()); storage.loadData(); JobManager jobManager = new JobManager(storage.getDataset().getName(), config.isFailOnError()); @@ -57,4 +54,8 @@ static NamespaceSetupData createNamespaceSetup(NamespaceStorage storage, final C return new NamespaceSetupData(injectables, communicationMapper, preprocessMapper, jobManager, filterSearch); } + N createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment); + + void removeNamespace(DatasetId id, N namespace); + } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java index fe831a4733..e78ec3ce91 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/StorageListener.java @@ -3,6 +3,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; /** * Listener for updates of stored entities in ConQuery. 
@@ -19,6 +20,6 @@ public interface StorageListener { void onAddConcept(Concept concept); - void onDeleteConcept(Concept concept); + void onDeleteConcept(ConceptId concept); } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java index 7416c9a16a..de9b9f3dbe 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterConnectionShard.java @@ -21,7 +21,6 @@ import com.bakdata.conquery.models.messages.network.specific.AddShardNode; import com.bakdata.conquery.models.messages.network.specific.RegisterWorker; import com.bakdata.conquery.models.messages.network.specific.UpdateJobManagerStatus; -import com.bakdata.conquery.models.worker.IdResolveContext; import com.bakdata.conquery.models.worker.ShardWorkers; import com.bakdata.conquery.models.worker.Worker; import com.bakdata.conquery.models.worker.WorkerInformation; @@ -68,7 +67,7 @@ public void sessionCreated(IoSession session) { public void sessionOpened(IoSession session) { final NetworkSession networkSession = new NetworkSession(session); - context = new NetworkMessageContext.ShardNodeNetworkContext(networkSession, workers, config, environment.getValidator()); + context = new NetworkMessageContext.ShardNodeNetworkContext(networkSession, workers, config, environment); log.info("Connected to ManagerNode @ `{}`", session.getRemoteAddress()); // Authenticate with ManagerNode @@ -204,8 +203,8 @@ private void disconnectFromCluster() { @NotNull - private NioSocketConnector getClusterConnector(IdResolveContext workers) { - final ObjectMapper om = internalMapperFactory.createShardCommunicationMapper(); + private NioSocketConnector getClusterConnector(ShardWorkers workers) { + ObjectMapper om = internalMapperFactory.createShardCommunicationMapper(); final NioSocketConnector connector = new 
NioSocketConnector(); @@ -230,6 +229,19 @@ private static void scheduleIdleLogger(ScheduledExecutorService scheduler, IoSes ); } + @Override + public void start() throws Exception { + + + jobManager = new JobManager(environment.getName(), config.isFailOnError()); + + scheduler = environment.lifecycle().scheduledExecutorService("cluster-connection-shard").build(); + // Connect async as the manager might not be up jet or is started by a test in succession + scheduler.schedule(this::connectToCluster, 0, TimeUnit.MINUTES); + + scheduler.scheduleAtFixedRate(this::reportJobManagerStatus, 30, 1, TimeUnit.SECONDS); + + } private void reportJobManagerStatus() { if (context == null || !context.isConnected()) { @@ -258,24 +270,6 @@ private void reportJobManagerStatus() { } } - @Override - public void start() throws Exception { - - - jobManager = new JobManager(environment.getName(), config.isFailOnError()); - - scheduler = environment.lifecycle().scheduledExecutorService("cluster-connection-shard").build(); - // Connect async as the manager might not be up jet or is started by a test in succession - scheduler.schedule(this::connectToCluster, 0, TimeUnit.MINUTES); - - scheduler.scheduleAtFixedRate(this::reportJobManagerStatus, 30, 1, TimeUnit.SECONDS); - - } - - public boolean isBusy() { - return jobManager.isSlowWorkerBusy(); - } - @Override public void stop() throws Exception { // close scheduler before disconnect to avoid scheduled reconnects @@ -283,4 +277,8 @@ public void stop() throws Exception { disconnectFromCluster(); jobManager.close(); } + + public boolean isBusy() { + return jobManager.isSlowWorkerBusy(); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java index f5a1b5179b..3d7ca63abb 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java +++ 
b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java @@ -6,6 +6,11 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.Stream; +import jakarta.ws.rs.BadRequestException; +import jakarta.ws.rs.NotFoundException; +import jakarta.ws.rs.WebApplicationException; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.mode.ImportHandler; import com.bakdata.conquery.models.datasets.Import; @@ -27,10 +32,6 @@ import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.models.worker.WorkerInformation; -import jakarta.ws.rs.BadRequestException; -import jakarta.ws.rs.NotFoundException; -import jakarta.ws.rs.WebApplicationException; -import jakarta.ws.rs.core.Response; import lombok.AllArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @@ -47,10 +48,10 @@ public class ClusterImportHandler implements ImportHandler { @SneakyThrows @Override public void updateImport(Namespace namespace, InputStream inputStream) { - handleImport(namespace, inputStream, true); + handleImport(namespace, inputStream, true, datasetRegistry); } - private static void handleImport(Namespace namespace, InputStream inputStream, boolean update) throws IOException { + private static void handleImport(Namespace namespace, InputStream inputStream, boolean update, DatasetRegistry datasetRegistry) throws IOException { try (PreprocessedReader parser = new PreprocessedReader(inputStream, namespace.getPreprocessMapper())) { // We parse semi-manually as the incoming file consist of multiple documents we read progressively: // 1) the header to check metadata @@ -60,7 +61,7 @@ private static void handleImport(Namespace namespace, InputStream inputStream, b final Table table = validateImportable(((DistributedNamespace) namespace), header, update); - readAndDistributeImport(((DistributedNamespace) namespace), table, header, 
parser); + readAndDistributeImport(((DistributedNamespace) namespace), table, header, parser, datasetRegistry); clearDependentConcepts(namespace.getStorage().getAllConcepts(), table); } @@ -97,7 +98,7 @@ private static Table validateImportable(DistributedNamespace namespace, Preproce } // before updating the import, make sure that all workers removed the prior import - namespace.getWorkerHandler().sendToAll(new RemoveImportJob(processedImport)); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(processedImport.getId())); namespace.getStorage().removeImport(importId); } else if (processedImport != null) { @@ -107,7 +108,7 @@ else if (processedImport != null) { return table; } - private static void readAndDistributeImport(DistributedNamespace namespace, Table table, PreprocessedHeader header, PreprocessedReader reader) { + private static void readAndDistributeImport(DistributedNamespace namespace, Table table, PreprocessedHeader header, PreprocessedReader reader, DatasetRegistry datasetRegistry) { final TableId tableId = new TableId(namespace.getDataset().getId(), header.getTable()); final ImportId importId = new ImportId(tableId, header.getName()); @@ -149,16 +150,12 @@ private static void readAndDistributeImport(DistributedNamespace namespace, Tabl } - private static void clearDependentConcepts(Collection> allConcepts, Table table) { - for (Concept c : allConcepts) { - for (Connector con : c.getConnectors()) { - if (!con.getTable().equals(table)) { - continue; - } - - con.getConcept().clearMatchingStats(); - } - } + private static void clearDependentConcepts(Stream> allConcepts, Table table) { + allConcepts.map(Concept::getConnectors) + .flatMap(List::stream) + .filter(con -> con.getResolvedTableId().equals(table.getId())) + .map(Connector::getConcept) + .forEach(Concept::clearMatchingStats); } /** @@ -177,19 +174,19 @@ public static WorkerId sendBucket(Bucket bucket, WorkerInformation responsibleWo @SneakyThrows @Override public void addImport(Namespace 
namespace, InputStream inputStream) { - handleImport(namespace, inputStream, false); + handleImport(namespace, inputStream, false, datasetRegistry); } @Override public void deleteImport(Import imp) { - final DatasetId id = imp.getTable().getDataset().getId(); + final DatasetId id = imp.getTable().getDataset(); final DistributedNamespace namespace = datasetRegistry.get(id); - clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable()); + clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable().resolve()); namespace.getStorage().removeImport(imp.getId()); - namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp)); + namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp.getId())); // Remove bucket assignments for consistency report namespace.getWorkerHandler().removeBucketAssignmentsForImportFormWorkers(imp); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java index 560ab1465b..c8bf780127 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterNamespaceHandler.java @@ -24,7 +24,7 @@ public class ClusterNamespaceHandler implements NamespaceHandler datasetRegistry, Environment environment) { - NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry); + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry, environment); DistributedExecutionManager executionManager = new DistributedExecutionManager(metaStorage, datasetRegistry, clusterState); WorkerHandler workerHandler = new WorkerHandler(namespaceData.getCommunicationMapper(), namespaceStorage); 
clusterState.getWorkerHandlers().put(namespaceStorage.getDataset().getId(), workerHandler); @@ -49,7 +49,7 @@ public DistributedNamespace createNamespace(NamespaceStorage namespaceStorage, M @Override public void removeNamespace(DatasetId id, DistributedNamespace namespace) { - clusterState.getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(namespace.getDataset()))); + clusterState.getShardNodes().values().forEach(shardNode -> shardNode.send(new RemoveWorker(namespace.getDataset().getId()))); clusterState.getWorkerHandlers().keySet().removeIf(worker -> worker.getDataset().getDataset().equals(id)); } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java index 9d8360a383..7bdc4fa948 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterStorageListener.java @@ -4,6 +4,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.jobs.SimpleJob; import com.bakdata.conquery.models.messages.namespaces.specific.RemoveConcept; @@ -29,35 +30,35 @@ class ClusterStorageListener implements StorageListener { @Override public void onAddSecondaryId(SecondaryIdDescription secondaryId) { - datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateSecondaryId(secondaryId)); + datasetRegistry.get(secondaryId.getDataset()).getWorkerHandler().sendToAll(new UpdateSecondaryId(secondaryId)); } @Override public void onDeleteSecondaryId(SecondaryIdDescription secondaryId) { - 
datasetRegistry.get(secondaryId.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveSecondaryId(secondaryId)); + datasetRegistry.get(secondaryId.getDataset()).getWorkerHandler().sendToAll(new RemoveSecondaryId(secondaryId.getId())); } @Override public void onAddTable(Table table) { - datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new UpdateTable(table)); + datasetRegistry.get(table.getDataset()).getWorkerHandler().sendToAll(new UpdateTable(table)); } @Override public void onRemoveTable(Table table) { - datasetRegistry.get(table.getDataset().getId()).getWorkerHandler().sendToAll(new RemoveTable(table)); + datasetRegistry.get(table.getDataset()).getWorkerHandler().sendToAll(new RemoveTable(table.getId())); } @Override public void onAddConcept(Concept concept) { - WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); + WorkerHandler handler = datasetRegistry.get(concept.getDataset()).getWorkerHandler(); SimpleJob simpleJob = new SimpleJob(String.format("sendToAll : Add %s ", concept.getId()), () -> handler.sendToAll(new UpdateConcept(concept))); jobManager.addSlowJob(simpleJob); } @Override - public void onDeleteConcept(Concept concept) { - WorkerHandler handler = datasetRegistry.get(concept.getDataset().getId()).getWorkerHandler(); - SimpleJob simpleJob = new SimpleJob("sendToAll: remove " + concept.getId(), () -> handler.sendToAll(new RemoveConcept(concept))); + public void onDeleteConcept(ConceptId concept) { + WorkerHandler handler = datasetRegistry.get(concept.getDataset()).getWorkerHandler(); + SimpleJob simpleJob = new SimpleJob("sendToAll: remove " + concept, () -> handler.sendToAll(new RemoveConcept(concept))); jobManager.addSlowJob(simpleJob); } } diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java index e69775eddf..ffb80ce5f1 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/InternalMapperFactory.java @@ -4,9 +4,11 @@ import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.models.config.ConqueryConfig; +import com.bakdata.conquery.models.identifiable.ids.IIdInterner; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.ShardWorkers; import com.fasterxml.jackson.databind.DeserializationConfig; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationConfig; @@ -18,28 +20,62 @@ public ObjectMapper createShardCommunicationMapper() { return createInternalObjectMapper(View.InternalCommunication.class); } - public ObjectMapper createWorkerCommunicationMapper(ShardWorkers workers) { + /** + * @return a preconfigured binary object mapper + */ + private ObjectMapper createInternalObjectMapper(Class viewClass) { + final ObjectMapper objectMapper = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER)); + + final MutableInjectableValues injectableValues = new MutableInjectableValues(); + objectMapper.setInjectableValues(injectableValues); + + injectableValues.add(Validator.class, validator); + config.injectInto(objectMapper); + new IIdInterner().injectInto(objectMapper); + + if (viewClass != null) { + setViewClass(objectMapper, viewClass); + } + + return objectMapper; + } + + public static void setViewClass(ObjectMapper objectMapper, Class viewClass) { + // Set serialization config + SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); + + serializationConfig = serializationConfig.withView(viewClass); + + 
objectMapper.setConfig(serializationConfig); + + // Set deserialization config + DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); + + deserializationConfig = deserializationConfig.withView(viewClass); + + objectMapper.setConfig(deserializationConfig); + } + + public ObjectMapper createWorkerCommunicationMapper(WorkerStorage storage) { final ObjectMapper objectMapper = createInternalObjectMapper(View.InternalCommunication.class); - workers.injectInto(objectMapper); + storage.injectInto(objectMapper); return objectMapper; } - public ObjectMapper createWorkerPersistenceMapper(ShardWorkers workers) { + public ObjectMapper createWorkerPersistenceMapper(WorkerStorage storage) { final ObjectMapper objectMapper = createInternalObjectMapper(View.Persistence.Shard.class); - workers.injectInto(objectMapper); - config.injectInto(objectMapper); + storage.injectInto(objectMapper); return objectMapper; } - public ObjectMapper createNamespacePersistenceMapper(DatasetRegistry datasetRegistry) { + public ObjectMapper createNamespacePersistenceMapper(NamespaceStorage namespaceStorage) { final ObjectMapper objectMapper = createInternalObjectMapper(View.Persistence.Manager.class); - datasetRegistry.injectInto(objectMapper); - + namespaceStorage.injectInto(objectMapper); return objectMapper; } @@ -61,53 +97,22 @@ public ObjectMapper createManagerCommunicationMapper(DatasetRegistry datasetR return objectMapper; } + public ObjectMapper createNamespaceCommunicationMapper(NamespaceStorage namespaceStorage) { + ObjectMapper objectMapper = createInternalObjectMapper(View.InternalCommunication.class); - - public ObjectMapper createPreprocessMapper(DatasetRegistry datasetRegistry) { - ObjectMapper objectMapper = createInternalObjectMapper(null); - - datasetRegistry.injectInto(objectMapper); + namespaceStorage.injectInto(objectMapper); return objectMapper; } - /** - * @return a preconfigured binary object mapper - */ - private ObjectMapper 
createInternalObjectMapper(Class viewClass) { - final ObjectMapper objectMapper = config.configureObjectMapper(Jackson.copyMapperAndInjectables(Jackson.BINARY_MAPPER)); - - final MutableInjectableValues injectableValues = new MutableInjectableValues(); - objectMapper.setInjectableValues(injectableValues); - - injectableValues.add(Validator.class, validator); - config.injectInto(objectMapper); + public ObjectMapper createPreprocessMapper(NamespaceStorage namespaceStorage) { + ObjectMapper objectMapper = createInternalObjectMapper(null); - if (viewClass != null) { - setViewClass(objectMapper, viewClass); - } + namespaceStorage.injectInto(objectMapper); return objectMapper; } - public static void setViewClass(ObjectMapper objectMapper, Class viewClass) { - // Set serialization config - SerializationConfig serializationConfig = objectMapper.getSerializationConfig(); - - serializationConfig = serializationConfig.withView(viewClass); - - objectMapper.setConfig(serializationConfig); - - // Set deserialization config - DeserializationConfig deserializationConfig = objectMapper.getDeserializationConfig(); - - deserializationConfig = deserializationConfig.withView(viewClass); - - objectMapper.setConfig(deserializationConfig); - } - - - /** * Customize the mapper from the environment, that is used in the REST-API. 
* In contrast to the internal object mapper this uses textual JSON representation @@ -126,6 +131,7 @@ public void customizeApiObjectMapper(ObjectMapper objectMapper, DatasetRegistry< objectMapper.setInjectableValues(injectableValues); injectableValues.add(Validator.class, validator); + new IIdInterner().injectInto(objectMapper); datasetRegistry.injectInto(objectMapper); metaStorage.injectInto(objectMapper); config.injectInto(objectMapper); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java index f9f808b392..6a40a74705 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalNamespaceHandler.java @@ -39,7 +39,7 @@ public class LocalNamespaceHandler implements NamespaceHandler { @Override public LocalNamespace createNamespace(NamespaceStorage namespaceStorage, MetaStorage metaStorage, DatasetRegistry datasetRegistry, Environment environment) { - NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry); + NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, internalMapperFactory, datasetRegistry, environment); IdColumnConfig idColumns = config.getIdColumns(); SqlConnectorConfig sqlConnectorConfig = config.getSqlConnectorConfig(); diff --git a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java index 8d53dde819..ff55603cef 100644 --- a/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java +++ b/backend/src/main/java/com/bakdata/conquery/mode/local/LocalStorageListener.java @@ -4,6 +4,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import 
com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; public class LocalStorageListener implements StorageListener { @@ -30,6 +31,6 @@ public void onAddConcept(Concept concept) { } @Override - public void onDeleteConcept(Concept concept) { + public void onDeleteConcept(ConceptId concept) { } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java index a6490c4b73..655e93da5b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationController.java @@ -225,7 +225,7 @@ public static User flatCopyUser(@NonNull User originUser, String namePrefix, @No // Give read permission to all executions the original user owned copiedPermission.addAll( - storage.getAllExecutions().stream() + storage.getAllExecutions() .filter(originUser::isOwner) .map(exc -> exc.createPermission(Ability.READ.asSet())) .collect(Collectors.toSet()) @@ -233,7 +233,7 @@ public static User flatCopyUser(@NonNull User originUser, String namePrefix, @No // Give read permission to all form configs the original user owned copiedPermission.addAll( - storage.getAllFormConfigs().stream() + storage.getAllFormConfigs() .filter(originUser::isOwner) .map(conf -> conf.createPermission(Ability.READ.asSet())) .collect(Collectors.toSet()) diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java index 788e856cc7..007a1195af 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/AuthorizationHelper.java @@ -39,7 +39,7 @@ public class 
AuthorizationHelper { public static List getGroupsOf(@NonNull Subject subject, @NonNull MetaStorage storage){ - return storage.getAllGroups().stream() + return storage.getAllGroups() .filter(g -> g.getMembers().contains(subject.getId())) .sorted() .collect(Collectors.toList()); @@ -78,11 +78,11 @@ public static Multimap getEffectiveUserPermissions(U } public static List getUsersByRole(MetaStorage storage, Role role) { - return storage.getAllUsers().stream().filter(u -> u.getRoles().contains(role.getId())).collect(Collectors.toList()); + return storage.getAllUsers().filter(u -> u.getRoles().contains(role.getId())).collect(Collectors.toList()); } public static List getGroupsByRole(MetaStorage storage, Role role) { - return storage.getAllGroups().stream().filter(g -> g.getRoles().contains(role.getId())).collect(Collectors.toList()); + return storage.getAllGroups().filter(g -> g.getRoles().contains(role.getId())).collect(Collectors.toList()); } /** @@ -95,9 +95,10 @@ public static void authorizeDownloadDatasets(@NonNull Subject subject, @NonNull Set datasets = collector.getIdentifiables() - .stream() - .map(NamespacedIdentifiable::getDataset) - .collect(Collectors.toSet()); + .stream() + .map(NamespacedIdentifiable::getDataset) + .map(DatasetId::resolve) + .collect(Collectors.toSet()); subject.authorize(datasets, Ability.DOWNLOAD); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java index b2afa47540..ca4759999b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/basic/LocalAuthenticationRealm.java @@ -4,7 +4,6 @@ import java.io.IOException; import java.util.List; import java.util.concurrent.Executors; - import jakarta.validation.Validator; import com.bakdata.conquery.Conquery; @@ -13,7 +12,7 @@ import 
com.bakdata.conquery.apiv1.auth.PasswordHashCredential; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.Store; -import com.bakdata.conquery.io.storage.StoreMappings; +import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore; import com.bakdata.conquery.io.storage.xodus.stores.XodusStore; import com.bakdata.conquery.models.auth.ConqueryAuthenticationInfo; @@ -101,7 +100,7 @@ protected void onInit() { // Open/create the database/store File passwordStoreFile = new File(storageDir, storeName); passwordEnvironment = Environments.newInstance(passwordStoreFile, passwordStoreConfig.createConfig()); - passwordStore = StoreMappings.cached( + passwordStore = new CachedStore<>( new SerializingStore<>( new XodusStore( passwordEnvironment, @@ -216,7 +215,7 @@ public boolean removeUser(User user) { @Override public List getAllUsers() { - return ImmutableList.copyOf(passwordStore.getAllKeys()); + return ImmutableList.copyOf(passwordStore.getAllKeys().toList()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java index 2d34ddadc8..d7641393c3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Group.java @@ -40,26 +40,28 @@ public Group(String name, String label, MetaStorage storage) { public Set getEffectivePermissions() { Set permissions = getPermissions(); for (RoleId roleId : roles) { - permissions = Sets.union(permissions, storage.getRole(roleId).getEffectivePermissions()); + permissions = Sets.union(permissions, getMetaStorage().getRole(roleId).getEffectivePermissions()); } return permissions; } + public synchronized void addMember(User user) { + if (members.add(user.getId())) { + log.trace("Added user {} to group {}", user.getId(), getId()); 
+ updateStorage(); + } + } + @Override public void updateStorage() { - storage.updateGroup(this); + getMetaStorage().updateGroup(this); } @Override public GroupId createId() { - return new GroupId(name); - } - - public synchronized void addMember(User user) { - if (members.add(user.getId())) { - log.trace("Added user {} to group {}", user.getId(), getId()); - updateStorage(); - } + GroupId groupId = new GroupId(name); + groupId.setMetaStorage(getMetaStorage()); + return groupId; } public synchronized void removeMember(User user) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java index 279e693356..85267f0846 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/PermissionOwner.java @@ -4,17 +4,15 @@ import java.util.Comparator; import java.util.HashSet; import java.util.Set; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.specific.PermissionOwnerId; -import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.collect.ImmutableSet; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -62,16 +60,11 @@ public abstract class PermissionOwner permissions = new HashSet<>(); - @JacksonInject(useInput = OptBoolean.FALSE) - @NotNull - @EqualsAndHashCode.Exclude - protected MetaStorage storage; - 
public PermissionOwner(String name, String label, MetaStorage storage) { this.name = name; this.label = label; - this.storage = storage; + setMetaStorage(storage); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java index 3d0ec8c1d8..4102263bd0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/Role.java @@ -8,8 +8,6 @@ import lombok.AccessLevel; import lombok.NoArgsConstructor; -import java.util.Set; - @NoArgsConstructor(access = AccessLevel.PRIVATE) public class Role extends PermissionOwner { @@ -24,14 +22,16 @@ public Set getEffectivePermissions() { } @Override - public RoleId createId() { - return new RoleId(name); + protected void updateStorage() { + getMetaStorage().updateRole(this); + } @Override - protected void updateStorage() { - storage.updateRole(this); - + public RoleId createId() { + RoleId roleId = new RoleId(name); + roleId.setMetaStorage(getMetaStorage()); + return roleId; } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java index bc3b1fc8d1..b5758f5844 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/entities/User.java @@ -5,6 +5,7 @@ import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -56,25 +57,23 @@ public User(String name, String label, MetaStorage storage) { public Set getEffectivePermissions() { Set permissions = getPermissions(); for (RoleId roleId : roles) { - Role role = storage.getRole(roleId); + Role role = getMetaStorage().getRole(roleId); if (role == null) { 
log.warn("Could not find role {} to gather permissions", roleId); continue; } permissions = Sets.union(permissions, role.getEffectivePermissions()); } - for (Group group : storage.getAllGroups()) { + + for (Iterator it = getMetaStorage().getAllGroups().iterator(); it.hasNext(); ) { + Group group = it.next(); if (!group.containsMember(this)) { continue; } permissions = Sets.union(permissions, group.getEffectivePermissions()); } - return permissions; - } - @Override - public UserId createId() { - return new UserId(name); + return permissions; } public synchronized void addRole(Role role) { @@ -98,10 +97,64 @@ public Set getRoles() { @Override public void updateStorage() { - storage.updateUser(this); + getMetaStorage().updateUser(this); } - public void authorize(@NonNull Authorized object, @NonNull Ability ability) { + @Override + public UserId createId() { + UserId userId = new UserId(name); + userId.setMetaStorage(getMetaStorage()); + return userId; + } + + /** + * This class is non-static so it's a fixed part of the enclosing User object. + * It's protected for testing purposes only. 
+ */ + public class ShiroUserAdapter extends FilteredUser { + + @Getter + private final ThreadLocal authenticationInfo = + ThreadLocal.withInitial(() -> new ConqueryAuthenticationInfo(User.this, null, null, false, null)); + + @Override + public Object getPrincipal() { + return getId(); + } @Override + public void checkPermission(Permission permission) throws AuthorizationException { + SecurityUtils.getSecurityManager().checkPermission(getPrincipals(), permission); + } + + @Override + public void checkPermissions(Collection permissions) throws AuthorizationException { + SecurityUtils.getSecurityManager().checkPermissions(getPrincipals(), permissions); + } + + @Override + public PrincipalCollection getPrincipals() { + return authenticationInfo.get().getPrincipals(); + } + + @Override + public boolean isPermitted(Permission permission) { + return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permission); + } + + @Override + public boolean[] isPermitted(List permissions) { + return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permissions); + } + + @Override + public boolean isPermittedAll(Collection permissions) { + return SecurityUtils.getSecurityManager().isPermittedAll(getPrincipals(), permissions); + } + + + + + + } public void authorize(@NonNull Authorized object, @NonNull Ability ability) { if (isOwner(object)) { return; } @@ -139,7 +192,7 @@ public boolean[] isPermitted(List authorizeds, Ability abi public boolean isOwner(Authorized object) { - return object instanceof Owned && equals(((Owned) object).getOwner()); + return object instanceof Owned && getId().equals(((Owned) object).getOwner()); } @JsonIgnore @@ -168,52 +221,5 @@ public User getUser() { } - /** - * This class is non-static so it's a fixed part of the enclosing User object. - * It's protected for testing purposes only. 
- */ - public class ShiroUserAdapter extends FilteredUser { - - @Getter - private final ThreadLocal authenticationInfo = - ThreadLocal.withInitial(() -> new ConqueryAuthenticationInfo(User.this, null, null, false, null)); - - @Override - public void checkPermission(Permission permission) throws AuthorizationException { - SecurityUtils.getSecurityManager().checkPermission(getPrincipals(), permission); - } - - @Override - public void checkPermissions(Collection permissions) throws AuthorizationException { - SecurityUtils.getSecurityManager().checkPermissions(getPrincipals(), permissions); - } - @Override - public PrincipalCollection getPrincipals() { - return authenticationInfo.get().getPrincipals(); - } - - @Override - public boolean isPermitted(Permission permission) { - return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permission); - } - - @Override - public boolean[] isPermitted(List permissions) { - return SecurityUtils.getSecurityManager().isPermitted(getPrincipals(), permissions); - } - - @Override - public boolean isPermittedAll(Collection permissions) { - return SecurityUtils.getSecurityManager().isPermittedAll(getPrincipals(), permissions); - } - - - @Override - public Object getPrincipal() { - return getId(); - } - - - } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java index 4913cf3c6a..1dd426cc91 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/auth/oidc/IntrospectionDelegatingRealm.java @@ -309,7 +309,7 @@ private synchronized Group createGroup(String name, String label) { private void syncGroupMappings(User user, Set mappedGroupsToDo) { // TODO mark mappings as managed by keycloak - for (Group group : storage.getAllGroups()) { + for (Group group : 
storage.getAllGroups().toList()) { if (group.containsMember(user)) { if (mappedGroupsToDo.contains(group)) { // Mapping is still valid, remove from todo-list diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java index 78ec8aecea..683d733351 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/StoreFactory.java @@ -4,22 +4,24 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.storage.IdentifiableStore; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.io.storage.WorkerStorage; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.*; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.PreviewConfig; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.StructureNode; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import 
com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -34,23 +36,23 @@ public interface StoreFactory { Collection discoverNamespaceStorages(); - Collection discoverWorkerStorages(); + Collection discoverWorkerStorages(); // NamespacedStorage (Important for serdes communication between manager and shards) SingletonStore createDatasetStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper); - // WorkerStorage - IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + // WorkerStorageImpl + IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper); SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper); @@ -59,24 +61,25 @@ public interface StoreFactory { SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper); - SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper); // MetaStorage - IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper); + IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createUserStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore 
createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper); + IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper); - IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper); - SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper); + SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper); + + Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper); - CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java index 797df2caab..4488c38e11 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java +++ b/backend/src/main/java/com/bakdata/conquery/models/config/XodusStoreFactory.java @@ -24,12 +24,12 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.IdentifiableStore; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; -import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; import com.bakdata.conquery.io.storage.Store; 
import com.bakdata.conquery.io.storage.StoreMappings; import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.io.storage.xodus.stores.BigStore; import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; import com.bakdata.conquery.io.storage.xodus.stores.EnvironmentRegistry; @@ -51,7 +51,6 @@ import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -65,7 +64,6 @@ import com.google.common.collect.MultimapBuilder; import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; -import io.dropwizard.util.Duration; import jetbrains.exodus.env.Environment; import lombok.AllArgsConstructor; import lombok.Getter; @@ -117,50 +115,31 @@ public class XodusStoreFactory implements StoreFactory { C_BLOCKS.storeInfo().getName() ) ); - + @JsonIgnore + private final transient Multimap + openStoresInEnv = + Multimaps.synchronizedSetMultimap(MultimapBuilder.hashKeys().hashSetValues().build()); private Path directory = Path.of("storage"); - private boolean validateOnWrite = false; @NotNull @Valid private XodusConfig xodus = new XodusConfig(); - @JsonIgnore private EnvironmentRegistry registry = new EnvironmentRegistry(); - /** * Number of threads reading from XoduStore. + * * @implNote it's always only one thread reading from disk, dispatching to multiple reader threads. */ @Min(1) private int readerWorkers = Runtime.getRuntime().availableProcessors(); - /** * How many slots of buffering to use before the IO thread is put to sleep. 
*/ @Min(1) private int bufferPerWorker = 20; - @JsonIgnore private ExecutorService readerExecutorService; - - public ExecutorService getReaderExecutorService() { - if (readerExecutorService == null){ - readerExecutorService = new ThreadPoolExecutor( - 1, getReaderWorkers(), - 5, TimeUnit.MINUTES, - new ArrayBlockingQueue<>(getReaderWorkers() * getBufferPerWorker()), - new ThreadPoolExecutor.CallerRunsPolicy() - ); - } - - return readerExecutorService; - } - - private boolean useWeakDictionaryCaching; - @NotNull - private Duration weakCacheDuration = Duration.hours(48); - /** * Flag for the {@link SerializingStore} whether to delete values from the underlying store, that cannot be mapped to an object anymore. */ @@ -182,71 +161,14 @@ public ExecutorService getReaderExecutorService() { @JsonIgnore private transient Validator validator; - @JsonIgnore - private final transient Multimap - openStoresInEnv = - Multimaps.synchronizedSetMultimap(MultimapBuilder.hashKeys().hashSetValues().build()); - @Override public Collection discoverNamespaceStorages() { return loadNamespacedStores("dataset_", (storePath) -> new NamespaceStorage(this, storePath), NAMESPACE_STORES); } @Override - public Collection discoverWorkerStorages() { - return loadNamespacedStores("worker_", (storePath) -> new WorkerStorage(this, storePath), WORKER_STORES); - } - - - private List loadNamespacedStores(String prefix, Function creator, Set storesToTest) { - final File baseDir = getDirectory().toFile(); - - if (baseDir.mkdirs()) { - log.warn("Had to create Storage Dir at `{}`", baseDir); - } - - final List storages = new ArrayList<>(); - - for (File directory : Objects.requireNonNull(baseDir.listFiles((file, name) -> file.isDirectory() && name.startsWith(prefix)))) { - - final String name = directory.getName(); - - ConqueryMDC.setLocation(directory.toString()); - - try (Environment environment = registry.findOrCreateEnvironment(directory, xodus)) { - if (!environmentHasStores(environment, storesToTest)) 
{ - log.warn("No valid {}storage found in {}", prefix, directory); - continue; - } - } - - final T namespacedStorage = creator.apply(name); - - storages.add(namespacedStorage); - } - - return storages; - } - - private boolean environmentHasStores(Environment env, Set storesToTest) { - return env.computeInTransaction(t -> { - final List allStoreNames = env.getAllStoreNames(t); - final boolean complete = new HashSet<>(allStoreNames).containsAll(storesToTest); - if (complete) { - log.trace("Storage contained all stores: {}", storesToTest); - return true; - } - - final HashSet missing = Sets.newHashSet(storesToTest); - allStoreNames.forEach(missing::remove); - log.warn("Environment did not contain all required stores. It is missing: {}. It had {}. {}", missing, allStoreNames, - loadEnvironmentWithMissingStores - ? "Loading environment anyway." - : "Skipping environment." - ); - - return loadEnvironmentWithMissingStores; - }); + public Collection discoverWorkerStorages() { + return loadNamespacedStores("worker_", (storePath) -> new WorkerStorageImpl(this, validator, storePath), WORKER_STORES); } @Override @@ -255,53 +177,33 @@ public SingletonStore createDatasetStore(String pathName, ObjectMapper } @Override - public IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SECONDARY_IDS, objectMapper), centralRegistry); + public IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SECONDARY_IDS, objectMapper)); } @Override - public IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, INTERN_TO_EXTERN, objectMapper), 
centralRegistry); + public IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, TABLES, objectMapper)); } @Override - public IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SEARCH_INDEX, objectMapper), centralRegistry); + public IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, CONCEPTS, objectMapper)); } @Override - public SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, ENTITY_PREVIEW, objectMapper)); + public IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, IMPORTS, objectMapper)); } @Override - public CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.cached(createStore(findEnvironment(pathName), validator, ENTITY_TO_BUCKET, objectMapper)); + public IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, C_BLOCKS, objectMapper)); } @Override - public IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, TABLES, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, CONCEPTS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, IMPORTS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, C_BLOCKS, objectMapper), centralRegistry); - } - - @Override - public IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, BUCKETS, objectMapper), centralRegistry); + public IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, BUCKETS, objectMapper)); } @Override @@ -319,7 +221,7 @@ public SingletonStore createIdMappingStore(String pathName, ObjectM openStoresInEnv.put(bigStore.getDataXodusStore().getEnvironment(), bigStore.getDataXodusStore()); openStoresInEnv.put(bigStore.getMetaXodusStore().getEnvironment(), bigStore.getMetaXodusStore()); - return new SingletonStore<>(new CachedStore<>(bigStore)); + return new SingletonStore<>(bigStore); } } @@ -329,34 +231,57 @@ public SingletonStore createWorkerToBucketsStore(String path } 
@Override - public SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { + public SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper) { return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, STRUCTURE, objectMapper)); } @Override - public IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "executions")), validator, EXECUTIONS, objectMapper), centralRegistry); + public IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "executions")), validator, EXECUTIONS, objectMapper)); + } + + @Override + public IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "formConfigs")), validator, FORM_CONFIG, objectMapper)); + } + + @Override + public IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "users")), validator, AUTH_USER, objectMapper)); } @Override - public IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "formConfigs")), validator, FORM_CONFIG, objectMapper), centralRegistry); + public IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "roles")), validator, AUTH_ROLE, objectMapper)); } @Override - public IdentifiableStore createUserStore(CentralRegistry centralRegistry, String 
pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "users")), validator, AUTH_USER, objectMapper), centralRegistry); + public IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "groups")), validator, AUTH_GROUP, objectMapper)); } @Override - public IdentifiableStore createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "roles")), validator, AUTH_ROLE, objectMapper), centralRegistry); + public IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, INTERN_TO_EXTERN, objectMapper)); } + @Override + public IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(createStore(findEnvironment(pathName), validator, SEARCH_INDEX, objectMapper)); + } @Override - public IdentifiableStore createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(createStore(findEnvironment(resolveSubDir(pathName, "groups")), validator, AUTH_GROUP, objectMapper), centralRegistry); + public SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(createStore(findEnvironment(pathName), validator, ENTITY_PREVIEW, objectMapper)); + } + + @Override + public Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { + return createStore(findEnvironment(pathName), validator, ENTITY_TO_BUCKET, objectMapper); + } + + private Environment findEnvironment(File path) { + return 
registry.findOrCreateEnvironment(path, getXodus()); } private File resolveSubDir(String... subdirs) { @@ -369,23 +294,35 @@ private File resolveSubDir(String... subdirs) { return current.toFile(); } - /** - * Returns this.directory if the list is empty. - */ - @NonNull - @JsonIgnore - private File getStorageDir(String pathName) { - return getDirectory().resolve(pathName).toFile(); - } + public Store createStore(Environment environment, Validator validator, StoreMappings storeId, ObjectMapper objectMapper) { + final StoreInfo storeInfo = storeId.storeInfo(); + synchronized (openStoresInEnv) { + if (openStoresInEnv.get(environment).stream().map(XodusStore::getName).anyMatch(name -> storeInfo.getName().equals(name))) { + throw new IllegalStateException("Attempted to open an already opened store:" + storeInfo.getName()); + } + final XodusStore store = new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); - private Environment findEnvironment(String pathName) { - final File path = getStorageDir(pathName); - return registry.findOrCreateEnvironment(path, getXodus()); + openStoresInEnv.put(environment, store); + + return new CachedStore<>( + new SerializingStore<>( + store, + validator, + objectMapper, + storeInfo.getKeyType(), + storeInfo.getValueType(), + isValidateOnWrite(), + isRemoveUnreadableFromStore(), + getUnreadableDataDumpDirectory(), + getReaderExecutorService() + )); + } } - private Environment findEnvironment(File path) { + private Environment findEnvironment(String pathName) { + final File path = getStorageDir(pathName); return registry.findOrCreateEnvironment(path, getXodus()); } @@ -407,7 +344,7 @@ private void closeStore(XodusStore store) { private void removeStore(XodusStore store) { final Environment env = store.getEnvironment(); - synchronized (openStoresInEnv){ + synchronized (openStoresInEnv) { final Collection stores = openStoresInEnv.get(env); stores.remove(store); @@ -420,12 +357,34 @@ private void 
removeStore(XodusStore store) { removeEnvironment(env); } + public ExecutorService getReaderExecutorService() { + if (readerExecutorService == null) { + readerExecutorService = new ThreadPoolExecutor( + 1, getReaderWorkers(), + 5, TimeUnit.MINUTES, + new ArrayBlockingQueue<>(getReaderWorkers() * getBufferPerWorker()), + new ThreadPoolExecutor.CallerRunsPolicy() + ); + } + + return readerExecutorService; + } + + /** + * Returns this.directory if the list is empty. + */ + @NonNull + @JsonIgnore + private File getStorageDir(String pathName) { + return getDirectory().resolve(pathName).toFile(); + } + private void removeEnvironment(Environment env) { log.info("Removed last XodusStore in Environment. Removing Environment as well: {}", env.getLocation()); - final List xodusStore= env.computeInReadonlyTransaction(env::getAllStoreNames); + final List xodusStore = env.computeInReadonlyTransaction(env::getAllStoreNames); - if (!xodusStore.isEmpty()){ + if (!xodusStore.isEmpty()) { throw new IllegalStateException("Cannot delete environment, because it still contains these stores:" + xodusStore); } @@ -439,31 +398,54 @@ private void removeEnvironment(Environment env) { } } - public Store createStore(Environment environment, Validator validator, StoreMappings storeId, ObjectMapper objectMapper) { - final StoreInfo storeInfo = storeId.storeInfo(); - synchronized (openStoresInEnv) { + private List loadNamespacedStores(String prefix, Function creator, Set storesToTest) { + final File baseDir = getDirectory().toFile(); - if(openStoresInEnv.get(environment).stream().map(XodusStore::getName).anyMatch(name -> storeInfo.getName().equals(name))){ - throw new IllegalStateException("Attempted to open an already opened store:" + storeInfo.getName()); - } + if (baseDir.mkdirs()) { + log.warn("Had to create Storage Dir at `{}`", baseDir); + } - final XodusStore store = new XodusStore(environment, storeInfo.getName(), this::closeStore, this::removeStore); + final List storages = new 
ArrayList<>(); - openStoresInEnv.put(environment, store); + for (File directory : Objects.requireNonNull(baseDir.listFiles((file, name) -> file.isDirectory() && name.startsWith(prefix)))) { - return new CachedStore<>( - new SerializingStore<>( - store, - validator, - objectMapper, - storeInfo.getKeyType(), - storeInfo.getValueType(), - isValidateOnWrite(), - isRemoveUnreadableFromStore(), - getUnreadableDataDumpDirectory(), - getReaderExecutorService() - )); + final String name = directory.getName(); + + ConqueryMDC.setLocation(directory.toString()); + + try (Environment environment = registry.findOrCreateEnvironment(directory, xodus)) { + if (!environmentHasStores(environment, storesToTest)) { + log.warn("No valid {}storage found in {}", prefix, directory); + continue; + } + } + + final T namespacedStorage = creator.apply(name); + + storages.add(namespacedStorage); } + + return storages; } + private boolean environmentHasStores(Environment env, Set storesToTest) { + return env.computeInTransaction(t -> { + final List allStoreNames = env.getAllStoreNames(t); + final boolean complete = new HashSet<>(allStoreNames).containsAll(storesToTest); + if (complete) { + log.trace("Storage contained all stores: {}", storesToTest); + return true; + } + + final HashSet missing = Sets.newHashSet(storesToTest); + allStoreNames.forEach(missing::remove); + log.warn("Environment did not contain all required stores. It is missing: {}. It had {}. {}", missing, allStoreNames, + loadEnvironmentWithMissingStores + ? "Loading environment anyway." + : "Skipping environment." 
+ ); + + return loadEnvironmentWithMissingStores; + }); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java index bd20ea676d..68ab45af2f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Column.java @@ -1,19 +1,20 @@ package com.bakdata.conquery.models.datasets; import javax.annotation.Nullable; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.frontend.FrontendValue; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.config.IndexConfig; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.util.search.TrieSearch; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; @@ -50,12 +51,12 @@ public class Column extends Labeled implements NamespacedIdentifiable< * if this is set this column counts as the secondary id of the given name for this * table */ - @NsIdRef - private SecondaryIdDescription secondaryId; + private SecondaryIdDescriptionId secondaryId; + @JsonIgnore @Override - public ColumnId createId() { - return new ColumnId(table.getId(), getName()); + public DatasetId getDataset() { + return table.getDataset(); } @Override @@ -63,13 +64,6 @@ 
public String toString() { return "Column(id = " + getId() + ", type = " + getType() + ")"; } - @JsonIgnore - @Override - public Dataset getDataset() { - return table.getDataset(); - } - - /** * We create only an empty search here, because the content is provided through {@link com.bakdata.conquery.models.messages.namespaces.specific.RegisterColumnValues} and filled by the caller. */ @@ -79,6 +73,16 @@ public TrieSearch createTrieSearch(IndexConfig config) { } public void init() { + if (getPosition() >= 0) { + // Column was initialized + return; + } + position = ArrayUtils.indexOf(getTable().getColumns(), this); } + + @Override + public ColumnId createId() { + return new ColumnId(table.getId(), getName()); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java index 47bf5d0056..432cbb1170 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Dataset.java @@ -8,42 +8,59 @@ import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.auth.permissions.Authorized; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; -import com.bakdata.conquery.models.auth.permissions.DatasetPermission; import com.bakdata.conquery.models.identifiable.Labeled; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.OptBoolean; +import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @Getter @Setter @NoArgsConstructor +@EqualsAndHashCode(callSuper = true) public class 
Dataset extends Labeled implements Injectable, Authorized, NamespacedIdentifiable { - public Dataset(String name) { - setName(name); - } /** * Used to programmatically generate proper {@link com.bakdata.conquery.models.identifiable.ids.NamespacedId}s. */ public static final Dataset PLACEHOLDER = new Dataset("PLACEHOLDER"); - - public static boolean isAllIdsTable(Table table){ - return table.getName().equalsIgnoreCase(ConqueryConstants.ALL_IDS_TABLE); - } - /** * Sorting weight for Frontend. */ private int weight; + /** + * Resolver for {@link com.bakdata.conquery.models.identifiable.ids.NamespacedId}s. + * It is usually injected when this object is loaded from a store, or set manually, when it is created. + **/ + @JacksonInject(useInput = OptBoolean.FALSE) + @Getter + @Setter + @JsonIgnore + @EqualsAndHashCode.Exclude + private transient NamespacedStorageProvider namespacedStorageProvider; + + public Dataset(String name) { + setName(name); + } + + public static boolean isAllIdsTable(Table table){ + return table.getName().equalsIgnoreCase(ConqueryConstants.ALL_IDS_TABLE); + } + @JsonIgnore public Table getAllIdsTable() { //TODO store this somehow? 
/ Add this at dataset creation final Table table = new Table(); - table.setDataset(this); + table.setDataset(this.getId()); table.setName(ConqueryConstants.ALL_IDS_TABLE); + + // We could use the resolvers of this dataset, but actually this table's id should never be resolved return table; } @@ -54,17 +71,19 @@ public MutableInjectableValues inject(MutableInjectableValues mutableInjectableV @Override public DatasetId createId() { - return new DatasetId(getName()); + DatasetId datasetId = new DatasetId(getName()); + datasetId.setNamespacedStorageProvider(getNamespacedStorageProvider()); + return datasetId; } @Override public ConqueryPermission createPermission(Set abilities) { - return DatasetPermission.onInstance(abilities,getId()); + return getId().createPermission(abilities); } @JsonIgnore @Override - public Dataset getDataset() { - return this; + public DatasetId getDataset() { + return this.getId(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java index aeee1a4e4a..73eafe2af5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Import.java @@ -4,10 +4,11 @@ import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; @@ -27,8 +28,7 @@ public class Import extends NamedImpl 
implements NamespacedIdentifiabl @Valid @NotNull - @NsIdRef - private final Table table; + private final TableId table; private long numberOfEntities; @@ -41,7 +41,7 @@ public class Import extends NamedImpl implements NamespacedIdentifiabl @Override public ImportId createId() { - return new ImportId(table.getId(), getName()); + return new ImportId(table, getName()); } public long estimateMemoryConsumption() { @@ -54,7 +54,7 @@ public long estimateMemoryConsumption() { @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getTable().getDataset(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java index a00bfbfb9e..b4117fd28f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/ImportColumn.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.models.events.stores.root.ColumnStore; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ImportColumnId; import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonCreator; @@ -45,7 +46,7 @@ public String toString() { @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return parent.getDataset(); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java index e43fbd8d06..568a31adfd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java @@ -3,17 
+3,21 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Optional; +import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.auth.entities.Subject; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; @@ -28,18 +32,11 @@ import com.google.common.collect.MoreCollectors; import com.google.common.collect.Sets; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; -import jakarta.ws.rs.core.UriBuilder; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; -/** - * @implNote I am using ids as references here instead of {@link NsIdRef} because I want the PreviewConfig to be pretty soft, instead of having to manage it as a dependent for Concepts and Tables. - */ @Data @Slf4j @AllArgsConstructor @@ -54,7 +51,6 @@ public class PreviewConfig { * @implSpec the order of selects is the order of the output fields. 
*/ @Valid - @NotNull private List infoCardSelects = List.of(); @Valid @@ -63,13 +59,11 @@ public class PreviewConfig { /** * Columns that should not be displayed to users in entity preview. */ - @NotNull private Set hidden = Collections.emptySet(); /** * SecondaryIds where the columns should be grouped together. */ - @NotNull private Set grouping = Collections.emptySet(); /** @@ -77,13 +71,11 @@ public class PreviewConfig { * * @implNote This is purely for the frontend, the backend can theoretically be queried for all Connectors. */ - @NotNull private Set allConnectors = Collections.emptySet(); /** * Connectors that shall be selected by default by the frontend. */ - @NotNull private Set defaultConnectors = Collections.emptySet(); /** @@ -93,7 +85,6 @@ public class PreviewConfig { *

* The Frontend will use the concepts filters to render a search for entity preview. */ - @NotNull private Set searchFilters = Collections.emptySet(); @JacksonInject(useInput = OptBoolean.FALSE) @@ -120,19 +111,21 @@ public record InfoCardSelect(@NotNull String label, SelectId select, String desc /** * Defines a group of selects that will be evaluated per quarter and year in the requested period of the entity-preview. */ - public record TimeStratifiedSelects(@NotNull String label, String description, @NotEmpty List selects){ + public record TimeStratifiedSelects(@NotNull String label, String description, @NotEmpty List selects) { } @ValidationMethod(message = "Selects may be referenced only once.") @JsonIgnore public boolean isSelectsUnique() { - return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::select).distinct().count() == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); + return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::select).distinct().count() + == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); } @ValidationMethod(message = "Labels must be unique.") @JsonIgnore public boolean isLabelsUnique() { - return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::label).distinct().count() == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); + return timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).map(InfoCardSelect::label).distinct().count() + == timeStratifiedSelects.stream().map(TimeStratifiedSelects::selects).flatMap(Collection::stream).count(); } @JsonIgnore @@ -186,31 +179,34 @@ public String resolveSelectLabel(SelectResultInfo info) { public 
Listresolve) + .filter(Objects::nonNull) .collect(Collectors.toList()); } - public List> resolveSearchFilters() { - return getSearchFilters().stream() - .map(filterId -> getDatasetRegistry().findRegistry(filterId.getDataset()).getOptional(filterId)) - .flatMap(Optional::stream) + public List resolveSearchFilters() { + if (searchFilters == null) { + return Collections.emptyList(); + } + + return searchFilters.stream() + .map(FilterId::resolve) + .filter(Objects::nonNull) + .map(Filter::getId) .toList(); } - public Concept resolveSearchConcept() { - if (getSearchFilters().isEmpty()) { + public ConceptId resolveSearchConcept() { + if (searchFilters == null) { return null; } - return getSearchFilters().stream() - .map(filterId -> getDatasetRegistry().findRegistry(filterId.getDataset()).getOptional(filterId)) - .flatMap(Optional::stream) + + return searchFilters.stream() + .map(FilterId::>resolve) .map(filter -> filter.getConnector().getConcept()) .distinct() + .map(Concept::getId) .collect(MoreCollectors.onlyElement()); } - - - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java index adc8ae83ed..609e1496ef 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/SecondaryIdDescription.java @@ -1,11 +1,11 @@ package com.bakdata.conquery.models.datasets; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import 
com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; -import com.bakdata.conquery.models.index.InternToExternMapper; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import lombok.Getter; import lombok.NoArgsConstructor; @@ -17,14 +17,12 @@ @JsonIgnoreProperties({"searchDisabled", "generateSearchSuffixes", "searchMinSuffixLength"}) public class SecondaryIdDescription extends Labeled implements NamespacedIdentifiable { - @NsIdRef - private Dataset dataset; + private DatasetId dataset; private String description; - @NsIdRef @View.ApiManagerPersistence - private InternToExternMapper mapping; + private InternToExternMapperId mapping; /** * If true, SecondaryId will not be displayed to the user or listed in APIs. @@ -33,7 +31,7 @@ public class SecondaryIdDescription extends Labeled im @Override public SecondaryIdDescriptionId createId() { - return new SecondaryIdDescriptionId(dataset.getId(), getName()); + return new SecondaryIdDescriptionId(dataset, getName()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java index 26a5c19389..a856cf6f99 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/Table.java @@ -4,20 +4,25 @@ import java.util.HashSet; import java.util.Set; import java.util.stream.Stream; - import javax.annotation.CheckForNull; import javax.annotation.Nullable; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; +import com.bakdata.conquery.io.jackson.Initializing; import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.config.DatabaseConfig; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import 
com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; +import com.fasterxml.jackson.annotation.OptBoolean; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -25,19 +30,24 @@ @Getter @Setter @Slf4j -public class Table extends Labeled implements NamespacedIdentifiable { +@JsonDeserialize(converter = Table.Initializer.class) +public class Table extends Labeled implements NamespacedIdentifiable, Initializing { // TODO: 10.01.2020 fk: register imports here? - @NsIdRef - private Dataset dataset; + private DatasetId dataset; + + @JacksonInject(useInput = OptBoolean.FALSE) + @JsonIgnore + private NamespacedStorage storage; + @NotNull @Valid @JsonManagedReference private Column[] columns = new Column[0]; /** * Defines the primary key/column of this table. Only required for SQL mode. - * If unset {@link ...SqlConnectorConfig#primaryColumn} is assumed. + * If unset {@link DatabaseConfig#getPrimaryColumn()} is assumed. 
*/ @Nullable @JsonManagedReference @@ -46,9 +56,9 @@ public class Table extends Labeled implements NamespacedIdentifiable secondaryIds = new HashSet<>(); + final Set secondaryIds = new HashSet<>(); for (Column column : columns) { - final SecondaryIdDescription secondaryId = column.getSecondaryId(); + final SecondaryIdDescriptionId secondaryId = column.getSecondaryId(); if (secondaryId != null && !secondaryIds.add(secondaryId)) { log.error("{} is duplicated", secondaryId); return false; @@ -73,11 +83,12 @@ public boolean isDistinctLabels() { @Override public TableId createId() { - return new TableId(dataset.getId(), getName()); + return new TableId(dataset, getName()); } public Stream findImports(NamespacedStorage storage) { - return storage.getAllImports().stream().filter(imp -> imp.getTable().equals(this)); + TableId thisId = this.getId(); + return storage.getAllImports().filter(imp -> imp.getTable().equals(thisId)); } public Column getColumnByName(@NotNull String columnName) { @@ -91,7 +102,7 @@ public Column getColumnByName(@NotNull String columnName) { * selects the right column for the given secondaryId from this table */ @CheckForNull - public Column findSecondaryIdColumn(SecondaryIdDescription secondaryId) { + public Column findSecondaryIdColumn(SecondaryIdDescriptionId secondaryId) { for (Column col : columns) { if (col.getSecondaryId() == null || !secondaryId.equals(col.getSecondaryId())) { @@ -104,4 +115,18 @@ public Column findSecondaryIdColumn(SecondaryIdDescription secondaryId) { return null; } + @Override + public void init() { + if (dataset == null) { + dataset = storage.getDataset().getId(); + } else if (storage != null && !dataset.equals(storage.getDataset().getId())) { + throw new IllegalStateException("Datasets don't match. Namespace: %s Table: %s".formatted(storage.getDataset().getId(), dataset)); + } + + for (Column column : columns) { + column.init(); + } + } + + public static class Initializer extends Initializing.Converter

{} } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java index 4fcb77d2ae..d421668ea6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Concept.java @@ -4,17 +4,21 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; +import jakarta.validation.Valid; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.auth.permissions.Authorized; -import com.bakdata.conquery.models.auth.permissions.ConceptPermission; import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.models.exceptions.ConfigurationException; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryPlanContext; import com.bakdata.conquery.models.query.queryplan.QPNode; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -25,7 +29,6 @@ import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import jakarta.validation.Valid; import 
lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -53,8 +56,15 @@ public abstract class Concept extends ConceptElemen @Valid private List connectors = Collections.emptyList(); - @NsIdRef - private Dataset dataset; + private DatasetId dataset; + + /** + * rawValue is expected to be an Integer, expressing a localId for {@link TreeConcept#getElementByLocalId(int)}. + * + *

+ * If {@link PrintSettings#isPrettyPrint()} is false, {@link ConceptElement#getId()} is used to print. + */ + public abstract String printConceptLocalId(Object rawValue, PrintSettings printSettings); public List getDefaultSelects() { public abstract List getSelects(); + public Select getSelectByName(String name) { + for (Select select : getSelects()) { + if (select.getName().equals(name)) { + return select; + } + } + return null; + } + + public void initElements() throws ConfigurationException, JSONException { + getSelects().forEach(Select::init); + getConnectors().forEach(CONNECTOR::init); + } + @Override @JsonIgnore public Concept getConcept() { @@ -70,7 +94,7 @@ public Concept getConcept() { @Override public ConceptId createId() { - return new ConceptId(dataset.getId(), getName()); + return new ConceptId(dataset, getName()); } public int countElements() { @@ -90,6 +114,17 @@ public QPNode createConceptQuery(QueryPlanContext context, List> f @Override public ConqueryPermission createPermission(Set abilities) { - return ConceptPermission.onInstance(abilities, getId()); + return getId().createPermission(abilities); } + + public CONNECTOR getConnectorByName(String name) { + for (CONNECTOR connector : connectors) { + if (connector.getName().equals(name)) { + return connector; + } + } + return null; + } + + public abstract ConceptElement findById(ConceptElementId id); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java index 3b458d9696..037bda5cba 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ConceptElement.java @@ -2,7 +2,6 @@ import java.util.Collections; import java.util.List; - import javax.annotation.CheckForNull; import com.bakdata.conquery.apiv1.KeyValue; @@ -14,22 +13,17 @@ import 
lombok.Getter; import lombok.Setter; +@Setter +@Getter @EqualsAndHashCode(callSuper = true) public abstract class ConceptElement>> extends Labeled implements NamespacedIdentifiable { - @Getter - @Setter private String description; - @Getter - @Setter private List additionalInfos = Collections.emptyList(); /** * Initialize this only when needed. It is not needed */ - @Getter - @Setter - @JsonIgnore @CheckForNull private MatchingStats matchingStats; diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java index 6baf7f2154..a077550599 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/Connector.java @@ -10,8 +10,6 @@ import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; @@ -19,9 +17,17 @@ import com.bakdata.conquery.models.identifiable.IdMap; import com.bakdata.conquery.models.identifiable.Labeled; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; -import com.fasterxml.jackson.annotation.*; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; +import com.fasterxml.jackson.annotation.JsonAlias; +import com.fasterxml.jackson.annotation.JsonBackReference; +import 
com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonManagedReference; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMultiset; import com.google.common.collect.Multiset.Entry; import io.dropwizard.validation.ValidationMethod; @@ -61,26 +67,28 @@ public abstract class Connector extends Labeled implements SelectHo @Setter(AccessLevel.NONE) @Valid private transient IdMap> allFiltersMap; - - public Collection> getFilters() { - return allFiltersMap.values(); - } - @NotNull @Getter @Setter @JsonManagedReference @Valid private List

collectRequiredTables() { - return this.getHolder().findConcept().getConnectors().stream().map(Connector::getTable).collect(Collectors.toSet()); + return this.getHolder().findConcept().getConnectors().stream().map(Connector::getResolvedTable).collect(Collectors.toSet()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java index 5f1ad1b075..9d32279997 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/DistinctSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.AllValuesAggregator; @@ -21,19 +20,14 @@ public class DistinctSelect extends MappableSingleColumnSelect { @JsonCreator - public DistinctSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public DistinctSelect(ColumnId column, + InternToExternMapperId mapping) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new 
AllValuesAggregator<>(getColumn()); - } - - @Override - public ResultType getResultType() { - return new ResultType.ListT(super.getResultType()); + return new AllValuesAggregator<>(getColumn().resolve()); } @Override @@ -42,11 +36,16 @@ public SelectConverter createConverter() { } @Override - public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { if(getMapping() == null){ return super.createPrinter(printerFactory, printSettings); } - return printerFactory.getListPrinter(new MappedPrinter(getMapping()), printSettings); + return printerFactory.getListPrinter(new MappedPrinter(getMapping().resolve()), printSettings); + } + + @Override + public ResultType getResultType() { + return new ResultType.ListT<>(super.getResultType()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java index e4e4288478..0ca305e6aa 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/FirstValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.FirstValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.FirstValueSelectConverter; @@ -16,14 +15,16 @@ public class FirstValueSelect extends MappableSingleColumnSelect { @JsonCreator - public FirstValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public FirstValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new FirstValueAggregator<>(getColumn()); + return new FirstValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java index 1112b8bb9a..d976c6b803 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/LastValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import 
com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.LastValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.LastValueSelectConverter; @@ -16,14 +15,16 @@ public class LastValueSelect extends MappableSingleColumnSelect { @JsonCreator - public LastValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public LastValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new LastValueAggregator<>(getColumn()); + return new LastValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java index 458330c893..54e7104d7d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/RandomValueSelect.java @@ -1,11 +1,10 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import 
com.bakdata.conquery.models.query.queryplan.aggregators.specific.value.RandomValueAggregator; import com.bakdata.conquery.sql.conversion.model.select.RandomValueSelectConverter; @@ -15,14 +14,16 @@ @CPSType(id = "RANDOM", base = Select.class) public class RandomValueSelect extends MappableSingleColumnSelect { @JsonCreator - public RandomValueSelect(@NsIdRef Column column, - @NsIdRef InternToExternMapper mapping) { + public RandomValueSelect( + ColumnId column, + InternToExternMapperId mapping + ) { super(column, mapping); } @Override public Aggregator createAggregator() { - return new RandomValueAggregator<>(getColumn()); + return new RandomValueAggregator<>(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java index 98b0c82f81..4e4e0c39c7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/SingleColumnSelect.java @@ -4,18 +4,19 @@ import java.util.EnumSet; import java.util.List; import java.util.Set; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import 
com.bakdata.conquery.models.types.SemanticType; import com.fasterxml.jackson.annotation.JsonIgnore; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; @@ -29,10 +30,9 @@ @Getter public abstract class SingleColumnSelect extends Select { - @NsIdRef @NotNull @NonNull - private Column column; + private ColumnId column; /** * Indicates if the values in the specified column belong to a categorical set @@ -60,7 +60,7 @@ public SelectResultInfo getResultInfo(CQConcept cqConcept) { @Nullable @Override - public List getRequiredColumns() { + public List getRequiredColumns() { return List.of(getColumn()); } @@ -68,11 +68,12 @@ public List getRequiredColumns() { @ValidationMethod(message = "Column does not match required Type.") public boolean isValidColumnType() { - if (getAcceptedColumnTypes().contains(getColumn().getType())) { + MajorTypeId type = getColumn().resolve().getType(); + if (getAcceptedColumnTypes().contains(type)) { return true; } - log.error("Column[{}] is of Type[{}]. Not one of [{}]", column.getId(), column.getType(), getAcceptedColumnTypes()); + log.error("Column[{}] is of Type[{}]. 
Not one of [{}]", column, type, getAcceptedColumnTypes()); return false; } @@ -81,11 +82,12 @@ public boolean isValidColumnType() { @ValidationMethod(message = "Columns is not for Connectors' Table.") public boolean isForConnectorTable() { - if (getColumn().getTable().equals(((Connector) getHolder()).getTable())) { + Table resolvedTable = ((Connector) getHolder()).getResolvedTable(); + if (getColumn().getTable().equals(resolvedTable.getId())) { return true; } - log.error("Column[{}] ist not for Table[{}]", column.getId(), ((Connector) getHolder()).getTable()); + log.error("Column[{}] ist not for Table[{}]", column, resolvedTable); return false; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java index 5e446bf403..a03861dd07 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountQuartersSelect.java @@ -1,14 +1,14 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountQuartersOfDateRangeAggregator; import 
com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountQuartersOfDatesAggregator; @@ -30,18 +30,15 @@ @CPSType(id = "COUNT_QUARTERS", base = Select.class) public class CountQuartersSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (isSingleColumnDaterange()) { return List.of(column); } @@ -50,11 +47,12 @@ public List getRequiredColumns() { @Override public Aggregator createAggregator() { - return switch (getColumn().getType()) { - case DATE_RANGE -> new CountQuartersOfDateRangeAggregator(getColumn()); - case DATE -> new CountQuartersOfDatesAggregator(getColumn()); - default -> - throw new IllegalArgumentException(String.format("Column '%s' is not of Date (-Range) Type but '%s'", getColumn(), getColumn().getType())); + final Column column = getColumn().resolve(); + final MajorTypeId typeId = column.getType(); + return switch (typeId) { + case DATE_RANGE -> new CountQuartersOfDateRangeAggregator(column); + case DATE -> new CountQuartersOfDatesAggregator(column); + default -> throw new IllegalArgumentException(String.format("Column '%s' is not of Date (-Range) Type but '%s'", getColumn(), typeId)); }; } @@ -67,5 +65,4 @@ public ResultType getResultType() { public SelectConverter createConverter() { return new CountQuartersSqlAggregator(); } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java index dd4b351e23..0c20e77498 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java 
+++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/CountSelect.java @@ -3,19 +3,18 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.DistinctValuesWrapperAggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.CountAggregator; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.conversion.model.aggregator.CountSqlAggregator; import com.bakdata.conquery.sql.conversion.model.select.SelectConverter; -import jakarta.validation.constraints.NotNull; import lombok.Data; import lombok.NoArgsConstructor; import org.jetbrains.annotations.Nullable; @@ -27,31 +26,31 @@ public class CountSelect extends Select { private boolean distinct = false; - @NsIdRefCollection @NotNull - private List distinctByColumn = Collections.emptyList(); + private List distinctByColumn = Collections.emptyList(); + - @NsIdRef @NotNull - private Column column; + private ColumnId column; @Override public Aggregator createAggregator() { + final Column resolved = getColumn().resolve(); if (!isDistinct()) { - return new CountAggregator(getColumn()); + return new CountAggregator(resolved); } if (distinctByColumn != null && !getDistinctByColumn().isEmpty()) { - return new DistinctValuesWrapperAggregator<>(new CountAggregator(getColumn()), getDistinctByColumn()); + return new DistinctValuesWrapperAggregator(new 
CountAggregator(resolved), getDistinctByColumn().stream().map(ColumnId::resolve).toList()); } - return new DistinctValuesWrapperAggregator<>(new CountAggregator(getColumn()), List.of(getColumn())); + return new DistinctValuesWrapperAggregator(new CountAggregator(resolved), List.of(getColumn().resolve())); } @Nullable @Override - public List getRequiredColumns() { - final List out = new ArrayList<>(); + public List getRequiredColumns() { + final List out = new ArrayList<>(); out.add(getColumn()); if (distinctByColumn != null) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java index aa164acc16..4747fef16c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateDistanceSelect.java @@ -2,20 +2,19 @@ import java.time.temporal.ChronoUnit; import java.util.EnumSet; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DateDistanceAggregator; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.sql.conversion.model.aggregator.DateDistanceSqlAggregator; import 
com.bakdata.conquery.sql.conversion.model.select.SelectConverter; import com.fasterxml.jackson.annotation.JsonCreator; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; @@ -28,7 +27,7 @@ public class DateDistanceSelect extends SingleColumnSelect { private ChronoUnit timeUnit = ChronoUnit.YEARS; @JsonCreator - public DateDistanceSelect(@NsIdRef Column column) { + public DateDistanceSelect(ColumnId column) { super(column); } @@ -39,7 +38,7 @@ public EnumSet getAcceptedColumnTypes() { @Override public Aggregator createAggregator() { - return new DateDistanceAggregator(getColumn(), getTimeUnit()); + return new DateDistanceAggregator(getColumn().resolve(), getTimeUnit()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java index a3a9ef9775..127fa40e1e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DateUnionSelect.java @@ -1,14 +1,12 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.DateUnionAggregator; import 
com.bakdata.conquery.models.types.ResultType; @@ -27,18 +25,15 @@ @JsonIgnoreProperties("categorical") public class DateUnionSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (column != null) { return List.of(column); } @@ -48,7 +43,7 @@ public List getRequiredColumns() { @Override public Aggregator createAggregator() { // TODO fix this for 2 columns - return new DateUnionAggregator(getColumn()); + return new DateUnionAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java index a52ca3634c..d0f35c2325 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/DurationSumSelect.java @@ -1,14 +1,12 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; import java.util.List; - import javax.annotation.Nullable; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.DaterangeSelectOrFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import 
com.bakdata.conquery.models.query.queryplan.aggregators.specific.DurationSumAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -27,18 +25,15 @@ @JsonIgnoreProperties("categorical") public class DurationSumSelect extends Select implements DaterangeSelectOrFilter { - @NsIdRef @Nullable - private Column column; - @NsIdRef + private ColumnId column; @Nullable - private Column startColumn; - @NsIdRef + private ColumnId startColumn; @Nullable - private Column endColumn; + private ColumnId endColumn; @Override - public List getRequiredColumns() { + public List getRequiredColumns() { if (column != null) { return List.of(column); } @@ -47,7 +42,7 @@ public List getRequiredColumns() { @Override public Aggregator createAggregator() { - return new DurationSumAggregator(getColumn()); + return new DurationSumAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java index a810c8e984..2f79121b10 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/FlagSelect.java @@ -1,15 +1,15 @@ package com.bakdata.conquery.models.datasets.concepts.select.connector.specific; -import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.MultiSelectFilter; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.FlagsAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -34,18 +34,18 @@ @ToString public class FlagSelect extends Select { - @NsIdRefCollection - private final Map flags; + private final Map flags; @Override - public List getRequiredColumns() { - return new ArrayList<>(flags.values()); + public List getRequiredColumns() { + return flags.values().stream().toList(); } @Override public Aggregator createAggregator() { - return new FlagsAggregator(flags); + final Map collect = flags.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().resolve())); + return new FlagsAggregator(collect); } @@ -58,7 +58,7 @@ public boolean isAllColumnsOfSameTable() { @JsonIgnore @ValidationMethod(message = "Columns must be BOOLEAN.") public boolean isAllColumnsBoolean() { - return flags.values().stream().map(Column::getType).allMatch(MajorTypeId.BOOLEAN::equals); + return flags.values().stream().map(ColumnId::resolve).map(Column::getType).allMatch(MajorTypeId.BOOLEAN::equals); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java index af5d6ebe2d..d6eab22730 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/MappableSingleColumnSelect.java @@ -2,15 +2,14 @@ import java.util.Collections; import java.util.Set; - import javax.annotation.Nullable; +import jakarta.validation.Valid; import 
com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.query.resultinfo.printers.Printer; @@ -18,34 +17,40 @@ import com.bakdata.conquery.models.query.resultinfo.printers.common.MappedPrinter; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.types.SemanticType; -import jakarta.validation.Valid; import lombok.Getter; +@Getter public abstract class MappableSingleColumnSelect extends SingleColumnSelect { /** * If a mapping was provided the mapping changes the aggregator result before it is processed by a {@link com.bakdata.conquery.io.result.ResultRender.ResultRendererProvider}. 
*/ - @Getter @Valid @Nullable @View.ApiManagerPersistence - @NsIdRef - private final InternToExternMapper mapping; + private final InternToExternMapperId mapping; - public MappableSingleColumnSelect(Column column, @Nullable InternToExternMapper mapping) { + public MappableSingleColumnSelect(ColumnId column, @Nullable InternToExternMapperId mapping) { super(column); this.mapping = mapping; } @Override - public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { if (mapping == null) { return super.createPrinter(printerFactory, printSettings); } - return new MappedPrinter(getMapping()); + return new MappedPrinter(mapping.resolve()); + } + + @Override + public ResultType getResultType() { + if(mapping == null){ + return ResultType.resolveResultType(getColumn().resolve().getType()); + } + return ResultType.Primitive.STRING; } @Override @@ -58,17 +63,9 @@ public SelectResultInfo getResultInfo(CQConcept cqConcept) { return new SelectResultInfo(this, cqConcept, Set.of(new SemanticType.CategoricalT())); } - @Override - public ResultType getResultType() { - if (mapping == null) { - return ResultType.resolveResultType(getColumn().getType()); - } - return ResultType.Primitive.STRING; - } - public void loadMapping() { if (mapping != null) { - mapping.init(); + mapping.resolve().init(); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java index ccc50623dd..5fb858be2b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/PrefixSelect.java @@ -3,11 +3,10 @@ import java.util.EnumSet; import 
com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.PrefixTextAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -28,14 +27,14 @@ public EnumSet getAcceptedColumnTypes() { private String prefix; @JsonCreator - public PrefixSelect(@NsIdRef Column column, String prefix) { + public PrefixSelect(ColumnId column, String prefix) { super(column); this.prefix = prefix; } @Override public Aggregator createAggregator() { - return new PrefixTextAggregator(getColumn(), prefix); + return new PrefixTextAggregator(getColumn().resolve(), prefix); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java index 43657445b5..1d3dbaf693 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/QuartersInYearSelect.java @@ -3,11 +3,10 @@ import java.util.EnumSet; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.connector.SingleColumnSelect; 
import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.specific.QuartersInYearAggregator; import com.bakdata.conquery.models.types.ResultType; @@ -25,13 +24,13 @@ public EnumSet getAcceptedColumnTypes() { } @JsonCreator - public QuartersInYearSelect(@NsIdRef Column column) { + public QuartersInYearSelect(ColumnId column) { super(column); } @Override public Aggregator createAggregator() { - return new QuartersInYearAggregator(getColumn()); + return new QuartersInYearAggregator(getColumn().resolve()); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java index 406d6164b6..bc50b24deb 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/connector/specific/SumSelect.java @@ -4,13 +4,13 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import com.bakdata.conquery.models.query.queryplan.aggregators.ColumnAggregator; import 
com.bakdata.conquery.models.query.queryplan.aggregators.DistinctValuesWrapperAggregator; @@ -28,7 +28,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; @@ -39,23 +38,19 @@ @NoArgsConstructor(onConstructor_ = @JsonCreator) public class SumSelect extends Select { - @NsIdRefCollection @NotNull - private List distinctByColumn = Collections.emptyList(); + private List distinctByColumn = Collections.emptyList(); - @NsIdRef @NotNull - private Column column; - - @NsIdRef - private Column subtractColumn; + private ColumnId column; + private ColumnId subtractColumn; - public SumSelect(Column column) { + public SumSelect(ColumnId column) { this(column, null); } - public SumSelect(Column column, Column subtractColumn) { + public SumSelect(ColumnId column, ColumnId subtractColumn) { this.column = column; this.subtractColumn = subtractColumn; } @@ -63,33 +58,35 @@ public SumSelect(Column column, Column subtractColumn) { @Override public Aggregator createAggregator() { if (distinctByColumn != null && !distinctByColumn.isEmpty()) { - return new DistinctValuesWrapperAggregator<>(getAggregator(), getDistinctByColumn()); + return new DistinctValuesWrapperAggregator<>(getAggregator(), getDistinctByColumn().stream().map(ColumnId::resolve).toList()); } return getAggregator(); } private ColumnAggregator getAggregator() { + Column resolved = getColumn().resolve(); if (subtractColumn == null) { - return switch (getColumn().getType()) { - case INTEGER -> new IntegerSumAggregator(getColumn()); - case MONEY -> new MoneySumAggregator(getColumn()); - case DECIMAL -> new DecimalSumAggregator(getColumn()); - case REAL -> new RealSumAggregator(getColumn()); - default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", 
getColumn().getType())); + return switch (resolved.getType()) { + case INTEGER -> new IntegerSumAggregator(resolved); + case MONEY -> new MoneySumAggregator(resolved); + case DECIMAL -> new DecimalSumAggregator(resolved); + case REAL -> new RealSumAggregator(resolved); + default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", resolved.getType())); }; } - if (getColumn().getType() != getSubtractColumn().getType()) { - throw new IllegalStateException(String.format("Column types are not the same: Column %s\tSubstractColumn %s", getColumn().getType(), getSubtractColumn() + Column resolvedSubstract = getSubtractColumn().resolve(); + if (resolved.getType() != resolvedSubstract.getType()) { + throw new IllegalStateException(String.format("Column types are not the same: Column %s\tSubstractColumn %s", resolved.getType(), resolvedSubstract .getType())); } - return switch (getColumn().getType()) { - case INTEGER -> new IntegerDiffSumAggregator(getColumn(), getSubtractColumn()); - case MONEY -> new MoneyDiffSumAggregator(getColumn(), getSubtractColumn()); - case DECIMAL -> new DecimalDiffSumAggregator(getColumn(), getSubtractColumn()); - case REAL -> new RealDiffSumAggregator(getColumn(), getSubtractColumn()); - default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", getColumn().getType())); + return switch (resolved.getType()) { + case INTEGER -> new IntegerDiffSumAggregator(resolved, resolvedSubstract); + case MONEY -> new MoneyDiffSumAggregator(resolved, resolvedSubstract); + case DECIMAL -> new DecimalDiffSumAggregator(resolved, resolvedSubstract); + case REAL -> new RealDiffSumAggregator(resolved, resolvedSubstract); + default -> throw new IllegalStateException(String.format("Invalid column type '%s' for SUM Aggregator", resolved.getType())); }; } @@ -97,8 +94,8 @@ private ColumnAggregator getAggregator() { private static final EnumSet NUMBER_COMPATIBLE = 
EnumSet.of(MajorTypeId.INTEGER, MajorTypeId.MONEY, MajorTypeId.DECIMAL, MajorTypeId.REAL); @Override - public List getRequiredColumns() { - final List out = new ArrayList<>(); + public List getRequiredColumns() { + final List out = new ArrayList<>(); out.add(getColumn()); @@ -120,19 +117,19 @@ public SelectConverter createConverter() { @Override public ResultType getResultType() { - return ResultType.resolveResultType(getColumn().getType()); + return ResultType.resolveResultType(getColumn().resolve().getType()); } @ValidationMethod(message = "Column is not of Summable Type.") @JsonIgnore public boolean isSummableColumnType() { - return NUMBER_COMPATIBLE.contains(getColumn().getType()); + return NUMBER_COMPATIBLE.contains(getColumn().resolve().getType()); } @ValidationMethod(message = "Columns are not of same Type.") @JsonIgnore public boolean isColumnsOfSameType() { - return getSubtractColumn() == null || getSubtractColumn().getType().equals(getColumn().getType()); + return getSubtractColumn() == null || getSubtractColumn().resolve().getType().equals(getColumn().resolve().getType()); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java index 128dcf61bd..23588dc9c4 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeCache.java @@ -4,7 +4,6 @@ import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -36,15 +35,13 @@ public class ConceptTreeCache { * @implNote ConcurrentHashMap does not allow null values, but 
we want to have null values in the map. So we wrap the values in Optional. */ @JsonIgnore - private final Map>> cached = new ConcurrentHashMap<>();; + private final Map> cached = new ConcurrentHashMap<>();; /** * If id is already in cache, use that. If not calculate it by querying treeConcept. If rowMap was not used to query, cache the response. - * - * @param value */ - public ConceptElement findMostSpecificChild(String value, CalculatedValue> rowMap) throws ConceptConfigurationException { + public ConceptTreeChild findMostSpecificChild(String value, CalculatedValue> rowMap) throws ConceptConfigurationException { if(cached.containsKey(value)) { hits++; @@ -53,7 +50,7 @@ public ConceptElement findMostSpecificChild(String value, CalculatedValue child = treeConcept.findMostSpecificChild(value, rowMap); + final ConceptTreeChild child = treeConcept.findMostSpecificChild(value, rowMap); if(!rowMap.isCalculated()) { cached.put(value, Optional.ofNullable(child)); diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java index d9bfe7c94c..4ef04d6efe 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeChild.java @@ -3,16 +3,16 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptTreeChildId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import 
com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; -import jakarta.validation.constraints.NotNull; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; @@ -50,36 +50,15 @@ public void clearMatchingStats() { setMatchingStats(null); } - @Override - @JsonIgnore - public int[] getPrefix() { - if (prefix == null) { - int[] pPrefix = getParent().getPrefix(); - prefix = Arrays.copyOf(pPrefix, pPrefix.length + 1); - prefix[prefix.length - 1] = this.getLocalId(); - } - return prefix; - } - public void init() throws ConceptConfigurationException { if (condition != null) { condition.init(this); } } - @Override - public ConceptTreeChildId createId() { - return new ConceptTreeChildId(parent.getId(), getName()); - } - - @Override - public boolean matchesPrefix(int[] conceptPrefix) { - return conceptPrefix.length > depth && conceptPrefix[depth] == localId; - } - @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getConcept().getDataset(); } @@ -95,4 +74,44 @@ public TreeConcept getConcept() { } throw new IllegalStateException("The node " + this + " seems to have no root"); } + + @Override + @JsonIgnore + public int[] getPrefix() { + if (prefix == null) { + int[] pPrefix = getParent().getPrefix(); + prefix = Arrays.copyOf(pPrefix, pPrefix.length + 1); + prefix[prefix.length - 1] = this.getLocalId(); + } + return prefix; + } + + @Override + public boolean matchesPrefix(int[] conceptPrefix) { + return conceptPrefix.length > depth && conceptPrefix[depth] == localId; + } + + @Override + public ConceptTreeChildId createId() { + return new ConceptTreeChildId(parent.getId(), getName()); + } + + /** + * Parts only contains references to child elements. + * If parts is empty return self. 
+ * If the first part does not match the name of a child return null + */ + ConceptTreeChild findByParts(List parts) { + if (parts.isEmpty()) { + return this; + } + + for (ConceptTreeChild child : children) { + if (parts.get(0).equals(child.getName())) { + final List subList = parts.size() > 1 ? parts.subList(1, parts.size()) : Collections.emptyList(); + return child.findByParts(subList); + } + } + return null; + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java index bb00b8d103..26d09940c7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/ConceptTreeConnector.java @@ -2,45 +2,46 @@ import java.util.ArrayList; import java.util.List; - import javax.annotation.CheckForNull; +import jakarta.validation.Valid; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import io.dropwizard.validation.ValidationMethod; -import jakarta.validation.Valid; import lombok.Getter; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -@Getter @Setter +@Getter +@Setter @Slf4j public class ConceptTreeConnector extends Connector { private static final long 
serialVersionUID = 1L; - @NsIdRef @CheckForNull - private Table table; + @CheckForNull + private TableId table; - @NsIdRef @CheckForNull - private Column column = null; + @CheckForNull + private ColumnId column = null; private CTCondition condition = null; - @Valid @JsonManagedReference + @Valid + @JsonManagedReference private List> filters = new ArrayList<>(); @JsonIgnore @ValidationMethod(message = "Table and Column usage are exclusive") public boolean isTableXOrColumn() { - if(table != null){ + if (table != null) { return column == null; } @@ -49,17 +50,34 @@ public boolean isTableXOrColumn() { @JsonIgnore @ValidationMethod(message = "Column is not STRING.") - public boolean isColumnForTree(){ - return column == null || column.getType().equals(MajorTypeId.STRING); + public boolean isColumnForTree() { + return column == null || column.resolve().getType().equals(MajorTypeId.STRING); } - @Override @JsonIgnore - public Table getTable() { - if(column != null){ + @Override + @JsonIgnore + public Table getResolvedTable() { + if (column != null) { + return column.getTable().resolve(); + } + + if (table != null) { + return table.resolve(); + } + return null; + } + + @Override + @JsonIgnore + public TableId getResolvedTableId() { + if (column != null) { return column.getTable(); } - return table; + if (table != null) { + return table; + } + return null; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java index 8cbc0d80f4..d2772671c1 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/tree/TreeConcept.java @@ -6,23 +6,29 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Stream; +import jakarta.validation.Valid; +import 
jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.Initializing; -import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.io.jackson.View; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.SelectHolder; import com.bakdata.conquery.models.datasets.concepts.select.concept.UniversalSelect; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; +import com.bakdata.conquery.models.exceptions.ConfigurationException; +import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonManagedReference; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotNull; import lombok.Getter; +import lombok.NonNull; import lombok.Setter; import lombok.extern.slf4j.Slf4j; @@ -31,7 +37,7 @@ */ @Slf4j @CPSType(id = "TREE", base = Concept.class) -@JsonDeserialize(converter = TreeConcept.TreeConceptInitializer.class) +@JsonDeserialize(converter = TreeConcept.Initializer.class) public class TreeConcept extends Concept implements ConceptTreeNode, SelectHolder, Initializing { @JsonIgnore @@ -43,33 +49,31 @@ public class TreeConcept extends Concept implements Concep @JsonIgnore private final List> localIdMap = new ArrayList<>(); - + @JsonIgnore + private final Map caches = new ConcurrentHashMap<>(); @Getter @Setter @Valid private List children 
= Collections.emptyList(); - - @JsonIgnore + @View.Internal @Getter @Setter private int localId; - @NotNull @Getter @Setter @Valid @JsonManagedReference private List selects = new ArrayList<>(); - @JsonIgnore - private final Map caches = new ConcurrentHashMap<>(); + private int nChildren = -1; @Override public Concept findConcept() { return getConcept(); } - public ConceptTreeCache getCache(Import imp) { + public ConceptTreeCache getCache(ImportId imp) { return caches.get(imp); } @@ -89,47 +93,21 @@ public boolean matchesPrefix(int[] conceptPrefix) { return conceptPrefix != null && conceptPrefix[0] == 0; } - public void init() { - setLocalId(0); - localIdMap.add(this); - - final List openList = new ArrayList<>(getChildren()); - - for (ConceptTreeConnector con : getConnectors()) { - if (con.getCondition() == null) { - continue; - } - - try { - con.getCondition().init(this); - } catch (ConceptConfigurationException e) { - throw new RuntimeException("Unable to init condition", e); - } - } - - for (int i = 0; i < openList.size(); i++) { - final ConceptTreeChild ctc = openList.get(i); - - try { - ctc.setLocalId(localIdMap.size()); - localIdMap.add(ctc); - ctc.setDepth(ctc.getParent().getDepth() + 1); - - ctc.init(); - - } catch (Exception e) { - throw new RuntimeException("Error trying to consolidate the node " + ctc.getLabel() + " in " + getLabel(), e); - } - - openList.addAll((openList.get(i)).getChildren()); - } + @JsonIgnore + public Stream getAllChildren() { + return localIdMap.stream().filter(ConceptTreeChild.class::isInstance).map(ConceptTreeChild.class::cast); } - public ConceptElement findMostSpecificChild(String stringValue, CalculatedValue> rowMap) throws ConceptConfigurationException { + public ConceptTreeChild findMostSpecificChild(String stringValue, CalculatedValue> rowMap) throws ConceptConfigurationException { return findMostSpecificChild(stringValue, rowMap, null, getChildren()); } - private ConceptElement findMostSpecificChild(String stringValue, 
CalculatedValue> rowMap, ConceptElement best, List currentList) + private ConceptTreeChild findMostSpecificChild( + String stringValue, + CalculatedValue> rowMap, + ConceptTreeChild best, + List currentList + ) throws ConceptConfigurationException { while (currentList != null && !currentList.isEmpty()) { @@ -166,29 +144,11 @@ private ConceptElement findMostSpecificChild(String stringValue, CalculatedValue return best; } - @JsonIgnore - public Stream getAllChildren() { - return localIdMap.stream().filter(ConceptTreeChild.class::isInstance).map(ConceptTreeChild.class::cast); - } - - @JsonIgnore - private int nChildren = -1; - - @Override - @JsonIgnore - public int countElements() { - if (nChildren > 0) { - return nChildren; - } - - return nChildren = 1 + (int) getAllChildren().count(); - } - - public void initializeIdCache(Import importId) { + public void initializeIdCache(ImportId importId) { caches.computeIfAbsent(importId, id -> new ConceptTreeCache(this)); } - public void removeImportCache(Import imp) { + public void removeImportCache(ImportId imp) { caches.remove(imp); } @@ -199,7 +159,7 @@ public void removeImportCache(Import imp) { * @param ids the local id array to look for * @return the element matching the most specific local id in the array */ - public ConceptTreeNode getElementByLocalIdPath(int[] ids) { + public ConceptTreeNode getElementByLocalIdPath( int @NonNull [] ids) { final int mostSpecific = ids[ids.length - 1]; return getElementByLocalId(mostSpecific); } @@ -208,5 +168,110 @@ public ConceptTreeNode getElementByLocalId(int localId) { return localIdMap.get(localId); } - public static class TreeConceptInitializer extends Initializing.Converter {} + /** + * rawValue is expected to be an Integer, expressing a localId for {@link TreeConcept#getElementByLocalId(int)}. + *

+ * If {@link PrintSettings#isPrettyPrint()} is true, {@link ConceptElement#getLabel()} is used to print. + * If {@link PrintSettings#isPrettyPrint()} is false, {@link ConceptElement#getId()} is used to print. + */ + public String printConceptLocalId(Object rawValue, PrintSettings printSettings) { + + if (rawValue == null) { + return null; + } + + final int localId = (int) rawValue; + + final ConceptTreeNode node = getElementByLocalId(localId); + + if (!printSettings.isPrettyPrint()) { + return node.getId().toString(); + } + + if (node.getDescription() == null) { + return node.getLabel(); + } + + return node.getLabel() + " - " + node.getDescription(); + + } + + @Override + public void init() throws Exception { + initElements(); + } + + @Override + public void initElements() throws ConfigurationException, JSONException { + super.initElements(); + setLocalId(0); + localIdMap.add(this); + + final List openList = new ArrayList<>(getChildren()); + + for (ConceptTreeConnector con : getConnectors()) { + if (con.getCondition() == null) { + continue; + } + + con.getCondition().init(this); + } + + for (int i = 0; i < openList.size(); i++) { + final ConceptTreeChild ctc = openList.get(i); + + try { + ctc.setLocalId(localIdMap.size()); + localIdMap.add(ctc); + ctc.setDepth(ctc.getParent().getDepth() + 1); + + ctc.init(); + + } + catch (Exception e) { + throw new RuntimeException("Error trying to consolidate the node " + ctc.getLabel() + " in " + getLabel(), e); + } + + openList.addAll((openList.get(i)).getChildren()); + } + } + + @Override + @JsonIgnore + public int countElements() { + if (nChildren > 0) { + return nChildren; + } + + return nChildren = 1 + (int) getAllChildren().count(); + } + + public ConceptElement>> findById(ConceptElementId id) { + List parts = new ArrayList<>(); + id.collectComponents(parts); + final ConceptId conceptId = getId(); + List components = conceptId.getComponents(); + + // Check if dataset and concept name match + if 
(!(parts.get(0).equals(components.get(0)) && parts.get(1).equals(components.get(1)))) { + return null; + } + + if (parts.size() == 2) { + // Perfect match <3 + return this; + } + + for (ConceptTreeChild child : children) { + if (parts.get(2).equals(child.getName())) { + final List subParts = parts.size() > 3 ? parts.subList(3, parts.size()) : Collections.emptyList(); + return child.findByParts(subParts); + } + } + + return null; + } + + public static class Initializer extends Initializing.Converter { + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java index a5f9f733c8..45a4c0fe53 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java +++ b/backend/src/main/java/com/bakdata/conquery/models/error/ErrorMessages.java @@ -4,6 +4,7 @@ import c10n.annotations.En; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.query.entity.Entity; public interface ErrorMessages { @@ -59,4 +60,8 @@ public interface ErrorMessages { @En("Something went wrong while querying the database: ${0}.") @De("Etwas ist beim Anfragen des Servers fehlgeschlagen: ${0}.") String sqlError(Throwable error); + + @En("The id '${0}' could not be resolved'.") + @De("Die id '${0}' konnte nicht aufgelöst werden.") + String idUnresolvable(Id id); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java index 0972141c42..c78fa2f572 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/Bucket.java @@ -6,12 +6,13 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import 
java.util.function.IntFunction; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotNull; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Import; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; @@ -27,6 +28,9 @@ import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.preproc.PreprocessedData; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -35,8 +39,6 @@ import io.dropwizard.validation.ValidationMethod; import it.unimi.dsi.fastutil.objects.Object2IntMap; import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; -import jakarta.validation.constraints.Min; -import jakarta.validation.constraints.NotNull; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; @@ -56,31 +58,31 @@ @ToString(onlyExplicitlyIncluded = true, callSuper = true) @AllArgsConstructor @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}, access = AccessLevel.PROTECTED) - public class Bucket extends IdentifiableImpl implements NamespacedIdentifiable { @Min(0) private final int bucket; - - @ToString.Include - @JsonManagedReference - @Setter(AccessLevel.PROTECTED) - private ColumnStore[] stores; - /** * start of each Entity in {@code 
stores}. */ private final Object2IntMap start; - /** * Number of events per Entity in {@code stores}. */ private final Object2IntMap ends; - private final int numberOfEvents; + private final ImportId imp; + @ToString.Include + @JsonManagedReference + @Setter(AccessLevel.PROTECTED) + private ColumnStore[] stores; - @NsIdRef - private final Import imp; + public static Bucket fromPreprocessed(Table table, PreprocessedData container, Import imp) { + final ColumnStore[] storesSorted = sortColumns(table, container.getStores()); + final int numberOfEvents = container.getEnds().values().stream().mapToInt(i -> i).max().orElse(0); + + return new Bucket(container.getBucketId(), new Object2IntOpenHashMap<>(container.getStarts()), new Object2IntOpenHashMap<>(container.getEnds()), numberOfEvents, imp.getId(), storesSorted); + } private static ColumnStore[] sortColumns(Table table, Map stores) { return Arrays.stream(table.getColumns()) @@ -90,28 +92,15 @@ private static ColumnStore[] sortColumns(Table table, Map s .toArray(ColumnStore[]::new); } - public static Bucket fromPreprocessed(Table table, PreprocessedData container, Import imp) { - final ColumnStore[] storesSorted = sortColumns(table, container.getStores()); - final int numberOfEvents = container.getEnds().values().stream().mapToInt(i -> i).max().orElse(0); - - return new Bucket(container.getBucketId(), storesSorted, new Object2IntOpenHashMap<>(container.getStarts()), new Object2IntOpenHashMap<>(container.getEnds()),numberOfEvents, imp); - } - @JsonIgnore @ValidationMethod(message = "Number of events does not match the length of some stores.") public boolean isNumberOfEventsEqualsNumberOfStores() { return Arrays.stream(stores).allMatch(columnStore -> columnStore.getLines() == getNumberOfEvents()); } - - @JsonIgnore - public Table getTable() { - return imp.getTable(); - } - @Override public BucketId createId() { - return new BucketId(imp.getId(), bucket); + return new BucketId(imp, bucket); } /** @@ -126,10 +115,9 @@ 
public boolean containsEntity(String entity) { } public int getEntityStart(String entityId) { - return start.get(entityId); + return start.getInt(entityId); } - public int getEntityEnd(String entityId) { return ends.getInt(entityId); } @@ -138,14 +126,14 @@ public final boolean has(int event, Column column) { return getStore(column).has(event); } - public String getString(int event, @NotNull Column column) { - return ((StringStore) getStore(column)).getString(event); - } - public ColumnStore getStore(@NotNull Column column) { return stores[column.getPosition()]; } + public String getString(int event, @NotNull Column column) { + return ((StringStore) getStore(column)).getString(event); + } + public long getInteger(int event, @NotNull Column column) { return ((IntegerStore) getStore(column)).getInteger(event); } @@ -170,10 +158,6 @@ public int getDate(int event, @NotNull Column column) { return ((DateStore) getStore(column)).getDate(event); } - public CDateRange getDateRange(int event, Column column) { - return ((DateRangeStore) getStore(column)).getDateRange(event); - } - public boolean eventIsContainedIn(int event, ValidityDate validityDate, CDateSet dateRanges) { final CDateRange dateRange = validityDate.getValidityDate(event, this); @@ -192,11 +176,27 @@ public CDateRange getAsDateRange(int event, Column column) { }; } + public CDateRange getDateRange(int event, Column column) { + return ((DateRangeStore) getStore(column)).getDateRange(event); + } + public Object createScriptValue(int event, @NotNull Column column) { return getStore(column).createScriptValue(event); } - public Map calculateMap(int event) { + public IntFunction> mapCalculator(){ + Column[] columns = getTable().resolve().getColumns(); + + return event -> calculateMap(event, stores, columns); + + } + + @JsonIgnore + public TableId getTable() { + return imp.getTable(); + } + + private static Map calculateMap(int event, ColumnStore[] stores, Column[] columns) { final Map out = new 
HashMap<>(stores.length); for (int i = 0; i < stores.length; i++) { @@ -204,7 +204,7 @@ public Map calculateMap(int event) { if (!store.has(event)) { continue; } - out.put(getTable().getColumns()[i].getName(), store.createScriptValue(event)); + out.put(columns[i].getName(), store.createScriptValue(event)); } return out; @@ -212,11 +212,11 @@ public Map calculateMap(int event) { @JsonIgnore @Override - public Dataset getDataset() { + public DatasetId getDataset() { return getTable().getDataset(); } public ColumnStore getStore(@NotNull String storeName) { - return getStore(getTable().getColumnByName(storeName)); + return getStore(getTable().resolve().getColumnByName(storeName)); } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java index d7f2a99f4a..b17692f55e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java @@ -11,14 +11,16 @@ import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.models.common.CDateSet; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.CalculateCBlocksJob; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.query.entity.Entity; @@ -57,34 +59,35 @@ public class BucketManager { *

* Connector -> Bucket -> [BucketId -> CBlock] */ - private final Map>> connectorToCblocks; + private final Map>> connectorToCblocks; /** * Table -> BucketN -> [Buckets] */ - private final Map>> tableToBuckets; + private final Map>> tableToBuckets; @Getter private final int entityBucketSize; public static BucketManager create(Worker worker, WorkerStorage storage, int entityBucketSize) { - final Map>> connectorCBlocks = new HashMap<>(); - final Map>> tableBuckets = new HashMap<>(); + final Map>> connectorCBlocks = new HashMap<>(); + final Map>> tableBuckets = new HashMap<>(); final Object2IntMap entity2Bucket = new Object2IntOpenHashMap<>(); final IntArraySet assignedBucketNumbers = worker.getInfo().getIncludedBuckets(); log.trace("Trying to load these buckets that map to: {}", assignedBucketNumbers); - for (Bucket bucket : storage.getAllBuckets()) { + storage.getAllBuckets().forEach(bucket -> { + log.trace("Processing bucket {}", bucket.getId()); if (!assignedBucketNumbers.contains(bucket.getBucket())) { log.warn("Found Bucket[{}] in Storage that does not belong to this Worker according to the Worker information.", bucket.getId()); } registerBucket(bucket, entity2Bucket, tableBuckets); - } + }); - for (CBlock cBlock : storage.getAllCBlocks()) { - registerCBlock(cBlock, connectorCBlocks); - } + storage.getAllCBlocks().forEach(cBlock -> + registerCBlock(cBlock, connectorCBlocks) + ); return new BucketManager(worker.getJobManager(), storage, worker, entity2Bucket, connectorCBlocks, tableBuckets, entityBucketSize); } @@ -92,10 +95,10 @@ public static BucketManager create(Worker worker, WorkerStorage storage, int ent /** * register entities, and create query specific indices for bucket */ - private static void registerBucket(Bucket bucket, Object2IntMap entity2Bucket, Map>> tableBuckets) { + private static void registerBucket(Bucket bucket, Object2IntMap entity2Bucket, Map>> tableBuckets) { for (String entity : bucket.entities()) { - 
if(entity2Bucket.containsKey(entity)){ + if (entity2Bucket.containsKey(entity)) { // This is an unrecoverable state, but should not happen in practice. Just a precaution. assert entity2Bucket.getInt(entity) == bucket.getBucket(); continue; @@ -107,16 +110,16 @@ private static void registerBucket(Bucket bucket, Object2IntMap entity2B tableBuckets .computeIfAbsent(bucket.getTable(), id -> new Int2ObjectAVLTreeMap<>()) .computeIfAbsent(bucket.getBucket(), n -> new ArrayList<>()) - .add(bucket); + .add(bucket.getId()); } /** * Assert validity of operation, and create index for CBlocks. */ - private static void registerCBlock(CBlock cBlock, Map>> connectorCBlocks) { + private static void registerCBlock(CBlock cBlock, Map>> connectorCBlocks) { connectorCBlocks.computeIfAbsent(cBlock.getConnector(), connectorId -> new Int2ObjectAVLTreeMap<>()) .computeIfAbsent(cBlock.getBucket().getBucket(), bucketId -> new HashMap<>(3)) - .put(cBlock.getBucket(), cBlock); + .put(cBlock.getBucket(), cBlock.getId()); } @@ -124,33 +127,26 @@ private static void registerCBlock(CBlock cBlock, Map> allConcepts = storage.getAllConcepts(); + storage.getAllConcepts() + .filter(TreeConcept.class::isInstance) + .flatMap(concept -> concept.getConnectors().stream().map(ConceptTreeConnector.class::cast)) - log.info("BEGIN full update for {} concepts.", allConcepts.size()); + .forEach(connector -> storage.getAllBucketIds().forEach(bucketId -> { - for (Concept c : allConcepts) { - if (!(c instanceof TreeConcept)) { - continue; - } - for (ConceptTreeConnector con : ((TreeConcept) c).getConnectors()) { - for (Bucket bucket : storage.getAllBuckets()) { + final CBlockId cBlockId = new CBlockId(bucketId, connector.getId()); - final CBlockId cBlockId = new CBlockId(bucket.getId(), con.getId()); + if (!connector.getResolvedTableId().equals(bucketId.getImp().getTable())) { + return; + } - if (!con.getTable().equals(bucket.getTable())) { - continue; - } + if (hasCBlock(cBlockId)) { + log.trace("Skip calculation 
of CBlock[{}], because it was loaded from the storage.", cBlockId); + return; + } - if (hasCBlock(cBlockId)) { - log.trace("Skip calculation of CBlock[{}], because it was loaded from the storage.", cBlockId); - continue; - } - - log.trace("CBlock[{}] missing in Storage. Queuing recalculation", cBlockId); - job.addCBlock(bucket, con); - } - } - } + log.warn("CBlock[{}] missing in Storage. Queuing recalculation", cBlockId); + job.addCBlock(bucketId.resolve(), connector); + })); if (!job.isEmpty()) { jobManager.addSlowJob(job); @@ -171,32 +167,18 @@ public void addBucket(Bucket bucket) { final CalculateCBlocksJob job = new CalculateCBlocksJob(storage, this, worker.getJobsExecutorService()); - for (Concept concept : storage.getAllConcepts()) { - if (!(concept instanceof TreeConcept)) { - continue; - } - for (ConceptTreeConnector connector : ((TreeConcept) concept).getConnectors()) { - if (!connector.getTable().equals(bucket.getTable())) { - continue; - } - - final CBlockId cBlockId = new CBlockId(bucket.getId(), connector.getId()); - - - if (hasCBlock(cBlockId)) { - continue; - } - - job.addCBlock(bucket, connector); - - } - } + storage.getAllConcepts() + .filter(TreeConcept.class::isInstance) + .flatMap(concept -> concept.getConnectors().stream()) + .filter(connector -> connector.getResolvedTableId().equals(bucket.getTable())) + .filter(connector -> !hasCBlock(new CBlockId(bucket.getId(), connector.getId()))) + .forEach(connector -> job.addCBlock(bucket, (ConceptTreeConnector) connector)); jobManager.addSlowJob(job); } - public void removeTable(Table table) { - final Int2ObjectMap> removed = tableToBuckets.remove(table); + public void removeTable(TableId table) { + final Int2ObjectMap> removed = tableToBuckets.remove(table); // It's possible no buckets were registered yet if (removed != null) { @@ -206,85 +188,81 @@ public void removeTable(Table table) { .forEach(this::removeBucket); } - storage.removeTable(table.getId()); + storage.removeTable(table); } - public void 
removeBucket(Bucket bucket) { - storage.getAllCBlocks() - .stream() + public void removeBucket(BucketId bucket) { + storage.getAllCBlockIds() .filter(cblock -> cblock.getBucket().equals(bucket)) .forEach(this::removeCBlock); - tableToBuckets.getOrDefault(bucket.getTable(), Int2ObjectMaps.emptyMap()) + tableToBuckets.getOrDefault(bucket.getImp().getTable(), Int2ObjectMaps.emptyMap()) .getOrDefault(bucket.getBucket(), Collections.emptyList()) .remove(bucket); - storage.removeBucket(bucket.getId()); + storage.removeBucket(bucket); } - private void removeCBlock(CBlock cBlock) { + private void removeCBlock(CBlockId cBlock) { connectorToCblocks.getOrDefault(cBlock.getConnector(), Int2ObjectMaps.emptyMap()) .getOrDefault(cBlock.getBucket().getBucket(), Collections.emptyMap()) .values() .remove(cBlock); - storage.removeCBlock(cBlock.getId()); + storage.removeCBlock(cBlock); } public Set getEntities() { return Collections.unmodifiableSet(entity2Bucket.keySet()); } - private int getBucket(String id) { - return entity2Bucket.getInt(id); - } - /** * Remove all buckets comprising the import. Which will in-turn remove all CBLocks. 
*/ - public void removeImport(Import imp) { - storage.getAllBuckets() - .stream() + public void removeImport(ImportId imp) { + storage.getAllBucketIds() .filter(bucket -> bucket.getImp().equals(imp)) .forEach(this::removeBucket); - for (Concept concept : storage.getAllConcepts()) { - if (!(concept instanceof TreeConcept)) { - continue; - } + storage.getAllConcepts() + .filter(TreeConcept.class::isInstance) + .forEach(concept -> ((TreeConcept) concept).removeImportCache(imp)); - ((TreeConcept) concept).removeImportCache(imp); - } - storage.removeImport(imp.getId()); + storage.removeImport(imp); } - public List getEntityBucketsForTable(Entity entity, Table table) { + public List getEntityBucketsForTable(Entity entity, TableId table) { final int bucketId = getBucket(entity.getId()); return tableToBuckets.getOrDefault(table, Int2ObjectMaps.emptyMap()) .getOrDefault(bucketId, Collections.emptyList()); } + private int getBucket(String id) { + return entity2Bucket.getInt(id); + } + /** * Collects all Entites, that have any of the concepts on the connectors in a specific time. 
*/ - public Set getEntitiesWithConcepts(Collection> concepts, Set connectors, CDateSet restriction) { + public Set getEntitiesWithConcepts(Collection> concepts, Set connectors, CDateSet restriction) { final long requiredBits = ConceptNode.calculateBitMask(concepts); final Set out = new HashSet<>(); - for (Connector connector : connectors) { + for (ConnectorId connector : connectors) { if (!connectorToCblocks.containsKey(connector)) { continue; } - for (Map bucketCBlockMap : connectorToCblocks.get(connector).values()) { - for (CBlock cblock : bucketCBlockMap.values()) { - for (String entity : cblock.getBucket().entities()) { + for (Map bucketCBlockMap : connectorToCblocks.get(connector).values()) { + for (CBlockId cBlockId : bucketCBlockMap.values()) { + for (String entity : cBlockId.getBucket().resolve().entities()) { - if (cblock.isConceptIncluded(entity, requiredBits) && restriction.intersects(cblock.getEntityDateRange(entity))) { + CBlock cBlock = cBlockId.resolve(); + if (cBlock.isConceptIncluded(entity, requiredBits) && restriction.intersects(cBlock.getEntityDateRange(entity))) { out.add(entity); } } @@ -295,20 +273,20 @@ public Set getEntitiesWithConcepts(Collection> concept return out; } - public Map getEntityCBlocksForConnector(Entity entity, Connector connector) { + public Map getEntityCBlocksForConnector(Entity entity, ConnectorId connector) { final int bucketId = getBucket(entity.getId()); return connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()) .getOrDefault(bucketId, Collections.emptyMap()); } - public boolean hasEntityCBlocksForConnector(Entity entity, Connector connector) { + public boolean hasEntityCBlocksForConnector(Entity entity, ConnectorId connector) { final int bucketId = getBucket(entity.getId()); - final Map cblocks = connectorToCblocks.getOrDefault(connector, Int2ObjectMaps.emptyMap()) - .getOrDefault(bucketId, Collections.emptyMap()); + final Map cblocks = connectorToCblocks.getOrDefault(connector, 
Int2ObjectMaps.emptyMap()) + .getOrDefault(bucketId, Collections.emptyMap()); - for (Bucket bucket : cblocks.keySet()) { - if (bucket.containsEntity(entity.getId())) { + for (BucketId bucket : cblocks.keySet()) { + if (bucket.resolve().containsEntity(entity.getId())) { return true; } } @@ -329,14 +307,13 @@ public void removeConcept(Concept concept) { // Just drop all CBlocks at once for the connectors for (Connector connector : concept.getConnectors()) { - final Int2ObjectMap> removed = connectorToCblocks.remove(connector); + final Int2ObjectMap> removed = connectorToCblocks.remove(connector.getId()); // It's possible that no data has been loaded yet if (removed != null) { removed.values().stream() .map(Map::values) .flatMap(Collection::stream) - .map(CBlock::getId) .forEach(storage::removeCBlock); } } @@ -355,25 +332,11 @@ public void addConcept(Concept concept) { for (ConceptTreeConnector connector : ((TreeConcept) concept).getConnectors()) { - for (Bucket bucket : storage.getAllBuckets()) { - if (!bucket.getTable().equals(connector.getTable())) { - continue; - } - - final CBlockId cBlockId = new CBlockId(bucket.getId(), connector.getId()); - - if (hasCBlock(cBlockId)) { - continue; - } - - job.addCBlock(bucket, connector); - } + storage.getAllBuckets() + .filter(bucket -> bucket.getTable().equals(connector.getResolvedTableId())) + .filter(bucket -> !hasCBlock(new CBlockId(bucket.getId(), connector.getId()))) + .forEach(bucket -> job.addCBlock(bucket, connector)); } - - if(job.isEmpty()){ - return; - } - jobManager.addSlowJob(job); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java b/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java index cfc5e0182b..11b03741b6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/CBlock.java @@ -4,15 +4,14 @@ import java.util.Collection; import java.util.HashMap; import java.util.Map; 
+import java.util.function.IntFunction; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.io.jackson.serializer.CBlockDeserializer; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.conditions.CTCondition; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeCache; @@ -23,13 +22,15 @@ import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.queryplan.specific.ConceptNode; import com.bakdata.conquery.util.CalculatedValue; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; @@ -51,12 +52,10 @@ public class CBlock extends IdentifiableImpl implements NamespacedIdentifiable { //TODO Index per StringStore for isOfInterest @ToString.Include - @NsIdRef - private final Bucket bucket; + private final BucketId bucket; @NotNull - 
@NsIdRef @ToString.Include - private final ConceptTreeConnector connector; + private final ConnectorId connector; /** * We leverage the fact that a Bucket contains entities from bucketSize * {@link Bucket#getBucket()} to (1 + bucketSize) * {@link Bucket#getBucket()} - 1 to layout our internal structure. * This is maps the first Entities entry in this bucket to 0. @@ -100,7 +99,8 @@ public static CBlock createCBlock(ConceptTreeConnector connector, Bucket bucket, final Map includedConcepts = calculateConceptElementPathBloomFilter(bucketSize, bucket, mostSpecificChildren); final Map entitySpans = calculateEntityDateIndices(bucket); - return new CBlock(bucket, connector, root, includedConcepts, entitySpans, mostSpecificChildren); + final CBlock cBlock = new CBlock(bucket.getId(), connector.getId(), root, includedConcepts, entitySpans, mostSpecificChildren); + return cBlock; } /** @@ -114,9 +114,9 @@ private static int[][] calculateSpecificChildrenPaths(Bucket bucket, ConceptTree final TreeConcept treeConcept = connector.getConcept(); // If we have a column, and it is of string-type, we initialize a cache. - if (connector.getColumn() != null && bucket.getStore(connector.getColumn()) instanceof StringStore) { + if (connector.getColumn() != null && bucket.getStore(connector.getColumn().resolve()) instanceof StringStore) { - column = connector.getColumn(); + column = connector.getColumn().resolve(); treeConcept.initializeIdCache(bucket.getImp()); } @@ -136,6 +136,8 @@ else if (treeConcept.countElements() == 1) { final ConceptTreeCache cache = treeConcept.getCache(bucket.getImp()); + IntFunction> mapCalculator = bucket.mapCalculator(); + for (int event = 0; event < bucket.getNumberOfEvents(); event++) { @@ -152,7 +154,7 @@ else if (treeConcept.countElements() == 1) { // Lazy evaluation of map to avoid allocations if possible. // Copy event for closure. 
final int _event = event; - final CalculatedValue> rowMap = new CalculatedValue<>(() -> bucket.calculateMap(_event)); + final CalculatedValue> rowMap = new CalculatedValue<>(() -> mapCalculator.apply(_event)); if (connectorCondition != null && !connectorCondition.matches(stringValue, rowMap)) { mostSpecificChildren[event] = Connector.NOT_CONTAINED; @@ -165,7 +167,7 @@ else if (treeConcept.countElements() == 1) { continue; } - final ConceptElement child = cache == null + final ConceptTreeChild child = cache == null ? treeConcept.findMostSpecificChild(stringValue, rowMap) : cache.findMostSpecificChild(stringValue, rowMap); @@ -225,21 +227,6 @@ private static Map calculateConceptElementPathBloomFilter(int buck return includedConcepts; } - /** - * Calculates the bloom filter from the precomputed path to the most specific {@link ConceptTreeChild}. - */ - public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { - - for (int index = pathIndex; index > 0; index--) { - // TODO how could they be > Long.SIZE? - if (mostSpecificChild[index] < Long.SIZE) { - return 1L << mostSpecificChild[index]; - } - } - - return 0; - } - /** * For every included entity, calculate min and max and store them as statistics in the CBlock. * @@ -248,7 +235,7 @@ public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { private static Map calculateEntityDateIndices(Bucket bucket) { final Map spans = new HashMap<>(); - final Table table = bucket.getTable(); + final Table table = bucket.getTable().resolve(); for (Column column : table.getColumns()) { @@ -297,6 +284,21 @@ private static Map calculateEntityDateIndices(Bucket bucket) return spans; } + /** + * Calculates the bloom filter from the precomputed path to the most specific {@link ConceptTreeChild}. + */ + public static long calculateBitMask(int pathIndex, int[] mostSpecificChild) { + + for (int index = pathIndex; index > 0; index--) { + // TODO how could they be > Long.SIZE? 
+ if (mostSpecificChild[index] < Long.SIZE) { + return 1L << mostSpecificChild[index]; + } + } + + return 0; + } + public int[] getPathToMostSpecificChild(int event) { if (mostSpecificChildren == null) { return null; @@ -321,7 +323,7 @@ public CDateRange getEntityDateRange(String entity) { @Override @JsonIgnore public CBlockId createId() { - return new CBlockId(bucket.getId(), connector.getId()); + return new CBlockId(bucket, connector); } public boolean isConceptIncluded(String entity, long requiredBits) { @@ -341,7 +343,7 @@ public boolean isConceptIncluded(String entity, long requiredBits) { @Override @JsonIgnore - public Dataset getDataset() { + public DatasetId getDataset() { return bucket.getDataset(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java index 418f5f74ce..4ccf7d45f7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java +++ b/backend/src/main/java/com/bakdata/conquery/models/events/EmptyBucket.java @@ -2,6 +2,7 @@ import java.math.BigDecimal; import java.util.Map; +import java.util.function.IntFunction; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; @@ -24,18 +25,11 @@ public EmptyBucket() { this.setStores(new ColumnStore[0]); } - - @Override - public boolean eventIsContainedIn(int event, ValidityDate column, CDateSet dateRanges) { - return false; - } - @Override public boolean containsEntity(String entity) { return false; } - @Override public int getEntityStart(String entityId) { throw new IllegalStateException("ALL_IDS Bucket does not do anything"); @@ -46,7 +40,6 @@ public int getEntityEnd(String entityId) { throw new IllegalStateException("ALL_IDS Bucket does not do anything"); } - @Override public String getString(int event, Column column) { throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); @@ 
-83,8 +76,8 @@ public int getDate(int event, Column column) { } @Override - public CDateRange getDateRange(int event, Column column) { - throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); + public boolean eventIsContainedIn(int event, ValidityDate column, CDateSet dateRanges) { + return false; } @Override @@ -93,7 +86,12 @@ public CDateRange getAsDateRange(int event, Column column) { } @Override - public Map calculateMap(int event) { + public CDateRange getDateRange(int event, Column column) { + throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); + } + + @Override + public IntFunction> mapCalculator() { throw new IllegalStateException("Bucket for ALL_IDS_TABLE may not be evaluated."); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java index 55dafbee3b..1923172899 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java @@ -9,6 +9,10 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; +import jakarta.validation.constraints.NotNull; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.apiv1.execution.ExecutionStatus; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; @@ -17,8 +21,6 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; import 
com.bakdata.conquery.models.auth.entities.Subject; @@ -50,7 +52,8 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.OptBoolean; import com.google.common.base.Preconditions; -import jakarta.validation.constraints.NotNull; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import lombok.AccessLevel; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -78,15 +81,13 @@ public abstract class ManagedExecution extends IdentifiableImpl datasetRegistry; - public ManagedExecution(@NonNull User owner, @NonNull Dataset dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { + public ManagedExecution(@NonNull UserId owner, @NonNull DatasetId dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { this.owner = owner; this.dataset = dataset; this.metaStorage = metaStorage; @@ -189,7 +190,7 @@ protected String makeAutoLabel(PrintSettings cfg) { @JsonIgnore public Namespace getNamespace() { - return datasetRegistry.get(getDataset().getId()); + return datasetRegistry.get(getDataset()); } protected abstract void doInitExecutable(); @@ -224,7 +225,9 @@ public ManagedExecutionId createId() { if (queryId == null) { queryId = UUID.randomUUID(); } - return new ManagedExecutionId(dataset.getId(), queryId); + ManagedExecutionId managedExecutionId = new ManagedExecutionId(dataset, queryId); + managedExecutionId.setMetaStorage(getMetaStorage()); + return managedExecutionId; } /** @@ -310,7 +313,7 @@ public void setStatusBase(@NonNull Subject subject, @NonNull ExecutionStatus sta status.setContainsDates(containsDates); if (owner != null) { - User user = owner; + User user = owner.resolve(); status.setOwner(user.getId()); status.setOwnerName(user.getLabel()); } @@ -353,7 +356,7 @@ public FullExecutionStatus buildStatusFull(Subject subject, Namespace namespace) public void setStatusFull(FullExecutionStatus status, Subject 
subject, Namespace namespace) { setStatusBase(subject, status); - setAdditionalFieldsForStatusWithColumnDescription(subject, status, namespace); + setAdditionalFieldsForStatusWithColumnDescription(subject, status); setAdditionalFieldsForStatusWithSource(subject, status, namespace); setAdditionalFieldsForStatusWithGroups(status); setAvailableSecondaryIds(status); @@ -379,7 +382,7 @@ private void setAdditionalFieldsForStatusWithGroups(FullExecutionStatus status) * This is usually not done very often and should be reasonable fast, so don't cache this. */ List permittedGroups = new ArrayList<>(); - for (Group group : getMetaStorage().getAllGroups()) { + for (Group group : getMetaStorage().getAllGroups().toList()) { for (Permission perm : group.getPermissions()) { if (perm.implies(createPermission(Ability.READ.asSet()))) { permittedGroups.add(group.getId()); @@ -390,7 +393,7 @@ private void setAdditionalFieldsForStatusWithGroups(FullExecutionStatus status) status.setGroups(permittedGroups); } - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { // Implementation specific } diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java b/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java index 1c77dd5911..ee76301a50 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/Owned.java @@ -1,8 +1,8 @@ package com.bakdata.conquery.models.execution; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; public interface Owned extends Authorized { - User getOwner(); + UserId getOwner(); } diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java index b626b88ddb..887dc8c871 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/execution/Shareable.java @@ -44,7 +44,7 @@ default , S extends Identifiable & Shareable & Au final S shareable = (S) this; // Collect groups that do not have access to this instance and remove their probable permission - for (Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { if (patch.getGroups().contains(group.getId())) { continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java index baa4706bf9..8e1ef40ecf 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java @@ -14,7 +14,6 @@ import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.FormConfigPatch; -import com.bakdata.conquery.io.jackson.serializer.MetaIdRef; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Subject; @@ -30,7 +29,9 @@ import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.util.VariableDefaultValue; +import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.JsonNode; import lombok.AllArgsConstructor; import lombok.Data; @@ -44,6 +45,7 @@ import 
lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ArrayUtils; import org.apache.shiro.authz.Permission; +import org.jetbrains.annotations.Nullable; @Slf4j @Data @@ -70,8 +72,7 @@ public class FormConfig extends IdentifiableImpl implements Sharea */ @NotNull private JsonNode values; - @MetaIdRef - private User owner; + private UserId owner; @VariableDefaultValue private LocalDateTime creationTime = LocalDateTime.now(); @@ -83,7 +84,9 @@ public FormConfig(String formType, JsonNode values) { @Override public FormConfigId createId() { - return new FormConfigId(dataset, formType, formId); + FormConfigId formConfigId = new FormConfigId(dataset, formType, formId); + formConfigId.setMetaStorage(getMetaStorage()); + return formConfigId; } /** @@ -91,33 +94,38 @@ public FormConfigId createId() { * actual form field values. */ public FormConfigOverviewRepresentation overview(MetaStorage storage, Subject subject) { - String ownerName = Optional.ofNullable(owner).map(User::getLabel).orElse(null); + String ownerName = getOwnerName(); return FormConfigOverviewRepresentation.builder() - .id(getId()) - .formType(formType) - .label(label) - .tags(tags) - .ownerName(ownerName) - .own(subject.isOwner(this)) - .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) - .shared(shared) - // system? - .build(); + .id(getId()) + .formType(formType) + .label(label) + .tags(tags) + .ownerName(ownerName) + .own(subject.isOwner(this)) + .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) + .shared(shared) + // system? + .build(); + } + + @JsonIgnore + private @Nullable String getOwnerName() { + return Optional.ofNullable(owner).map(UserId::resolve).map(User.class::cast).map(User::getLabel).orElse(null); } /** * Return the full representation of the configuration with the configured form fields and meta data. 
*/ public FormConfigFullRepresentation fullRepresentation(MetaStorage storage, Subject requestingUser){ - String ownerName = Optional.ofNullable(owner).map(User::getLabel).orElse(null); + String ownerName = getOwnerName(); /* Calculate which groups can see this query. * This is usually not done very often and should be reasonable fast, so don't cache this. */ List permittedGroups = new ArrayList<>(); - for(Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { for(Permission perm : group.getPermissions()) { if(perm.implies(createPermission(Ability.READ.asSet()))) { permittedGroups.add(group.getId()); @@ -129,8 +137,8 @@ public FormConfigFullRepresentation fullRepresentation(MetaStorage storage, Subj .id(getId()).formType(formType) .label(label) .tags(tags) - .ownerName(ownerName) - .own(requestingUser.isOwner(this)) + .ownerName(ownerName) + .own(requestingUser.isOwner(this)) .createdAt(getCreationTime().atZone(ZoneId.systemDefault())) .shared(shared) .groups(permittedGroups) @@ -144,6 +152,10 @@ public ConqueryPermission createPermission(Set abilities) { return FormConfigPermission.onInstance(abilities, getId()); } + public Consumer valueSetter() { + return (patch) -> setValues(patch.getValues()); + } + /** * API representation for the overview of all {@link FormConfig}s which does not * include the form fields an their values. 
@@ -187,8 +199,4 @@ public static class FormConfigFullRepresentation extends FormConfigOverviewRepre private JsonNode values; } - public Consumer valueSetter() { - return (patch) -> setValues(patch.getValues()); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java index 6000c90819..ba579d6639 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormConfigProcessor.java @@ -85,7 +85,7 @@ public Stream getConfigsByFormType(@NonNull Su final Set formTypesFinal = requestedFormType; - final Stream stream = storage.getAllFormConfigs().stream() + final Stream stream = storage.getAllFormConfigs() .filter(c -> dataset.getId().equals(c.getDataset())) .filter(c -> formTypesFinal.contains(c.getFormType())) .filter(c -> subject.isPermitted(c, Ability.READ)); @@ -116,7 +116,7 @@ public FormConfig addConfig(Subject subject, Dataset targetDataset, FormConfigAP subject.authorize(namespace.getDataset(), Ability.READ); - final FormConfig internalConfig = config.intern(storage.getUser(subject.getId()), targetDataset.getId()); + final FormConfig internalConfig = config.intern(subject.getId(), targetDataset.getId()); // Add the config immediately to the submitted dataset addConfigToDataset(internalConfig); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java index 7e544fbff9..68be676506 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/frontendconfiguration/FormScanner.java @@ -11,7 +11,6 @@ import 
java.util.List; import java.util.Map; import java.util.Set; - import javax.annotation.Nullable; import com.bakdata.conquery.apiv1.forms.Form; @@ -43,7 +42,7 @@ public class FormScanner extends Task { * task accounts the change. */ private final ConqueryConfig config; - private List formConfigProviders = new ArrayList<>(); + private final List formConfigProviders = new ArrayList<>(); public FormScanner(ConqueryConfig config) { super("form-scanner"); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java index f7d8a0dfaf..16c5f7d5f8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ExternalExecution.java @@ -24,6 +24,9 @@ import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ExternalStateImpl; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -55,7 +58,7 @@ public class ExternalExecution extends ManagedForm { private UUID externalTaskId; - public ExternalExecution(ExternalForm form, User user, Dataset dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { + public ExternalExecution(ExternalForm form, UserId user, DatasetId dataset, MetaStorage metaStorage, DatasetRegistry datasetRegistry) { super(form, user, dataset, metaStorage, datasetRegistry); } @@ -85,7 +88,7 @@ public void start() { // Create service user final Dataset dataset = 
getNamespace().getDataset(); - final User originalUser = getOwner(); + final User originalUser = getOwner().resolve(); final FormBackendConfig formBackendConfig = getConfig().getPluginConfigs(FormBackendConfig.class) .filter(c -> c.supportsFormType(getSubmittedForm().getFormType())) .collect(MoreCollectors.onlyElement()); diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java index ce763811ad..daa0761867 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedForm.java @@ -6,10 +6,10 @@ import com.bakdata.conquery.apiv1.forms.FormConfigAPI; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -46,7 +46,7 @@ public abstract class ManagedForm extends ManagedExecution { @Getter private Form submittedForm; - protected ManagedForm(F submittedForm, User owner, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + protected ManagedForm(F submittedForm, UserId owner, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(owner, submittedDataset, storage, datasetRegistry); this.submittedForm = submittedForm; } @@ -64,7 +64,7 @@ public void start() { .tags(this.getTags()) 
.values(getSubmittedForm().getValues()).build(); - final FormConfig formConfig = build.intern(getOwner(), getDataset().getId()); + final FormConfig formConfig = build.intern(getOwner(), getDataset()); getMetaStorage().addFormConfig(formConfig); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java index 049ec237fd..5c8f7b9104 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/forms/managed/ManagedInternalForm.java @@ -12,23 +12,21 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.InternalExecution; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.identifiable.IdMap; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ColumnDescriptor; -import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.SingleTableResult; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.DatasetRegistry; -import 
com.bakdata.conquery.models.worker.Namespace; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AccessLevel; import lombok.EqualsAndHashCode; @@ -50,21 +48,20 @@ public class ManagedInternalForm extends ManagedF /** - * Mapping of a result table name to a set of queries. - * This is required by forms that have multiple results (CSVs) as output. + * Subqueries that are sent to the workers. */ @JsonIgnore @EqualsAndHashCode.Exclude - private Map subQueries; - + private final IdMap flatSubQueries = new IdMap<>(); /** - * Subqueries that are sent to the workers. + * Mapping of a result table name to a set of queries. + * This is required by forms that have multiple results (CSVs) as output. */ @JsonIgnore @EqualsAndHashCode.Exclude - private final IdMap flatSubQueries = new IdMap<>(); + private Map subQueries; - public ManagedInternalForm(F form, User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedInternalForm(F form, UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(form, user, submittedDataset, storage, datasetRegistry); } @@ -94,23 +91,7 @@ private Map createSubExecutions() { )); } - - @Override - public void start() { - synchronized (this) { - subQueries.values().forEach(flatSubQueries::add); - } - flatSubQueries.values().forEach(ManagedExecution::start); - super.start(); - } - - @Override - public List generateColumnDescriptions(boolean isInitialized, ConqueryConfig config) { - return subQueries.values().iterator().next().generateColumnDescriptions(isInitialized, config); - } - - - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { // Set the ColumnDescription if the Form only consits of a single subquery if (subQueries == null) { // If subqueries 
was not set the Execution was not initialized, do it manually @@ -133,6 +114,20 @@ public void cancel() { subQueries.values().forEach(ManagedQuery::cancel); } + @Override + public void start() { + synchronized (this) { + subQueries.values().forEach(flatSubQueries::add); + } + flatSubQueries.values().forEach(ManagedExecution::start); + super.start(); + } + + @Override + public List generateColumnDescriptions(boolean isInitialized, ConqueryConfig config) { + return subQueries.values().iterator().next().generateColumnDescriptions(isInitialized, config); + } + @Override @JsonIgnore public List getResultInfos() { @@ -160,7 +155,7 @@ public long resultRowCount() { return subQueries.values().iterator().next().resultRowCount(); } - public boolean allSubQueriesDone(ExecutionManager executionManager) { + public boolean allSubQueriesDone() { synchronized (this) { return flatSubQueries.values().stream().allMatch(q -> q.getState().equals(ExecutionState.DONE)); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java deleted file mode 100644 index ae74f0de63..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/CentralRegistry.java +++ /dev/null @@ -1,106 +0,0 @@ -package com.bakdata.conquery.models.identifiable; - -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ConcurrentMap; -import java.util.function.Function; - -import com.bakdata.conquery.models.error.ConqueryError.ExecutionCreationResolveError; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import lombok.NoArgsConstructor; -import lombok.ToString; -import lombok.extern.slf4j.Slf4j; - - -@Slf4j -@SuppressWarnings({"rawtypes", "unchecked"}) -@NoArgsConstructor -@ToString(of = "map") 
-public class CentralRegistry { - - private final IdMap map = new IdMap<>(); - private final ConcurrentMap, Function> cacheables = new ConcurrentHashMap<>(); - - public synchronized CentralRegistry register(Identifiable ident) { - map.add(ident); - return this; - } - - public synchronized Function registerCacheable(Id id, Function supplier) { - return cacheables.put(id, supplier); - } - - public > T resolve(Id name) { - final T result = get(name); - - if (result == null) { - throw new ExecutionCreationResolveError(name); - } - - return result; - } - - public Identifiable update(Identifiable ident) { - return map.update(ident); - } - - public synchronized Optional updateCacheable(Id id, Function supplier) { - Function old = cacheables.put(id, supplier); - if (old != null) { - // If the cacheable was still there, the Object was never cached. - return Optional.empty(); - } - // The supplier might have been invoked already and the object gone into the IdMap - // So we invalidate it - return Optional.ofNullable(map.remove(id)); - } - - public > Optional getOptional(Id name) { - return Optional.ofNullable(get(name)); - } - - public synchronized void remove(Identifiable ident) { - Id id = ident.getId(); - map.remove(id); - } - - public static CentralRegistry get(DeserializationContext ctxt) throws JsonMappingException { - return (CentralRegistry) ctxt.findInjectableValue(CentralRegistry.class.getName(), null, null); - } - - public void clear() { - map.clear(); - cacheables.clear(); - } - - /** - * Needs to be protected in order to be overwritten by {@link InjectingCentralRegistry} - */ - protected > T get(Id name) { - Object res = map.get(name); - if (res != null) { - return (T) res; - } - synchronized (this) { - // Retry synchronized to make sure it has not been resolved from cacheables in the mean time - Object res2 = map.get(name); - if (res2 != null) { - return (T) res2; - } - Function supplier = cacheables.get(name); - if (supplier == null) { - return null; - } - - 
// Transfer object to the IdMap - final T apply = (T) supplier.apply(name); - register(apply); - cacheables.remove(name); - } - - return (T) map.get(name); - } - -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java new file mode 100644 index 0000000000..190d2302db --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdResolvingException.java @@ -0,0 +1,29 @@ +package com.bakdata.conquery.models.identifiable; + +import com.bakdata.conquery.io.cps.CPSType; +import com.bakdata.conquery.models.error.ConqueryError; +import com.bakdata.conquery.models.error.ErrorMessages; +import com.bakdata.conquery.models.identifiable.ids.Id; + +/** + * TODO as {@link com.bakdata.conquery.models.error.ConqueryError} ? + */ +@CPSType(base = ConqueryError.class, id = "CQ_ID_RESOLVE_ERROR") +public class IdResolvingException extends ConqueryError { + + private final Id id; + + public IdResolvingException(Id id) { + this.id = id; + } + + public IdResolvingException(Id id, Throwable cause) { + super(ConqueryError.asConqueryError(cause)); + this.id = id; + } + + @Override + public String getMessageTemplate(ErrorMessages errorMessages) { + return errorMessages.idUnresolvable(id); + } +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java index 4c6037f87c..f77daa1736 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/Identifiable.java @@ -1,7 +1,5 @@ package com.bakdata.conquery.models.identifiable; -import jakarta.validation.Valid; - import com.bakdata.conquery.models.identifiable.ids.Id; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.ToString; @@ -9,7 +7,6 @@ public 
interface Identifiable>> { @JsonIgnore - @Valid @ToString.Include ID getId(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java index dd9a276e61..219ea5438d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/IdentifiableImpl.java @@ -1,9 +1,14 @@ package com.bakdata.conquery.models.identifiable; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.OptBoolean; +import lombok.AccessLevel; +import lombok.Getter; import lombok.NoArgsConstructor; +import lombok.Setter; import lombok.ToString; @NoArgsConstructor @@ -14,24 +19,12 @@ public abstract class IdentifiableImpl other = (IdentifiableImpl) obj; if (getId() == null) { - if (other.getId() != null) { - return false; - } - } else if (!getId().equals(other.getId())) { - return false; + return other.getId() == null; + } + else { + return getId().equals(other.getId()); + } + } + + @Override + public String toString() { + return this.getClass().getSimpleName()+"["+ getId() + "]"; + } + + @ToString.Include + @JsonIgnore + @Override + public ID getId() { + if (cachedId == null) { + + cachedId = createId(); } - return true; + return cachedId; } + + public abstract ID createId(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java new file mode 100644 index 0000000000..6a7b1885cd --- /dev/null +++ 
b/backend/src/main/java/com/bakdata/conquery/models/identifiable/NamespacedStorageProvider.java @@ -0,0 +1,43 @@ +package com.bakdata.conquery.models.identifiable; + +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; +import org.jetbrains.annotations.NotNull; + +/** + * Interface for classes that can resolve an {@link NamespacedId} to a concrete object. + */ +public interface NamespacedStorageProvider extends Injectable { + + static NamespacedStorageProvider getResolver(DeserializationContext ctxt) throws JsonMappingException { + return (NamespacedStorageProvider) ctxt + .findInjectableValue(NamespacedStorageProvider.class.getName(), null, null); + } + + /** + * Almost identical to {@link NamespacedStorageProvider#getStorage(DatasetId)}, but throws an {@link IllegalArgumentException} if no storage could be resolved. + * @return the storage or throws an {@link IllegalArgumentException} if the storage could not be resolved. + */ + @NotNull + default NamespacedStorage resolveStorage(DatasetId datasetId) { + NamespacedStorage storage = getStorage(datasetId); + if (storage == null) { + throw new IllegalArgumentException("Unknown dataset: %s".formatted(datasetId)); + } + return storage; + } + + /** + * Returns the storage corresponding to the given dataset. + * @param datasetId the dataset to query + * @return The storage or null if no storage corresponds to the dataset + * + * @implNote Don't call {@link Dataset#getNamespacedStorageProvider()} as it is probably not yet set. 
+ */ + NamespacedStorage getStorage(DatasetId datasetId); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java index 5c46e6a297..395bfd7369 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IIdInterner.java @@ -4,24 +4,48 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import com.bakdata.conquery.io.jackson.Injectable; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.models.identifiable.ids.IdUtil.Parser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonMappingException; -public enum IIdInterner { - - INSTANCE; +public class IIdInterner implements Injectable { private final Map, ParserIIdInterner> perParserInterner = new ConcurrentHashMap<>(); + public static IIdInterner get(DeserializationContext context) throws JsonMappingException { + return (IIdInterner) context.findInjectableValue(IIdInterner.class, null, null); + } + @SuppressWarnings("unchecked") - public static > ParserIIdInterner forParser(Parser parser) { - return (ParserIIdInterner) INSTANCE.perParserInterner.computeIfAbsent(parser, k -> new ParserIIdInterner<>()); + public > ParserIIdInterner forParser(Parser parser) { + return (ParserIIdInterner) perParserInterner.computeIfAbsent(parser, k -> new ParserIIdInterner<>()); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(this.getClass(), this); } public static class ParserIIdInterner> { private final Map, ID> interned = new ConcurrentHashMap<>(); public ID putIfAbsent(List components, ID id) { - return interned.putIfAbsent(components, id); + ID old = interned.putIfAbsent(components, id); + + if (old == null) { + return 
id; + } + checkConflict(id, old); + return old; + } + + public static void checkConflict(Id id, Id cached) { + if (!cached.equals(id)) { + throw new IllegalStateException("The cached id '%s' (%s) conflicted with the new entry of '%s' (%s)" + .formatted(cached, cached.getClass().getSimpleName(), id, id.getClass().getSimpleName())); + } } public ID get(List components) { diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java index 3d46c9268b..b5b10dcea2 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/Id.java @@ -2,15 +2,21 @@ import java.lang.ref.WeakReference; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.Objects; import com.bakdata.conquery.io.jackson.serializer.IdDeserializer; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.identifiable.IdResolvingException; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.util.ConqueryEscape; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonValue; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import lombok.Getter; import lombok.RequiredArgsConstructor; +import lombok.Setter; @RequiredArgsConstructor @JsonDeserialize(using = IdDeserializer.class) @@ -24,12 +30,28 @@ public abstract class Id { @JsonIgnore private WeakReference escapedId = new WeakReference<>(null); - @Override - public abstract boolean equals(Object obj); + /** + * Injected by deserializer + */ + @JsonIgnore + @Setter + @Getter + private NamespacedStorageProvider namespacedStorageProvider; + + /** + * Injected by deserializer for resolving meta Ids + */ + @JsonIgnore + @Setter + @Getter + private MetaStorage metaStorage; @Override 
public abstract int hashCode(); + @Override + public abstract boolean equals(Object obj); + @Override @JsonValue public final String toString() { @@ -67,4 +89,24 @@ public final List collectComponents() { return result; } + + public TYPE resolve() { + if (this instanceof NamespacedId namespacedId) { + return (TYPE) namespacedId.resolve(getNamespacedStorageProvider().getStorage(namespacedId.getDataset())); + } + if (this instanceof MetaId) { + return metaStorage.resolve((Id & MetaId)this); + } + throw new IllegalStateException("Tried to resolve an id that is neither NamespacedId not MetaId: %s".formatted(this)); + } + + public IdResolvingException newIdResolveException() { + return new IdResolvingException(this); + } + + public IdResolvingException newIdResolveException(Exception e) { + return new IdResolvingException(this, e); + } + + public abstract void collectIds(Collection> collect); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java index bff1bc6521..586f229fc1 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/IdUtil.java @@ -9,7 +9,6 @@ import com.bakdata.conquery.models.identifiable.IdentifiableImpl; import com.bakdata.conquery.util.ConqueryEscape; import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; import lombok.experimental.UtilityClass; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.reflect.MethodUtils; @@ -21,31 +20,10 @@ public final class IdUtil { public static final Joiner JOINER = Joiner.on(JOIN_CHAR); private static final Map, Class> CLASS_TO_ID_MAP = new ConcurrentHashMap<>(); - public static > ID intern(ID id) { - @SuppressWarnings("unchecked") - ID old = IIdInterner.forParser((Parser) createParser(id.getClass())).putIfAbsent(id.collectComponents(), 
id); - if (old == null) { - return id; - } - checkConflict(id, old); - return old; - } - public static > Parser createParser(Class idClass) { return (Parser) idClass.getDeclaredClasses()[0].getEnumConstants()[0]; } - public static void checkConflict(Id id, Id cached) { - if (!cached.equals(id)) { - throw new IllegalStateException("The cached id '" - + cached - + "'(" - + cached.getClass().getSimpleName() - + ") conflicted with a new entry of " - + id.getClass().getSimpleName()); - } - } - public static > Class findIdClass(Class cl) { Class result = CLASS_TO_ID_MAP.get(cl); @@ -80,12 +58,8 @@ public static > Class findIdClass(Class cl) { public interface Parser> { - default ID parse(String id) { - return parse(split(id)); - } - - default ID parse(String... id) { - return parse(Arrays.asList(id)); + static List asComponents(String id) { + return Arrays.asList(split(id)); } static String[] split(String id) { @@ -98,18 +72,17 @@ static String[] split(String id) { return parts; } + default ID parse(String id) { + return parse(split(id)); + } + + default ID parse(String... 
id) { + return parse(Arrays.asList(id)); + } + default ID parse(List parts) { //first check if we get the result with the list (which might be a sublist) - ID result = IIdInterner.forParser(this).get(parts); - if (result == null) { - result = createId(parts); - //if not make a minimal list and use that to compute so that we do not keep the sublist - ID secondResult = IIdInterner.forParser(this).putIfAbsent(ImmutableList.copyOf(parts), result); - if (secondResult != null) { - checkConflict(result, secondResult); - return secondResult; - } - } + ID result = createId(parts); return result; } @@ -137,28 +110,27 @@ default ID checkNoRemaining(ID id, IdIterator remaining, List allParts) default ID parse(IdIterator parts) { //first check if we get the result with the list (which might be a sublist) - List input = parts.getRemaining(); - ID result = IIdInterner.forParser(this).get(input); - if (result == null) { - parts.internNext(); - result = parseInternally(parts); - //if not make a minimal list and use that to compute so that we do not keep the sublist - ID secondResult = IIdInterner.forParser(this).putIfAbsent(ImmutableList.copyOf(input), result); - if (secondResult != null) { - checkConflict(result, secondResult); - return secondResult; - } - return result; - } - parts.consumeAll(); + + parts.internNext(); + ID result = parseInternally(parts); return result; } default ID parsePrefixed(String dataset, String id) { + List result = asComponents(dataset, id); + return parse(result); + } + + static List asComponents(String dataset, String id) { String[] result; String[] split = split(id); - //if already prefixed + + if (dataset == null) { + return Arrays.asList(split); + } + if (split.length > 0 && split[0].equals(dataset)) { + //if already prefixed result = split; } else { @@ -166,7 +138,7 @@ default ID parsePrefixed(String dataset, String id) { result[0] = dataset; System.arraycopy(split, 0, result, 1, split.length); } - return parse(Arrays.asList(result)); + return 
Arrays.asList(result); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java new file mode 100644 index 0000000000..8bc164aecc --- /dev/null +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/MetaId.java @@ -0,0 +1,12 @@ +package com.bakdata.conquery.models.identifiable.ids; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.identifiable.Identifiable; + +/** + * Marker interface for Ids that are resolvable in a {@link com.bakdata.conquery.io.storage.MetaStorage} + */ +public interface MetaId { + + Identifiable get(MetaStorage storage); +} diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java index d19838ffc8..48a18ca78c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedId.java @@ -1,20 +1,69 @@ package com.bakdata.conquery.models.identifiable.ids; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.models.identifiable.IdResolvingException; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.fasterxml.jackson.annotation.JsonIgnore; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; /** - * Marker interface for {@link Id}s that are loaded via Namespaced CentralRegistry - * (see {@link com.bakdata.conquery.models.worker.IdResolveContext#findRegistry(DatasetId)}, - * as opposed to Registry in the {@link com.bakdata.conquery.io.storage.MetaStorage} + * Marker interface for {@link Id}s that are bound to a {@link 
com.bakdata.conquery.models.worker.Namespace}/{@link com.bakdata.conquery.models.datasets.Dataset}. */ public interface NamespacedId { - @JsonIgnore - DatasetId getDataset(); + static WorkerStorage assertWorkerStorage(NamespacedStorage storage) { + if (!(storage instanceof WorkerStorage workerStorage)) { + throw new IllegalArgumentException("Cannot be retrieved from %s".formatted(storage)); + } + return workerStorage; + } + + static NamespaceStorage assertNamespaceStorage(NamespacedStorage storage) { + if (!(storage instanceof NamespaceStorage namespaceStorage)) { + throw new IllegalArgumentException("Cannot be retrieved from %s".formatted(storage)); + } + return namespaceStorage; + } default String toStringWithoutDataset() { return StringUtils.removeStart(toString(), getDataset().toString() + IdUtil.JOIN_CHAR); } + + @JsonIgnore + DatasetId getDataset(); + + /** + * Almost identical to {@link NamespacedId#get(NamespacedStorage)}, but throws an {@link IdResolvingException} if no object could be resolved. + * @return the object or throws an {@link IdResolvingException} if the Object could not be resolved. + */ + @NotNull + default NamespacedIdentifiable resolve(NamespacedStorage storage) { + try { + NamespacedIdentifiable o = get(storage); + if (o == null) { + throw newIdResolveException(); + } + return o; + } + catch (IdResolvingException e) { + throw e; + } + catch (Exception e) { + throw newIdResolveException(e); + } + } + + /** + * Return the object identified by the given id from the given storage. + * @return the object or null if no object could be resolved. 
If the id type is not supported + * throws a IllegalArgumentException + */ + NamespacedIdentifiable get(NamespacedStorage storage); + + IdResolvingException newIdResolveException(); + + IdResolvingException newIdResolveException(Exception e); } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java index f31c1b5f33..e9178fa585 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/NamespacedIdentifiable.java @@ -1,8 +1,8 @@ package com.bakdata.conquery.models.identifiable.ids; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; public interface NamespacedIdentifiable> & NamespacedId> extends Identifiable { - Dataset getDataset(); + DatasetId getDataset(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java index 51eef836ed..f33a42a623 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/BucketId.java @@ -1,13 +1,18 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertWorkerStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import 
com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +30,28 @@ public DatasetId getDataset() { return imp.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertWorkerStorage(storage).getBucket(this); + } + @Override public void collectComponents(List components) { imp.collectComponents(components); components.add(bucket); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + imp.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return imp.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java index b153c6f8b0..44f09e3eaf 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/CBlockId.java @@ -1,13 +1,18 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertWorkerStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import 
com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +30,29 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertWorkerStorage(storage).getCBlock(this); + } + @Override public void collectComponents(List components) { bucket.collectComponents(components); connector.collectComponents(components); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + bucket.collectIds(collect); + connector.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return bucket.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java index 18342624a0..7998adcf10 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ColumnId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import 
com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return table.getDataset(); } + @Override + public Column get(NamespacedStorage storage) { + return storage.getTable(getTable()).getColumnByName(getColumn()); + } + @Override public void collectComponents(List components) { table.collectComponents(components); components.add(column); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + table.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return table.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java index 6951c25e42..1899cee104 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptElementId.java @@ -2,8 +2,8 @@ import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -23,7 +23,7 @@ public ConceptElementId parseInternally(IdIterator parts) { return 
ConceptId.Parser.INSTANCE.parse(parts); } String childName = parts.next(); - ConceptElementId parent = ConceptElementId.Parser.INSTANCE.parse(parts); + ConceptElementId parent = Parser.INSTANCE.parse(parts); return new ConceptTreeChildId(parent, childName); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java index 4decf7e430..f1c8c3a55a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptId.java @@ -1,17 +1,27 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConceptPermission; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - @Getter @AllArgsConstructor @EqualsAndHashCode(callSuper=false) -public class ConceptId extends ConceptElementId> implements NamespacedId { +public class ConceptId extends 
ConceptElementId> implements NamespacedId, Authorized { private final DatasetId dataset; private final String name; @@ -20,7 +30,12 @@ public class ConceptId extends ConceptElementId> implements Namespace public DatasetId getDataset() { return dataset; } - + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(this); + } + @Override public ConceptId findConcept() { return this; @@ -32,7 +47,24 @@ public void collectComponents(List components) { components.add(name); } - public static enum Parser implements IdUtil.Parser { + + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public ConqueryPermission createPermission(Set abilities) { + return ConceptPermission.onInstance(abilities, this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + + public enum Parser implements IdUtil.Parser { INSTANCE; @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java index 438f57fbb3..71ec378244 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptSelectId.java @@ -1,11 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import 
com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -20,9 +24,9 @@ public ConceptSelectId(ConceptId concept, String select) { } @Override - public void collectComponents(List components) { - concept.collectComponents(components); - super.collectComponents(components); + public void collectIds(Collection> collect) { + collect.add(this); + concept.collectIds(collect); } @Override @@ -30,6 +34,27 @@ public DatasetId getDataset() { return concept.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(concept).getSelectByName(getSelect()); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return concept.getNamespacedStorageProvider(); + } + + @Override + public ConceptId findConcept() { + return concept; + } + + @Override + public void collectComponents(List components) { + concept.collectComponents(components); + super.collectComponents(components); + } + public enum Parser implements IdUtil.Parser { INSTANCE; @@ -40,9 +65,4 @@ public ConceptSelectId parseInternally(IdIterator parts) { return new ConceptSelectId(parent, name); } } - - @Override - public ConceptId findConcept() { - return concept; - } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java index 85cb4b68c8..d38675af17 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConceptTreeChildId.java @@ -1,12 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; 
+import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -21,7 +26,16 @@ public class ConceptTreeChildId extends ConceptElementId imple public DatasetId getDataset() { return parent.getDataset(); } - + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + Concept concept = storage.getConcept(findConcept()); + if (concept == null) { + return null; + } + return concept.findById(this); + } + @Override public ConceptId findConcept() { return parent.findConcept(); @@ -33,6 +47,17 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + parent.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return parent.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java index 8b6be8cdef..79932af141 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return concept.getDataset(); } + @Override + public Connector get(NamespacedStorage storage) { + return storage.getConcept(getConcept()).getConnectorByName(getConnector()); + } + @Override public void collectComponents(List components) { concept.collectComponents(components); components.add(connector); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + concept.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return concept.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java index d18929a627..fde78b020f 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ConnectorSelectId.java @@ -1,11 +1,15 @@ package 
com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -19,6 +23,27 @@ public ConnectorSelectId(ConnectorId connector, String select) { this.connector = connector; } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + @Override + public DatasetId getDataset() { + return connector.getDataset(); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(findConcept()).getConnectorByName(getConnector().getConnector()).getSelectByName(getSelect()); + } + + @Override + public ConceptId findConcept() { + return connector.getConcept(); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); @@ -26,8 +51,8 @@ public void collectComponents(List components) { } @Override - public DatasetId getDataset() { - return connector.getDataset(); + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); } public enum Parser implements IdUtil.Parser { @@ -40,9 +65,4 @@ public ConnectorSelectId parseInternally(IdIterator parts) { return new ConnectorSelectId(parent, name); } } - - @Override - public ConceptId findConcept() { - return connector.getConcept(); - } } \ No newline at end of file diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java index f52218cb49..187af3c161 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/DatasetId.java @@ -1,14 +1,21 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import java.util.Set; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; +import com.bakdata.conquery.models.auth.permissions.DatasetPermission; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.fasterxml.jackson.annotation.JsonIgnore; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -16,7 +23,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false, doNotUseGetters = true) -public class DatasetId extends Id implements NamespacedId { +public class DatasetId extends Id implements NamespacedId, Authorized { private final String name; @@ -26,11 +33,26 @@ public DatasetId getDataset() { return this; } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getDataset(); + } + @Override public void collectComponents(List components) { 
components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public ConqueryPermission createPermission(Set abilities) { + return DatasetPermission.onInstance(abilities, this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java index 733fe01398..6ddedf0405 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FilterId.java @@ -1,13 +1,15 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.filters.Filter; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +27,28 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public Filter get(NamespacedStorage storage) { + return storage.getConcept(connector.getConcept()).getConnectorByName(connector.getConnector()).getFilterByName(getFilter()); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); components.add(filter); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + 
@Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java index a46bb68d4a..58c6d50a6b 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/FormConfigId.java @@ -1,12 +1,16 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import java.util.UUID; +import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -14,7 +18,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false) -public class FormConfigId extends Id { +public class FormConfigId extends Id implements MetaId { private final DatasetId dataset; @@ -29,6 +33,17 @@ public void collectComponents(List components) { } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getFormConfig(this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java index 9019702861..6de6bd59fe 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/GroupId.java @@ -1,20 +1,23 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Group; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; +@Getter @EqualsAndHashCode(callSuper=false) public class GroupId extends PermissionOwnerId { public static final String TYPE = "group"; - @Getter private final String group; public GroupId(String group) { @@ -27,6 +30,16 @@ public void collectComponents(List components) { components.add(group); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getGroup(this); + } + public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java index 490d23cd9f..8edf685ee5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportColumnId.java @@ -1,13 +1,16 @@ package 
com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.ImportColumn; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,12 +28,28 @@ public DatasetId getDataset() { return imp.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + throw new UnsupportedOperationException("%s is never stored".formatted(this.getClass().getSimpleName())); + } + @Override public void collectComponents(List components) { imp.collectComponents(components); components.add(column); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + imp.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return imp.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java index c4bac13f29..046ce44df7 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ImportId.java @@ -1,16 +1,20 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import 
java.util.Collection; +import java.util.List; + +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false) @@ -24,12 +28,28 @@ public DatasetId getDataset() { return table.getDataset(); } + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getImport(this); + } + @Override public void collectComponents(List components) { table.collectComponents(components); components.add(tag); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + table.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return table.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java index 5ddd7a0f14..6f09df15a9 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/InternToExternMapperId.java @@ -1,11 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import 
static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertNamespaceStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.index.InternToExternMapper; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -24,6 +30,21 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertNamespaceStorage(storage).getInternToExternMapper(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java index f0a2d62e40..74ca53b650 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ManagedExecutionId.java @@ -1,12 +1,21 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import java.util.Set; import java.util.UUID; +import com.bakdata.conquery.io.storage.MetaStorage; +import 
com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.auth.permissions.Authorized; +import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; +import com.bakdata.conquery.models.auth.permissions.ExecutionPermission; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -14,7 +23,7 @@ @AllArgsConstructor @Getter @EqualsAndHashCode(callSuper = false, doNotUseGetters = true) -public class ManagedExecutionId extends Id { +public class ManagedExecutionId extends Id implements MetaId, Authorized { private final DatasetId dataset; private final UUID execution; @@ -25,6 +34,22 @@ public void collectComponents(List components) { components.add(execution); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getExecution(this); + } + + @Override + public ConqueryPermission createPermission(Set abilities) { + return ExecutionPermission.onInstance(abilities, this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java index 4e8072ba28..744aeb3b14 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/PermissionOwnerId.java @@ 
-5,13 +5,14 @@ import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.ids.MetaId; import lombok.EqualsAndHashCode; import lombok.RequiredArgsConstructor; @RequiredArgsConstructor @EqualsAndHashCode(callSuper = false) -public abstract class PermissionOwnerId> extends Id { +public abstract class PermissionOwnerId> extends Id implements MetaId { public enum Parser implements IdUtil.Parser> { diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java index 388c0ed586..4df43d6cb0 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/RoleId.java @@ -1,19 +1,22 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Role; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; +@Getter @EqualsAndHashCode(callSuper=false) public class RoleId extends PermissionOwnerId { public static final String TYPE = "role"; - @Getter private final String role; public RoleId(String mandator) { @@ -26,6 +29,16 @@ public void collectComponents(List components) { components.add(role); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public 
Identifiable get(MetaStorage storage) { + return storage.getRole(this); + } + enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java index 31005a7a59..8aee086b19 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SearchIndexId.java @@ -1,12 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import static com.bakdata.conquery.models.identifiable.ids.NamespacedId.assertNamespaceStorage; + +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.index.InternToExternMapper; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.index.search.SearchIndex; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -25,6 +30,21 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return assertNamespaceStorage(storage).getSearchIndex(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } public enum Parser implements IdUtil.Parser { INSTANCE; diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java index f0e1b83e56..4c214ee448 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SecondaryIdDescriptionId.java @@ -1,11 +1,14 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -25,6 +28,22 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public SecondaryIdDescription get(NamespacedStorage storage) { + return storage.getSecondaryId(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java index 618186f345..220dd5821c 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/SelectId.java @@ -4,8 +4,8 @@ import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java index 2f1bb9bb40..ac543cf94d 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/StructureNodeId.java @@ -1,11 +1,13 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.datasets.concepts.StructureNode; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -31,6 +33,22 @@ public void collectComponents(List components) { components.add(structureNode); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + if (parent != null) { + parent.collectIds(collect); + } + else { + dataset.collectIds(collect); + } + } + + 
@Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java index a4c04cb280..dd7c9ab8bd 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableId.java @@ -1,15 +1,17 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; - +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; -import lombok.Data; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -27,6 +29,22 @@ public void collectComponents(List components) { components.add(table); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + + @Override + public NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getTable(this); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return dataset.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java index 67e0c6aa03..e5c8e60408 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/TableImportDescriptorId.java @@ -1,10 +1,11 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.preproc.TableImportDescriptor; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -22,6 +23,11 @@ public void collectComponents(List components) { components.add(importDescriptor); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java index 3273813723..2ce00dc287 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/UserId.java @@ -1,11 +1,14 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import 
com.bakdata.conquery.models.identifiable.Identifiable; +import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -27,6 +30,16 @@ public void collectComponents(List components) { components.add(name); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + } + + @Override + public Identifiable get(MetaStorage storage) { + return storage.getUser(this); + } + public enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java index e8c9d8615f..aaae4f622a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/ValidityDateId.java @@ -1,12 +1,16 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; +import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -23,12 +27,30 @@ public DatasetId getDataset() { return connector.getDataset(); } + @Override + public 
NamespacedIdentifiable get(NamespacedStorage storage) { + return storage.getConcept(getConnector().getConcept()) + .getConnectorByName(getConnector().getConnector()) + .getValidityDateByName(getValidityDate()); + } + @Override public void collectComponents(List components) { connector.collectComponents(components); components.add(validityDate); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + connector.collectIds(collect); + } + + @Override + public NamespacedStorageProvider getNamespacedStorageProvider() { + return connector.getNamespacedStorageProvider(); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java index 627298a398..2706b0d2f3 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/ids/specific/WorkerId.java @@ -1,10 +1,11 @@ package com.bakdata.conquery.models.identifiable.ids.specific; +import java.util.Collection; import java.util.List; import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.identifiable.ids.IdIterator; +import com.bakdata.conquery.models.identifiable.ids.IdUtil; import com.bakdata.conquery.models.worker.WorkerInformation; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; @@ -24,6 +25,12 @@ public void collectComponents(List components) { components.add(worker); } + @Override + public void collectIds(Collection> collect) { + collect.add(this); + dataset.collectIds(collect); + } + public static enum Parser implements IdUtil.Parser { INSTANCE; diff --git a/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java 
b/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java index 6bd986ed3d..b937353964 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java +++ b/backend/src/main/java/com/bakdata/conquery/models/identifiable/mapping/EntityIdMap.java @@ -7,11 +7,15 @@ import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ColumnConfig; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonValue; +import com.fasterxml.jackson.annotation.OptBoolean; import com.univocity.parsers.common.record.Record; import com.univocity.parsers.csv.CsvParser; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; import lombok.Data; import lombok.EqualsAndHashCode; import lombok.Getter; @@ -26,12 +30,15 @@ @Getter @EqualsAndHashCode @Slf4j -@NoArgsConstructor +@AllArgsConstructor +// For Jackson +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class EntityIdMap { @Setter @JsonIgnore @EqualsAndHashCode.Exclude + @JacksonInject(useInput = OptBoolean.FALSE) private NamespaceStorage storage; /** @@ -49,9 +56,9 @@ public class EntityIdMap { /** * Read incoming CSV-file extracting Id-Mappings for {@link ExternalId} and {@link EntityPrintId}. 
*/ - public static EntityIdMap generateIdMapping(CsvParser parser, List mappers) { + public static EntityIdMap generateIdMapping(CsvParser parser, List mappers, NamespaceStorage namespaceStorage) { - EntityIdMap mapping = new EntityIdMap(); + EntityIdMap mapping = new EntityIdMap(namespaceStorage); Record record; diff --git a/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java b/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java index 1ed80b263e..b60769e303 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java +++ b/backend/src/main/java/com/bakdata/conquery/models/index/MapInternToExternMapper.java @@ -11,9 +11,9 @@ import com.bakdata.conquery.io.jackson.Initializing; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.NamedImpl; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.bakdata.conquery.util.io.FileUtil; import com.fasterxml.jackson.annotation.JacksonInject; @@ -64,7 +64,7 @@ public class MapInternToExternMapper extends NamedImpl i @JsonIgnore @NotNull - private Dataset dataset; + private DatasetId dataset; @ToString.Include @NotEmpty @@ -95,7 +95,7 @@ public synchronized void init() { return; } - dataset = storage.getDataset(); + dataset = storage.getDataset().getId(); final URI resolvedURI = FileUtil.getResolvedUri(config.getIndex().getBaseUrl(), csv); log.trace("Resolved mapping reference csv url '{}': {}", this.getId(), resolvedURI); @@ -145,7 +145,7 @@ public String external(String internalValue) { @Override public InternToExternMapperId createId() { - return new 
InternToExternMapperId(getDataset().getId(), getName()); + return new InternToExternMapperId(getDataset(), getName()); } public static class Initializer extends Initializing.Converter {} diff --git a/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java b/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java index dd080e10ad..5956889a61 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java +++ b/backend/src/main/java/com/bakdata/conquery/models/index/search/SearchIndex.java @@ -1,10 +1,10 @@ package com.bakdata.conquery.models.index.search; import com.bakdata.conquery.io.cps.CPSBase; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.Named; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.SearchIndexId; import com.fasterxml.jackson.annotation.JsonTypeInfo; @@ -15,5 +15,5 @@ public interface SearchIndex extends Identifiable, Named> getAllSelectFilters(NamespaceStorage storage) { - return storage.getAllConcepts().stream() + return storage.getAllConcepts() .flatMap(c -> c.getConnectors().stream()) .flatMap(co -> co.collectAllFilters().stream()) .filter(SelectFilter.class::isInstance) diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java index 3df723a45b..dafbb00495 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/CollectColumnValuesJob.java @@ -11,12 +11,12 @@ import 
java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.stores.root.StringStore; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.Job; import com.bakdata.conquery.models.jobs.UpdateFilterSearchJob; import com.bakdata.conquery.models.messages.namespaces.ActionReactionMessage; @@ -44,8 +44,7 @@ public class CollectColumnValuesJob extends WorkerMessage implements ActionReactionMessage { @Getter - @NsIdRefCollection - private final Set columns; + private final Set columns; /** * This exists only on the manager for the afterAllReaction. 
@@ -56,8 +55,8 @@ public class CollectColumnValuesJob extends WorkerMessage implements ActionReact @Override public void react(Worker context) throws Exception { - final Map> table2Buckets = context.getStorage().getAllBuckets().stream() - .collect(Collectors.groupingBy(Bucket::getTable)); + final Map> table2Buckets = context.getStorage().getAllBuckets() + .collect(Collectors.groupingBy(Bucket::getTable)); final ListeningExecutorService jobsExecutorService = MoreExecutors.listeningDecorator(context.getJobsExecutorService()); @@ -68,14 +67,15 @@ public void react(Worker context) throws Exception { final List> futures = columns.stream() .filter(column -> table2Buckets.get(column.getTable()) != null) + .map(ColumnId::resolve) .map(column -> jobsExecutorService.submit(() -> { - final List buckets = table2Buckets.get(column.getTable()); + final List buckets = table2Buckets.get(column.getTable().getId()); final Set values = buckets.stream() .flatMap(bucket -> ((StringStore) bucket.getStore(column)).streamValues()) .collect(Collectors.toSet()); - context.send(new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column, values)); + context.send(new RegisterColumnValues(getMessageId(), context.getInfo().getId(), column.getId(), values)); log.trace("Finished collections values for column {} as number {}", column, done.incrementAndGet()); }) ) @@ -120,7 +120,8 @@ public void execute() { log.debug("{} shrinking searches", this); - for (Column column : columns) { + for (ColumnId columnId : columns) { + Column column = columnId.resolve(); try { filterSearch.shrinkSearch(column); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java index 32587e4bdb..ec8a068bf8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RegisterColumnValues.java @@ -5,8 +5,7 @@ import java.util.UUID; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.ReactionMessage; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; @@ -33,8 +32,7 @@ public class RegisterColumnValues extends NamespaceMessage implements ReactionMe private WorkerId workerId; - @NsIdRef - private final Column column; + private final ColumnId column; @ToString.Exclude private final Collection values; @@ -48,13 +46,13 @@ public int size() { @Override public void react(DistributedNamespace context) throws Exception { if (log.isTraceEnabled()) { - log.trace("Registering {} values for column '{}': {}", size(), column.getId(), Arrays.toString(values.toArray())); + log.trace("Registering {} values for column '{}': {}", size(), column, Arrays.toString(values.toArray())); } else { - log.debug("Registering {} values for column '{}'", size(), column.getId()); + log.debug("Registering {} values for column '{}'", size(), column); } - context.getFilterSearch().registerValues(column, values); + context.getFilterSearch().registerValues(column.resolve(), values); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java index f1a856fcab..d061dcbde8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveConcept.java @@ -1,8 +1,7 @@ package 
com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -14,14 +13,13 @@ @CPSType(id="REMOVE_CONCEPT", base=NamespacedMessage.class) @RequiredArgsConstructor(onConstructor_=@JsonCreator) @Getter @ToString public class RemoveConcept extends WorkerMessage { - - @NsIdRef - private final Concept concept; + private final ConceptId concept; + @Override public void react(Worker context) throws Exception { synchronized (context.getStorage()) { - context.removeConcept(concept); + context.removeConcept(concept.resolve()); } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java index 7c4b6ca355..6fa6bd86b5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveImportJob.java @@ -1,24 +1,24 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; import com.fasterxml.jackson.annotation.JsonCreator; +import 
lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; import lombok.extern.slf4j.Slf4j; - @CPSType(id="REMOVE_IMPORT", base= NamespacedMessage.class) -@RequiredArgsConstructor(onConstructor_=@JsonCreator) @ToString +@RequiredArgsConstructor(onConstructor_=@JsonCreator) +@ToString @Slf4j +@Getter // Needed by SmileParser public class RemoveImportJob extends WorkerMessage { - @NsIdRef - private final Import imp; + private final ImportId imp; @Override public void react(Worker context) throws Exception { diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java index 62a3206c4e..1200eeb3e6 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveSecondaryId.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -17,12 +16,11 @@ @AllArgsConstructor(onConstructor_=@JsonCreator) @Getter @Setter @ToString(callSuper=true) public class RemoveSecondaryId extends WorkerMessage { - @NsIdRef - private SecondaryIdDescription secondaryId; + private SecondaryIdDescriptionId secondaryId; @Override public void react(Worker context) throws Exception { log.info("Received Deletion of SecondaryId {}", secondaryId); - context.removeSecondaryId(secondaryId.getId()); + 
context.removeSecondaryId(secondaryId); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java index 02d8dddac8..6faade2ac5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RemoveTable.java @@ -1,8 +1,7 @@ package com.bakdata.conquery.models.messages.namespaces.specific; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; -import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; import com.bakdata.conquery.models.worker.Worker; @@ -17,8 +16,7 @@ @AllArgsConstructor(onConstructor_=@JsonCreator) @Getter @Setter @ToString(callSuper=true) public class RemoveTable extends WorkerMessage { - @NsIdRef - private Table table; + private TableId table; @Override public void react(Worker context) throws Exception { diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java index 692fa2f986..a8c1b74347 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/ReportConsistency.java @@ -14,7 +14,11 @@ import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.google.common.collect.Sets; -import lombok.*; +import lombok.AllArgsConstructor; 
+import lombok.Getter; +import lombok.NoArgsConstructor; +import lombok.NonNull; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -38,7 +42,7 @@ public class ReportConsistency extends NamespaceMessage { @Override public void react(DistributedNamespace context) throws Exception { - Set managerImports = context.getStorage().getAllImports().stream().map(Import::getId).collect(Collectors.toSet()); + Set managerImports = context.getStorage().getAllImports().map(Import::getId).collect(Collectors.toSet()); Set assignedWorkerBuckets = context.getWorkerHandler().getBucketsForWorker(workerId); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java index c1624fb420..5be15f728e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/RequestConsistency.java @@ -24,10 +24,10 @@ public class RequestConsistency extends WorkerMessage { @Override public void react(Worker context) throws Exception { // Gather ImportIds - Set workerImports = context.getStorage().getAllImports().stream().map(Import::getId).collect(Collectors.toSet()); + Set workerImports = context.getStorage().getAllImports().map(Import::getId).collect(Collectors.toSet()); // Gather BucketIds - Set workerBuckets = context.getStorage().getAllBuckets().stream().map(Bucket::getId).collect(Collectors.toSet()); + Set workerBuckets = context.getStorage().getAllBuckets().map(Bucket::getId).collect(Collectors.toSet()); // Send report context.send(new ReportConsistency(context.getInfo().getId(), workerImports, workerBuckets)); diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java 
b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java index 5efc018639..9d5821b198 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateElementMatchingStats.java @@ -1,12 +1,15 @@ package com.bakdata.conquery.models.messages.namespaces.specific; +import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefKeys; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.MatchingStats; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; @@ -26,27 +29,40 @@ public class UpdateElementMatchingStats extends NamespaceMessage { private final WorkerId source; @ToString.Exclude - @NsIdRefKeys - private final Map, MatchingStats.Entry> values; + private final Map, MatchingStats.Entry> values; @Override public void react(DistributedNamespace context) throws Exception { - for (Entry, MatchingStats.Entry> entry : values.entrySet()) { + // We collect the concepts outside the loop to update the storage afterward + Map> conceptsToUpdate = new HashMap<>(); + + for (Entry, MatchingStats.Entry> entry : values.entrySet()) { try { - final ConceptElement target = entry.getKey(); + ConceptElementId element = entry.getKey(); + ConceptId conceptId = element.findConcept(); + + // mapping function cannot use Id::resolve here 
yet, somehow the nsIdResolver is not set because it + // stems from a map key. Jackson seems to use a different serializer. + Concept concept = conceptsToUpdate.computeIfAbsent(conceptId, id -> context.getStorage().getConcept(id)); + + final ConceptElement target = concept.findById(element); + final MatchingStats.Entry value = entry.getValue(); + conceptsToUpdate.put(conceptId, concept); + MatchingStats matchingStats = target.getMatchingStats(); if (matchingStats == null) { matchingStats = new MatchingStats(); target.setMatchingStats(matchingStats); } matchingStats.putEntry(source, value); - } - catch (Exception e) { - log.error("Failed to set matching stats for '{}'", entry.getKey()); + } catch (Exception e) { + log.error("Failed to set matching stats for '{}' (enable TRACE for exception)", entry.getKey(), (Exception) (log.isTraceEnabled() ? e : null)); } } + + conceptsToUpdate.values().forEach(context.getStorage()::updateConcept); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java index 5f783a8c19..19f54c8d13 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/namespaces/specific/UpdateMatchingStatsMessage.java @@ -10,7 +10,6 @@ import java.util.stream.Collectors; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; @@ -20,6 +19,8 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; 
+import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.Job; import com.bakdata.conquery.models.messages.namespaces.NamespacedMessage; import com.bakdata.conquery.models.messages.namespaces.WorkerMessage; @@ -27,6 +28,7 @@ import com.bakdata.conquery.util.progressreporter.ProgressReporter; import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.base.Functions; +import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -38,8 +40,8 @@ @RequiredArgsConstructor(onConstructor_ = {@JsonCreator}) public class UpdateMatchingStatsMessage extends WorkerMessage { - @NsIdRefCollection - private final Collection> concepts; + @Getter + private final Collection concepts; @Override @@ -50,16 +52,11 @@ public void react(Worker worker) throws Exception { @RequiredArgsConstructor private static class UpdateMatchingStatsJob extends Job { private final Worker worker; - private final Collection> concepts; - - @Override - public String getLabel() { - return String.format("Calculate Matching Stats for %s", worker.getInfo().getDataset()); - } + private final Collection concepts; @Override public void execute() throws Exception { - if (worker.getStorage().getAllCBlocks().isEmpty()) { + if (worker.getStorage().getAllCBlocks().findAny().isEmpty()) { log.debug("Worker {} is empty, skipping.", worker); return; } @@ -69,16 +66,17 @@ public void execute() throws Exception { log.info("BEGIN update Matching stats for {} Concepts", concepts.size()); - final Map, CompletableFuture> + final Map> subJobs = concepts.stream() .collect(Collectors.toMap(Functions.identity(), concept -> CompletableFuture.runAsync(() -> { - final Map, MatchingStats.Entry> + final Concept resolved = concept.resolve(); + final Map, MatchingStats.Entry> matchingStats = - new HashMap<>(concept.countElements()); + new 
HashMap<>(resolved.countElements()); - calculateConceptMatches(concept, matchingStats, worker); + calculateConceptMatches(resolved, matchingStats, worker); worker.send(new UpdateElementMatchingStats(worker.getInfo().getId(), matchingStats)); @@ -110,7 +108,7 @@ public void execute() throws Exception { return; } - log.trace("Still waiting for `{}`", concept.getId()); + log.trace("Still waiting for `{}`", concept); }); } @@ -121,19 +119,23 @@ public void execute() throws Exception { } + @Override + public String getLabel() { + return String.format("Calculate Matching Stats for %s", worker.getInfo().getDataset()); + } - private static void calculateConceptMatches(Concept concept, Map, MatchingStats.Entry> results, Worker worker) { + private static void calculateConceptMatches(Concept concept, Map, MatchingStats.Entry> results, Worker worker) { log.debug("BEGIN calculating for `{}`", concept.getId()); - for (CBlock cBlock : worker.getStorage().getAllCBlocks()) { + for (CBlock cBlock : worker.getStorage().getAllCBlocks().toList()) { - if (!cBlock.getConnector().getConcept().equals(concept)) { + if (!cBlock.getConnector().getConcept().equals(concept.getId())) { continue; } try { - final Bucket bucket = cBlock.getBucket(); - final Table table = bucket.getTable(); + final Bucket bucket = cBlock.getBucket().resolve(); + final Table table = bucket.getTable().resolve(); for (String entity : bucket.entities()) { @@ -145,9 +147,7 @@ private static void calculateConceptMatches(Concept concept, Map new MatchingStats.Entry()).addEvent(table, bucket, event, entity); - + results.computeIfAbsent(concept.getId(), (ignored) -> new MatchingStats.Entry()).addEvent(table, bucket, event, entity); continue; } @@ -158,7 +158,7 @@ private static void calculateConceptMatches(Concept concept, Map element = ((TreeConcept) concept).getElementByLocalIdPath(localIds); while (element != null) { - results.computeIfAbsent(((ConceptElement) element), (ignored) -> new MatchingStats.Entry()) + 
results.computeIfAbsent(((ConceptElement) element).getId(), (ignored) -> new MatchingStats.Entry()) .addEvent(table, bucket, event, entity); element = element.getParent(); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java index 02fc258311..65f85fcf44 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/messages/network/NetworkMessageContext.java @@ -11,6 +11,7 @@ import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.ShardWorkers; +import io.dropwizard.core.setup.Environment; import lombok.Getter; @Getter @@ -34,13 +35,15 @@ public static class ShardNodeNetworkContext extends NetworkMessageContext to track actually included buckets,to split phase bucket assignment. - private int numberOfBuckets; + private final int numberOfBuckets; /** * The specific columns and their associated MajorType for validation. */ - private PPColumn[] columns; + private final PPColumn[] columns; /** * A hash to check if any of the underlying files for generating this CQPP has changed. */ - private int validityHash; + private final int validityHash; + + @JsonIgnore + @JacksonInject + private NamespaceStorage namespaceStorage; public Import createImportDescription(Table table, Map stores) { - final Import imp = new Import(table); + final Import imp = new Import(table.getId()); imp.setName(getName()); imp.setNumberOfEntries(getRows()); @@ -87,8 +91,6 @@ public Import createImportDescription(Table table, Map stor /** * Verify that the supplied table matches the preprocessed' data in shape. 
- * - * @return */ public List assertMatch(Table table) { final List errors = new ArrayList<>(); diff --git a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java index 340433ae6c..5f180edf28 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java +++ b/backend/src/main/java/com/bakdata/conquery/models/preproc/PreprocessedReader.java @@ -20,32 +20,22 @@ */ @RequiredArgsConstructor(access = AccessLevel.PACKAGE) public class PreprocessedReader implements AutoCloseable, Iterator { - @Override - public void close() throws IOException { - parser.close(); - } - - @Accessors(fluent = true) - @RequiredArgsConstructor - public enum LastRead { - DATA(null), HEADER(DATA), BEGIN(HEADER); - - @Getter - private final LastRead next; - } - + private final JsonParser parser; @Getter private LastRead lastRead = LastRead.BEGIN; private int bucketsRemaining; - private final JsonParser parser; - public PreprocessedReader(InputStream inputStream, ObjectMapper objectMapper) throws IOException { - parser = objectMapper.copy().enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) - .getFactory() - .createParser(inputStream); + parser = objectMapper + .enable(JsonGenerator.Feature.AUTO_CLOSE_TARGET) + .getFactory() + .createParser(inputStream); } + @Override + public void close() throws IOException { + parser.close(); + } public PreprocessedHeader readHeader() throws IOException { Preconditions.checkState(lastRead.equals(LastRead.BEGIN)); @@ -57,7 +47,6 @@ public PreprocessedHeader readHeader() throws IOException { return header; } - @Override public boolean hasNext() { return bucketsRemaining > 0; @@ -71,4 +60,13 @@ public PreprocessedData next() { return parser.readValueAs(PreprocessedData.class); } + @Accessors(fluent = true) + @RequiredArgsConstructor + public enum LastRead { + DATA(null), HEADER(DATA), BEGIN(HEADER); + + @Getter + private 
final LastRead next; + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java index 2c533a42bc..1379a72a99 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/DistributedExecutionManager.java @@ -42,50 +42,13 @@ @Slf4j public class DistributedExecutionManager extends ExecutionManager { - @Data - @AllArgsConstructor(access = AccessLevel.PRIVATE) - public static class DistributedState implements InternalState { - @Setter - @NonNull - private ExecutionState state; - private Map> results; - private CountDownLatch executingLock; - - public DistributedState() { - this(ExecutionState.RUNNING, new ConcurrentHashMap<>(), new CountDownLatch(1)); - } - - @NotNull - @Override - public ExecutionState getState() { - return state; - } - - @Override - public Stream streamQueryResults() { - return results.values().stream().flatMap(Collection::stream); - } - - @Override - public CountDownLatch getExecutingLock() { - return executingLock; - } - - public boolean allResultsArrived(Set allWorkers) { - Set finishedWorkers = results.keySet(); - return finishedWorkers.equals(allWorkers); - } - } - private final ClusterState clusterState; - public DistributedExecutionManager(MetaStorage storage, DatasetRegistry datasetRegistry, ClusterState state) { super(storage, datasetRegistry); clusterState = state; } - @Override protected void doExecute(E execution) { @@ -102,6 +65,10 @@ protected void doExecute(E exec workerHandler.sendToAll(createExecutionMessage(execution)); } + private WorkerHandler getWorkerHandler(DatasetId datasetId) { + return clusterState.getWorkerHandlers().get(datasetId); + } + private WorkerMessage createExecutionMessage(ManagedExecution execution) { if (execution instanceof ManagedQuery mq) { return new 
ExecuteQuery(mq.getId(), mq.getQuery()); @@ -116,8 +83,12 @@ else if (execution instanceof ManagedInternalForm form) { } - private WorkerHandler getWorkerHandler(DatasetId datasetId) { - return clusterState.getWorkerHandlers().get(datasetId); + @Override + public void doCancelQuery(ManagedExecution execution) { + log.debug("Sending cancel message to all workers."); + + execution.cancel(); + getWorkerHandler(execution.createId().getDataset()).sendToAll(new CancelQuery(execution.getId())); } /** @@ -152,7 +123,7 @@ public v distributedState.results.put(result.getWorkerId(), result.getResults()); // If all known workers have returned a result, the query is DONE. - if (distributedState.allResultsArrived(getWorkerHandler(execution.getDataset().getId()).getAllWorkerIds())) { + if (distributedState.allResultsArrived(getWorkerHandler(execution.getDataset()).getAllWorkerIds())) { execution.finish(ExecutionState.DONE); @@ -162,7 +133,7 @@ public v // State changed to DONE or FAILED ExecutionState execStateAfterResultCollect = getResult(id).getState(); if (execStateAfterResultCollect != ExecutionState.RUNNING) { - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), getStorage()).map(Group::getName).orElse("none"); + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner().resolve(), getStorage()).map(Group::getName).orElse("none"); ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).dec(); ExecutionMetrics.getQueryStateCounter(execStateAfterResultCollect, primaryGroupName).inc(); @@ -175,12 +146,39 @@ public v } - @Override - public void doCancelQuery(ManagedExecution execution) { - log.debug("Sending cancel message to all workers."); + @Data + @AllArgsConstructor(access = AccessLevel.PRIVATE) + public static class DistributedState implements InternalState { + @Setter + @NonNull + private ExecutionState state; + private Map> results; + private CountDownLatch executingLock; - execution.cancel(); - 
getWorkerHandler(execution.createId().getDataset()).sendToAll(new CancelQuery(execution.getId())); + public DistributedState() { + this(ExecutionState.RUNNING, new ConcurrentHashMap<>(), new CountDownLatch(1)); + } + + @NotNull + @Override + public ExecutionState getState() { + return state; + } + + @Override + public CountDownLatch getExecutingLock() { + return executingLock; + } + + @Override + public Stream streamQueryResults() { + return results.values().stream().flatMap(Collection::stream); + } + + public boolean allResultsArrived(Set allWorkers) { + Set finishedWorkers = results.keySet(); + return finishedWorkers.equals(allWorkers); + } } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java index 6d94b2573d..663bf5be6c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ExecutionManager.java @@ -13,7 +13,6 @@ import com.bakdata.conquery.metrics.ExecutionMetrics; import com.bakdata.conquery.models.auth.AuthorizationHelper; import com.bakdata.conquery.models.auth.entities.Group; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.error.ConqueryError; import com.bakdata.conquery.models.execution.ExecutionState; @@ -21,6 +20,7 @@ import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.ExternalExecution; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; @@ -36,44 +36,9 @@ @Slf4j public abstract class ExecutionManager { - /** - 
* Holds all informations about an execution, which cannot/should not be serialized/cached in a store. - */ - public interface State { - - /** - * The current {@link ExecutionState} of the execution. - */ - @NotNull - ExecutionState getState(); - - void setState(ExecutionState state); - - /** - * Synchronization barrier for web requests. - * Barrier is activated upon starting an execution so request can wait for execution completion. - * When the execution is finished the barrier is removed. - */ - CountDownLatch getExecutingLock(); - } - - public interface InternalState extends State{ - Stream streamQueryResults(); - } - private final MetaStorage storage; - private final DatasetRegistry datasetRegistry; - /** - * Cache for execution states. - */ - private final Cache executionStates = - CacheBuilder.newBuilder() - .softValues() - .removalListener(this::executionRemoved) - .build(); - /** * Manage state of evicted Queries, setting them to NEW. */ @@ -95,7 +60,6 @@ private void executionRemoved(RemovalNotification rem } } - public ManagedExecution getExecution(ManagedExecutionId execution) { return storage.getExecution(execution); } @@ -109,7 +73,14 @@ public R getResult(ManagedExecutionId id) { throw new NoSuchElementException("No execution found for %s".formatted(id)); } return (R) state; - } + } /** + * Cache for execution states. 
+ */ + private final Cache executionStates = + CacheBuilder.newBuilder() + .softValues() + .removalListener(this::executionRemoved) + .build(); public Optional tryGetResult(ManagedExecutionId id) { return Optional.ofNullable((R) executionStates.getIfPresent(id)); @@ -123,16 +94,20 @@ public void addState(ManagedExecutionId id, State result) { executionStates.put(id, result); } - public final ManagedExecution runQuery(Namespace namespace, QueryDescription query, User user, ConqueryConfig config, boolean system) { + public final ManagedExecution runQuery(Namespace namespace, QueryDescription query, UserId user, ConqueryConfig config, boolean system) { final ManagedExecution execution = createExecution(query, user, namespace, system); - execute(namespace, execution, config); + execute(execution, config); return execution; } + // Visible for testing + public final ManagedExecution createExecution(QueryDescription query, UserId user, Namespace namespace, boolean system) { + return createExecution(query, UUID.randomUUID(), user, namespace, system); + } - public final void execute(Namespace namespace, ManagedExecution execution, ConqueryConfig config) { + public final void execute(ManagedExecution execution, ConqueryConfig config) { clearQueryResults(execution); @@ -158,7 +133,7 @@ public final void execute(Namespace namespace, ManagedExecution execution, Conqu execution.start(); - final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner(), storage).map(Group::getName).orElse("none"); + final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(execution.getOwner().resolve(), storage).map(Group::getName).orElse("none"); ExecutionMetrics.getRunningQueriesCounter(primaryGroupName).inc(); if (execution instanceof InternalExecution internalExecution) { @@ -172,16 +147,9 @@ public final void execute(Namespace namespace, ManagedExecution execution, Conqu } } - protected abstract void doExecute(E execution); - - // Visible for testing - 
public final ManagedExecution createExecution(QueryDescription query, User user, Namespace namespace, boolean system) { - return createExecution(query, UUID.randomUUID(), user, namespace, system); - } - - public final ManagedExecution createExecution(QueryDescription query, UUID queryId, User user, Namespace namespace, boolean system) { + public final ManagedExecution createExecution(QueryDescription query, UUID queryId, UserId user, Namespace namespace, boolean system) { // Transform the submitted query into an initialized execution - ManagedExecution managed = query.toManagedExecution(user, namespace.getDataset(), storage, datasetRegistry); + ManagedExecution managed = query.toManagedExecution(user, namespace.getDataset().getId(), storage, datasetRegistry); managed.setSystem(system); managed.setQueryId(queryId); managed.setMetaStorage(storage); @@ -192,6 +160,12 @@ public final ManagedExecution createExecution(QueryDescription query, UUID query return managed; } + public void clearQueryResults(ManagedExecution execution) { + executionStates.invalidate(execution.getId()); + } + + protected abstract void doExecute(E execution); + public final void cancelQuery(final ManagedExecution execution) { executionStates.invalidate(execution.getId()); @@ -202,6 +176,7 @@ public final void cancelQuery(final ManagedExecution execution) { doCancelQuery(execution); } + public abstract void doCancelQuery(final ManagedExecution execution); public void updateState(ManagedExecutionId id, ExecutionState execState) { State state = executionStates.getIfPresent(id); @@ -213,13 +188,6 @@ public void updateState(ManagedExecutionId id, ExecutionState execState) { log.warn("Could not update execution state of {} to {}, because it had no state.", id, execState); } - - public abstract void doCancelQuery(final ManagedExecution execution); - - public void clearQueryResults(ManagedExecution execution) { - executionStates.invalidate(execution.getId()); - } - public Stream streamQueryResults(E 
execution) { final InternalState resultParts = (InternalState) executionStates.getIfPresent(execution.getId()); @@ -262,4 +230,31 @@ public ExecutionState awaitDone(ManagedExecution execution, int time, TimeUnit u } return stateAfterWait.getState(); } + + /** + * Holds all informations about an execution, which cannot/should not be serialized/cached in a store. + */ + public interface State { + + /** + * The current {@link ExecutionState} of the execution. + */ + @NotNull + ExecutionState getState(); + + void setState(ExecutionState state); + + /** + * Synchronization barrier for web requests. + * Barrier is activated upon starting an execution so request can wait for execution completion. + * When the execution is finished the barrier is removed. + */ + CountDownLatch getExecutingLock(); + } + + public interface InternalState extends State{ + Stream streamQueryResults(); + } + + } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java index dc14fff906..6df19ef2fa 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/ManagedQuery.java @@ -17,15 +17,14 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.InternalExecution; import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.query.results.EntityResult; 
import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.QueryUtils; import com.fasterxml.jackson.annotation.JsonIgnore; import lombok.AccessLevel; @@ -53,7 +52,7 @@ public class ManagedQuery extends ManagedExecution implements SingleTableResult, - public ManagedQuery(Query query, User owner, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedQuery(Query query, UserId owner, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(owner, submittedDataset, storage, datasetRegistry); this.query = query; } @@ -72,7 +71,6 @@ public synchronized void finish(ExecutionState executionState) { super.finish(executionState); } - public Stream streamResults(OptionalLong maybeLimit) { final Stream results = getNamespace().getExecutionManager().streamQueryResults(this); @@ -104,12 +102,12 @@ public void setStatusBase(@NonNull Subject subject, @NonNull ExecutionStatus sta status.setQueryType(query.getClass().getAnnotation(CPSType.class).id()); if (query instanceof SecondaryIdQuery secondaryIdQuery) { - status.setSecondaryId((secondaryIdQuery).getSecondaryId().getId()); + status.setSecondaryId((secondaryIdQuery).getSecondaryId()); } } @Override - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status, Namespace namespace) { + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java b/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java index 04cf01e424..16b61fb384 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/query/NamespacedIdentifiableHolding.java @@ -13,5 +13,5 @@ */ public interface NamespacedIdentifiableHolding { - void collectNamespacedObjects(Set> identifiables); + void collectNamespacedObjects(Set> identifiables); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java index a6698b8f48..3f2c418b93 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryExecutionContext.java @@ -8,13 +8,13 @@ import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import lombok.AllArgsConstructor; @@ -52,7 +52,7 @@ public class QueryExecutionContext { private final int today = CDate.ofLocalDate(LocalDate.now()); - public List getEntityBucketsForTable(Entity entity, Table table) { + public List getEntityBucketsForTable(Entity entity, TableId table) { return bucketManager.getEntityBucketsForTable(entity, table); } diff --git 
a/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java b/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java index 38b258928f..39fa43d827 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/QueryPlanContext.java @@ -6,7 +6,6 @@ import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.worker.Worker; import lombok.AccessLevel; @@ -41,8 +40,4 @@ public ModificationShieldedWorkerStorage getStorage() { return worker.getStorage(); } - public CentralRegistry getCentralRegistry() { - return worker.getStorage().getCentralRegistry(); - } - } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java index 45d4db741b..a8e82f32c5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewExecution.java @@ -16,18 +16,18 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.CDate; import com.bakdata.conquery.models.common.QuarterUtils; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; import 
com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.AbsoluteFormQuery; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ColumnDescriptor; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; @@ -61,22 +61,26 @@ public class EntityPreviewExecution extends ManagedInternalForm datasetRegistry) { + public EntityPreviewExecution(EntityPreviewForm entityPreviewQuery, UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { super(entityPreviewQuery, user, submittedDataset, storage, datasetRegistry); } - @Override - public boolean isSystem() { - // This Form should NEVER be started manually. Nor persisted - return true; - } - @Override public void doInitExecutable() { super.doInitExecutable(); previewConfig = getNamespace().getPreviewConfig(); } + protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { + status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); + } + + @Override + public boolean isSystem() { + // This Form should NEVER be started manually. Nor persisted + return true; + } + /** * Collects status of {@link EntityPreviewForm#getValuesQuery()} and {@link EntityPreviewForm#getInfoCardQuery()}. *

@@ -339,7 +343,8 @@ private static Map> getQuarterLines(EntityResult return quarterLines; } - protected void setAdditionalFieldsForStatusWithColumnDescription(Subject subject, FullExecutionStatus status) { + @Override + protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecutionStatus status, Namespace namespace) { status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); } @@ -351,14 +356,16 @@ public List generateColumnDescriptions(boolean isInitialized, // Add grouping semantics to secondaryIds to group by if (descriptor.getSemantics() .stream() - .anyMatch(semanticType -> semanticType instanceof SemanticType.SecondaryIdT desc && previewConfig.isGroupingColumn(desc.getSecondaryId()))) { + .anyMatch(semanticType -> semanticType instanceof SemanticType.SecondaryIdT desc + && previewConfig.isGroupingColumn(desc.getSecondaryId().resolve()) + )) { descriptor.getSemantics().add(new SemanticType.GroupT()); } // Add hidden semantics to fields flagged for hiding. 
if (descriptor.getSemantics() .stream() - .anyMatch(semanticType -> semanticType instanceof SemanticType.ColumnT desc && previewConfig.isHidden(desc.getColumn()))) { + .anyMatch(semanticType -> semanticType instanceof SemanticType.ColumnT desc && previewConfig.isHidden(desc.getColumn().resolve()))) { descriptor.getSemantics().add(new SemanticType.HiddenT()); } } @@ -367,11 +374,6 @@ public List generateColumnDescriptions(boolean isInitialized, return descriptors; } - @Override - protected void setAdditionalFieldsForStatusWithSource(Subject subject, FullExecutionStatus status, Namespace namespace) { - status.setColumnDescriptions(generateColumnDescriptions(isInitialized(), getConfig())); - } - @Override public List getResultInfos() { return getValuesQuery().getResultInfos(); diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java index 62747276a3..a71354834e 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/preview/EntityPreviewForm.java @@ -23,17 +23,19 @@ import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.AbsoluteFormQuery; import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; +import 
com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.query.visitor.QueryVisitor; @@ -76,13 +78,7 @@ public class EntityPreviewForm extends Form implements InternalForm { private final Map timeOverViews; - @Nullable - @Override - public JsonNode getValues() { - return null; // will not be implemented. - } - - public static EntityPreviewForm create(String entity, String idKind, Range dateRange, List sources, List infos, List timeStratifiedSelects, DatasetRegistry datasetRegistry) { // We use this query to filter for the single selected query. final Query entitySelectQuery = new ConceptQuery(new CQExternal(List.of(idKind), new String[][]{{"HEAD"}, {entity}}, true)); @@ -96,6 +92,30 @@ public static EntityPreviewForm create(String entity, String idKind, Range dateRange, List sources, Query entitySelectQuery) { + // Query exporting selected Sources of the Entity. + final TableExportQuery exportQuery = new TableExportQuery(entitySelectQuery); + + exportQuery.setDateRange(dateRange); + exportQuery.setTables(sources.stream().map(ConnectorId::resolve).map(CQConcept::forConnector).collect(Collectors.toList())); + exportQuery.setRawConceptValues(false); + return exportQuery; + } + + @NotNull + private static AbsoluteFormQuery createInfoCardQuery(Range dateRange, List infos, Query entitySelectQuery) { - // Query exporting a few additional infos on the entity. 
- return new AbsoluteFormQuery(entitySelectQuery, dateRange, - ArrayConceptQuery.createFromFeatures( - infos.stream() - .map(CQConcept::forSelect) - .collect(Collectors.toList()) - ), - List.of(ExportForm.ResolutionAndAlignment.of(Resolution.COMPLETE, Alignment.NO_ALIGN)) - ); + @Nullable + @Override + public JsonNode getValues() { + return null; // will not be implemented. } - @NotNull - private static TableExportQuery createExportQuery(Range dateRange, List sources, Query entitySelectQuery) { - // Query exporting selected Sources of the Entity. - final TableExportQuery exportQuery = new TableExportQuery(entitySelectQuery); - - exportQuery.setDateRange(dateRange); - exportQuery.setTables(sources.stream().map(CQConcept::forConnector).collect(Collectors.toList())); - exportQuery.setRawConceptValues(false); - return exportQuery; + @Override + public void authorize(Subject subject, Dataset submittedDataset, @NonNull List visitors, MetaStorage storage) { + QueryDescription.authorizeQuery(this, subject, submittedDataset, visitors, storage); } + @Override + public String getLocalizedTypeLabel() { + // If we successfully keep away system queries from the users, this should not be called except for buildStatusFull, where it is ignored. + return getClass().getAnnotation(CPSType.class).id(); + } @Override public Map createSubQueries() { @@ -160,18 +172,7 @@ public Map createSubQueries() { } @Override - public void authorize(Subject subject, Dataset submittedDataset, @NonNull List visitors, MetaStorage storage) { - QueryDescription.authorizeQuery(this, subject, submittedDataset, visitors, storage); - } - - @Override - public String getLocalizedTypeLabel() { - // If we successfully keep away system queries from the users, this should not be called except for buildStatusFull, where it is ignored. 
- return getClass().getAnnotation(CPSType.class).id(); - } - - @Override - public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new EntityPreviewExecution(this, user, submittedDataset, storage, datasetRegistry); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java index 3e943f2dbe..96da92712c 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/ConceptQueryPlan.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.EmptyBucket; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -100,11 +101,12 @@ public Optional execute(QueryExecutionContext ctx, Entit nextTable(ctx, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); log.trace("Table[{}] has {} buckets for Entity[{}]", currentTable, tableBuckets, entity); - for (Bucket bucket : tableBuckets) { + for (BucketId bucketId : tableBuckets) { + Bucket bucket = bucketId.resolve(); if (!isOfInterest(bucket)) { continue; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java 
b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java index 26591d05f5..035d8e434a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/SecondaryIdQueryPlan.java @@ -16,6 +16,7 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.QueryPlanContext; @@ -55,11 +56,6 @@ public class SecondaryIdQueryPlan implements QueryPlan { private final Set

tablesWithoutSecondaryId; private final ConceptQueryPlan queryPlan; - - - private Map childPerKey = new HashMap<>(); - - /** * TODO borrow these from {@link QueryExecutionContext} * @@ -67,8 +63,20 @@ public class SecondaryIdQueryPlan implements QueryPlan { */ @Getter(AccessLevel.NONE) private final Queue childPlanReusePool = new LinkedList<>(); - private final int subPlanRetentionLimit; + private Map childPerKey = new HashMap<>(); + + @Override + public void init(QueryExecutionContext ctx, Entity entity) { + queryPlan.init(ctx, entity); + + // Dump the created children into reuse-pool + childPlanReusePool.clear(); + + childPerKey.values().stream().limit(subPlanRetentionLimit).forEach(childPlanReusePool::add); + + childPerKey = new HashMap<>(); + } /** * This is the same execution as a typical ConceptQueryPlan. The difference @@ -107,9 +115,11 @@ private void executeQueriesWithSecondaryId(QueryExecutionContext ctx, Entity ent nextTable(ctxWithPhase, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); + + for (BucketId bucketId : tableBuckets) { + Bucket bucket = bucketId.resolve(); - for (Bucket bucket : tableBuckets) { String entityId = entity.getId(); nextBlock(bucket); @@ -154,17 +164,14 @@ private void executeQueriesWithSecondaryId(QueryExecutionContext ctx, Entity ent } } - private boolean discardSubPlan(ConceptQueryPlan plan) { - return childPlanReusePool.add(plan); - } - private void executeQueriesWithoutSecondaryId(QueryExecutionContext ctx, Entity entity, Table currentTable) { nextTable(ctx, currentTable); - final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable); + final List tableBuckets = ctx.getBucketManager().getEntityBucketsForTable(entity, currentTable.getId()); - for (Bucket bucket : tableBuckets) { + for (BucketId bucketId : tableBuckets) { + 
Bucket bucket = bucketId.resolve(); String entityId = entity.getId(); nextBlock(bucket); if (!bucket.containsEntity(entityId) || !isOfInterest(bucket)) { @@ -242,23 +249,15 @@ private ConceptQueryPlan createChild(QueryExecutionContext currentContext, Bucke final QueryExecutionContext context = QueryUtils.determineDateAggregatorForContext(currentContext, plan::getValidityDateAggregator); plan.init(context, queryPlan.getEntity()); - plan.nextTable(context, currentBucket.getTable()); + plan.nextTable(context, currentBucket.getTable().resolve()); plan.isOfInterest(currentBucket); plan.nextBlock(currentBucket); return plan; } - @Override - public void init(QueryExecutionContext ctx, Entity entity) { - queryPlan.init(ctx, entity); - - // Dump the created children into reuse-pool - childPlanReusePool.clear(); - - childPerKey.values().stream().limit(subPlanRetentionLimit).forEach(childPlanReusePool::add); - - childPerKey = new HashMap<>(); + private boolean discardSubPlan(ConceptQueryPlan plan) { + return childPlanReusePool.add(plan); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java index 10c870ce4e..f81b79a312 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java @@ -9,9 +9,13 @@ import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -20,6 +24,7 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.ToString; +import org.jetbrains.annotations.NotNull; /** * The QueryPlan creates a full dump of the given table within a certain @@ -39,7 +44,7 @@ public class TableExportQueryPlan implements QueryPlan { private final Map tables; @ToString.Exclude - private final Map positions; + private final Map positions; /** * If true, Connector {@link Column}s will be output raw. @@ -48,18 +53,6 @@ public class TableExportQueryPlan implements QueryPlan { @Getter private final boolean rawConceptValues; - - @Override - public boolean isOfInterest(Entity entity) { - return subPlan.isOfInterest(entity); - } - - @Override - public Optional> getValidityDateAggregator() { - // TODO create a fake aggregator and feed it inside the loop, return it here. 
- return Optional.empty(); - } - @Override public void init(QueryExecutionContext ctxt, Entity entity) { subPlan.init(ctxt, entity); @@ -84,9 +77,12 @@ public Optional execute(QueryExecutionContext ctx, Entity final CQTable cqTable = entry.getKey(); final ValidityDate validityDate = cqTable.findValidityDate(); final QPNode query = entry.getValue(); - final Map cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, cqTable.getConnector()); + final Map cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, cqTable.getConnector()); + final Connector connector = cqTable.getConnector().resolve(); - for (Bucket bucket : ctx.getEntityBucketsForTable(entity, cqTable.getConnector().getTable())) { + for (BucketId bucketId : ctx.getEntityBucketsForTable(entity, connector.getResolvedTableId())) { + Bucket bucket = bucketId.resolve(); + CBlock cBlock = cblocks.get(bucketId).resolve(); if (!shouldEvaluateBucket(query, bucket, entity, ctx)) { continue; @@ -106,7 +102,7 @@ public Optional execute(QueryExecutionContext ctx, Entity continue; } - final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDate, cblocks.get(bucket)); + final Object[] resultRow = collectRow(totalColumns, cqTable, bucket, event, validityDate, cBlock); results.add(resultRow); } @@ -116,6 +112,18 @@ public Optional execute(QueryExecutionContext ctx, Entity return Optional.of(new MultilineEntityResult(entity.getId(), results)); } + @Override + public boolean isOfInterest(Entity entity) { + return subPlan.isOfInterest(entity); + } + + @NotNull + @Override + public Optional> getValidityDateAggregator() { + // TODO create a fake aggregator and feed it inside the loop, return it here. + return Optional.empty(); + } + /** * Test if the Bucket should even be evaluated for the {@link QPNode}. *

@@ -128,7 +136,7 @@ private boolean shouldEvaluateBucket(QPNode query, Bucket bucket, Entity entity, return false; } - query.nextTable(ctx, bucket.getTable()); + query.nextTable(ctx, bucket.getTable().resolve()); query.nextBlock(bucket); return query.isOfInterest(bucket); @@ -140,7 +148,7 @@ private boolean shouldEvaluateBucket(QPNode query, Bucket bucket, Entity entity, private boolean isRowIncluded(QPNode query, Bucket bucket, Entity entity, int event, QueryExecutionContext ctx) { query.init(entity, ctx); - query.nextTable(ctx, bucket.getTable()); + query.nextTable(ctx, bucket.getTable().resolve()); query.nextBlock(bucket); query.acceptEvent(bucket, event); @@ -158,16 +166,18 @@ private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket entry[0] = List.of(date); } - entry[1] = exportDescription.getConnector().getTable().getLabel(); + final Connector connector = exportDescription.getConnector().resolve(); + entry[1] = connector.getResolvedTable().getLabel(); - for (Column column : exportDescription.getConnector().getTable().getColumns()) { + for (Column column : connector.getResolvedTable().getColumns()) { // ValidityDates are handled separately. 
if (validityDate != null && validityDate.containsColumn(column)){ continue; } - if (!positions.containsKey(column)) { + final ColumnId columnId = column.getId(); + if (!positions.containsKey(columnId)) { continue; } @@ -175,9 +185,9 @@ private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket continue; } - final int position = positions.get(column); + final int position = positions.get(columnId); - if (!rawConceptValues && column.equals(exportDescription.getConnector().getColumn())) { + if (!rawConceptValues && columnId.equals(connector.getColumn())) { entry[position] = cblock.getMostSpecificChildLocalId(event); continue; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java index 4f3187ce9e..39c6610208 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/DistinctValuesWrapperAggregator.java @@ -10,7 +10,6 @@ import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; -import com.bakdata.conquery.models.types.ResultType; import com.google.common.collect.ImmutableList; import lombok.Getter; import lombok.RequiredArgsConstructor; diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java index df4c440b78..6d018c772a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java +++ 
b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptElementsAggregator.java @@ -10,6 +10,8 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -26,7 +28,7 @@ public class ConceptElementsAggregator extends Aggregator> { private Column column; private Entity entity; - private Map cblocks; + private Map cblocks; private CBlock cblock; private final Map tableConnectors; @@ -35,7 +37,7 @@ public ConceptElementsAggregator(TreeConcept concept) { super(); tableConnectors = concept.getConnectors().stream() .filter(conn -> conn.getColumn() != null) - .collect(Collectors.toMap(Connector::getTable, Functions.identity())); + .collect(Collectors.toMap(Connector::getResolvedTable, Functions.identity())); } @Override @@ -52,13 +54,13 @@ public void nextTable(QueryExecutionContext ctx, Table currentTable) { return; } - column = connector.getColumn(); - cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, connector); + column = connector.getColumn().resolve(); + cblocks = ctx.getBucketManager().getEntityCBlocksForConnector(entity, connector.getId()); } @Override public void nextBlock(Bucket bucket) { - cblock = cblocks.get(bucket); + cblock = cblocks.get(bucket.getId()).resolve(); } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java index 
83bf74264c..773e813861 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/aggregators/specific/value/ConceptValuesAggregator.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; @@ -32,7 +33,7 @@ public ConceptValuesAggregator(TreeConcept concept) { this.concept = concept; tableConnectors = concept.getConnectors().stream() .filter(conn -> conn.getColumn() != null) - .collect(Collectors.toMap(Connector::getTable, Functions.identity())); + .collect(Collectors.toMap(Connector::getResolvedTable, Functions.identity())); } @Override @@ -49,7 +50,8 @@ public void nextTable(QueryExecutionContext ctx, Table currentTable) { return; } - column = connector.getColumn(); + final ColumnId columnId = connector.getColumn(); + column = columnId != null ? 
columnId.resolve() : null; } @Override diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java index ae790ac71b..09f682a164 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ConceptNode.java @@ -10,8 +10,11 @@ import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.queryplan.QPChainNode; @@ -28,7 +31,7 @@ public class ConceptNode extends QPChainNode { private final CQTable table; private final SecondaryIdDescription selectedSecondaryId; private boolean tableActive; - private Map preCurrentRow; + private Map preCurrentRow; private CBlock currentRow; public ConceptNode(QPNode child, List> concepts, CQTable table, SecondaryIdDescription selectedSecondaryId) { @@ -66,17 +69,18 @@ public void init(Entity entity, QueryExecutionContext context) { @Override public void nextTable(QueryExecutionContext ctx, Table currentTable) { - tableActive = table.getConnector().getTable().equals(currentTable) + Connector connector = table.getConnector().resolve(); + tableActive = connector.getResolvedTableId().equals(currentTable.getId()) && ctx.getActiveSecondaryId() == selectedSecondaryId; if(tableActive) { - 
super.nextTable(ctx.withConnector(table.getConnector()), currentTable); + super.nextTable(ctx.withConnector(connector), currentTable); } } @Override public void nextBlock(Bucket bucket) { if (tableActive) { - currentRow = Objects.requireNonNull(preCurrentRow.get(bucket)); + currentRow = Objects.requireNonNull(preCurrentRow.get(bucket.getId()).resolve()); super.nextBlock(bucket); } } @@ -98,7 +102,7 @@ public boolean isOfInterest(Bucket bucket) { return false; } - final CBlock cBlock = Objects.requireNonNull(preCurrentRow.get(bucket)); + final CBlock cBlock = Objects.requireNonNull(preCurrentRow.get(bucket.getId()).resolve()); if(cBlock.isConceptIncluded(entity.getId(), requiredBits)) { return super.isOfInterest(bucket); @@ -137,7 +141,7 @@ public boolean isContained() { @Override public void collectRequiredTables(Set

requiredTables) { super.collectRequiredTables(requiredTables); - requiredTables.add(table.getConnector().getTable()); + requiredTables.add(table.getConnector().resolve().getResolvedTable()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java index eba1af8b94..0a8601b6a8 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/specific/ValidityDateNode.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.models.query.queryplan.specific; import java.util.Map; -import java.util.Objects; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.common.daterange.CDateRange; @@ -9,6 +8,8 @@ import com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; +import com.bakdata.conquery.models.identifiable.ids.specific.BucketId; +import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.queryplan.QPChainNode; import com.bakdata.conquery.models.query.queryplan.QPNode; @@ -19,10 +20,9 @@ public class ValidityDateNode extends QPChainNode { private final ValidityDate validityDate; + protected Map preCurrentRow; private transient CDateSet restriction; - protected Map preCurrentRow; - public ValidityDateNode(ValidityDate validityDate, QPNode child) { super(child); Preconditions.checkNotNull(validityDate, this.getClass().getSimpleName() + " needs a validityDate"); @@ -42,12 +42,11 @@ public boolean acceptEvent(Bucket bucket, int event) { } @Override - public boolean isOfInterest(Bucket bucket) { - final CBlock cBlock = 
Objects.requireNonNull(preCurrentRow.get(bucket)); - - final CDateRange range = cBlock.getEntityDateRange(entity.getId()); + public void nextTable(QueryExecutionContext ctx, Table currentTable) { + super.nextTable(ctx.withValidityDateColumn(validityDate), currentTable); + restriction = ctx.getDateRestriction(); - return restriction.intersects(range) && super.isOfInterest(bucket); + preCurrentRow = ctx.getBucketManager().getEntityCBlocksForConnector(getEntity(), context.getConnector().getId()); } @Override @@ -56,10 +55,11 @@ public boolean isContained() { } @Override - public void nextTable(QueryExecutionContext ctx, Table currentTable) { - super.nextTable(ctx.withValidityDateColumn(validityDate), currentTable); - restriction = ctx.getDateRestriction(); + public boolean isOfInterest(Bucket bucket) { + final CBlock cBlock = preCurrentRow.get(bucket.getId()).resolve(); - preCurrentRow = ctx.getBucketManager().getEntityCBlocksForConnector(getEntity(), context.getConnector()); + final CDateRange range = cBlock.getEntityDateRange(entity.getId()); + + return restriction.intersects(range) && super.isOfInterest(bucket); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java index a50a4efea7..63d1812332 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SecondaryIdResultInfo.java @@ -20,7 +20,7 @@ public class SecondaryIdResultInfo extends ResultInfo { public SecondaryIdResultInfo(SecondaryIdDescription secondaryId) { - super(Set.of(new SemanticType.SecondaryIdT(secondaryId))); + super(Set.of(new SemanticType.SecondaryIdT(secondaryId.getId()))); this.secondaryId = secondaryId; type = ResultType.Primitive.STRING; @@ -33,12 +33,12 @@ public String getDescription() { } @Override - public Printer 
createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { + public Printer createPrinter(PrinterFactory printerFactory, PrintSettings printSettings) { if (secondaryId.getMapping() == null) { return printerFactory.getStringPrinter(printSettings); } else { - return new MappedPrinter(secondaryId.getMapping()); + return new MappedPrinter(secondaryId.getMapping().resolve()); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java index 33db8e3e8c..2b9ac53a40 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/resultinfo/SelectResultInfo.java @@ -23,7 +23,7 @@ public class SelectResultInfo extends ResultInfo { private final CQConcept cqConcept; public SelectResultInfo(Select select, CQConcept cqConcept, Set semantics) { - super(Sets.union(semantics, Set.of(new SemanticType.SelectResultT(select)))); + super(Sets.union(semantics, Set.of(new SemanticType.SelectResultT(select.getId())))); this.select = select; this.cqConcept = cqConcept; } diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java b/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java index 1836b01877..4984115649 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java +++ b/backend/src/main/java/com/bakdata/conquery/models/query/results/FormShardResult.java @@ -47,7 +47,7 @@ public void addResult(DistributedExecutionManager executionManager) { ); } - if (managedInternalForm.allSubQueriesDone(executionManager)) { + if (managedInternalForm.allSubQueriesDone()) { managedInternalForm.finish(ExecutionState.DONE); } diff --git a/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java 
b/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java index ed077b225a..04bab285bc 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java +++ b/backend/src/main/java/com/bakdata/conquery/models/types/SemanticType.java @@ -2,12 +2,14 @@ import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.models.config.IdColumnConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeInfo; import lombok.Data; @@ -73,8 +75,7 @@ public static class IdT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class SecondaryIdT extends SemanticType { - @NsIdRef - private final SecondaryIdDescription secondaryId; + private final SecondaryIdDescriptionId secondaryId; } /** @@ -102,8 +103,7 @@ public static class HiddenT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class SelectResultT extends SemanticType { - @NsIdRef - private final Select select; + private final SelectId select; } /** @@ -115,8 +115,7 @@ public static class SelectResultT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class ConceptColumnT extends SemanticType { - @NsIdRef - private final Concept concept; + 
private final ConceptId concept; } @@ -130,8 +129,7 @@ public static class ConceptColumnT extends SemanticType { @Data @RequiredArgsConstructor(onConstructor_ = @JsonCreator) public static class ColumnT extends SemanticType { - @NsIdRef - private final Column column; + private final ColumnId column; } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java index e9c3e07d3c..08e353f19a 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DatasetRegistry.java @@ -4,22 +4,23 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.NoSuchElementException; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; +import com.bakdata.conquery.io.jackson.Injectable; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.mode.NamespaceHandler; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.IndexKey; @@ -35,7 +36,7 @@ @Slf4j @RequiredArgsConstructor @JsonIgnoreType -public class 
DatasetRegistry extends IdResolveContext implements Closeable { +public class DatasetRegistry implements Closeable, NamespacedStorageProvider, Injectable { private final ConcurrentMap datasets = new ConcurrentHashMap<>(); @Getter @@ -48,18 +49,19 @@ public class DatasetRegistry extends IdResolveContext imple private final NamespaceHandler namespaceHandler; + @Getter private final IndexService indexService; public N createNamespace(Dataset dataset, MetaStorage metaStorage, Environment environment) throws IOException { // Prepare empty storage NamespaceStorage datasetStorage = new NamespaceStorage(config.getStorage(), "dataset_" + dataset.getName()); - final ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(this); + final ObjectMapper persistenceMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetStorage); // Each store injects its own IdResolveCtx so each needs its own mapper - datasetStorage.openStores(Jackson.copyMapperAndInjectables((persistenceMapper))); + datasetStorage.openStores(Jackson.copyMapperAndInjectables((persistenceMapper)), environment.metrics()); datasetStorage.loadData(); datasetStorage.updateDataset(dataset); - datasetStorage.updateIdMapping(new EntityIdMap()); + datasetStorage.updateIdMapping(new EntityIdMap(datasetStorage)); datasetStorage.setPreviewConfig(new PreviewConfig()); datasetStorage.close(); @@ -89,15 +91,6 @@ public void removeNamespace(DatasetId id) { } } - @Override - public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { - if (!datasets.containsKey(dataset)) { - throw new NoSuchElementException(String.format("Did not find Dataset[%s] in [%s]", dataset, datasets.keySet())); - } - - return datasets.get(dataset).getStorage().getCentralRegistry(); - } - public List getAllDatasets() { return datasets.values().stream().map(Namespace::getStorage).map(NamespaceStorage::getDataset).collect(Collectors.toList()); } @@ -128,15 +121,18 @@ public void 
close() { @Override public MutableInjectableValues inject(MutableInjectableValues values) { - // Make this class also available under DatasetRegistry - super.inject(values).add(DatasetRegistry.class, this); - indexService.inject(values); - - return values; + // Make this class also available under DatasetRegistry + return values.add(NamespacedStorageProvider.class, this) + .add(this.getClass(), this); } public void resetIndexService() { indexService.evictCache(); } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + return datasets.get(datasetId).getStorage(); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java index 0f7f8824e7..c018a2a797 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/DistributedNamespace.java @@ -12,6 +12,7 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.messages.namespaces.specific.CollectColumnValuesJob; import com.bakdata.conquery.models.messages.namespaces.specific.UpdateMatchingStatsMessage; @@ -53,17 +54,17 @@ public DistributedNamespace( @Override void updateMatchingStats() { - final Collection> concepts = getStorage().getAllConcepts() - .stream() - .filter(concept -> concept.getMatchingStats() == null) - .collect(Collectors.toSet()); + final Collection concepts = getStorage().getAllConcepts() + .filter(concept -> concept.getMatchingStats() == null) + .map(Concept::getId) + .collect(Collectors.toSet()); getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(concepts)); } @Override void 
registerColumnValuesInSearch(Set columns) { log.trace("Sending columns to collect values on shards: {}", Arrays.toString(columns.toArray())); - getWorkerHandler().sendToAll(new CollectColumnValuesJob(columns, this)); + getWorkerHandler().sendToAll(new CollectColumnValuesJob(columns.stream().map(Column::getId).collect(Collectors.toSet()), this)); } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java b/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java deleted file mode 100644 index d65f7850c0..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/IdResolveContext.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.bakdata.conquery.models.worker; - -import java.util.NoSuchElementException; -import java.util.Optional; - -import com.bakdata.conquery.io.jackson.Injectable; -import com.bakdata.conquery.io.jackson.MutableInjectableValues; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.fasterxml.jackson.databind.DeserializationContext; -import com.fasterxml.jackson.databind.JsonMappingException; -import io.dropwizard.jackson.Jackson; - -/** - * Superclass for implementations that map ids to existing objects in the conquery id system. - * This is a bridge between {@link Jackson} and conquery id serdes. 
- */ -public abstract class IdResolveContext implements Injectable { - - public static IdResolveContext get(DeserializationContext ctxt) throws JsonMappingException { - IdResolveContext namespaces = (IdResolveContext) ctxt - .findInjectableValue(IdResolveContext.class.getName(), null, null); - if(namespaces == null) { - throw new NoSuchElementException("Could not find injected namespaces"); - } - return namespaces; - } - - @Override - public MutableInjectableValues inject(MutableInjectableValues values) { - return values.add(IdResolveContext.class, this); - } - - public abstract CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException; - - public & NamespacedId, T extends Identifiable> T resolve(ID id) { - return findRegistry(id.getDataset()).resolve(id); - } - - public & NamespacedId, T extends Identifiable> Optional getOptional(ID id) { - return findRegistry(id.getDataset()).getOptional(id); - } - - public & NamespacedId, T extends Identifiable> Optional getOptional(DatasetId dataset, ID id) { - return findRegistry(dataset).getOptional(id); - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java index d2ad40c0c4..0e04fa8e81 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/Namespace.java @@ -3,7 +3,6 @@ import java.io.IOException; import java.util.Collection; import java.util.List; -import java.util.NoSuchElementException; import java.util.Set; import com.bakdata.conquery.apiv1.query.concept.specific.external.EntityResolver; @@ -14,8 +13,6 @@ import com.bakdata.conquery.models.datasets.PreviewConfig; import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.MappableSingleColumnSelect; -import com.bakdata.conquery.models.identifiable.CentralRegistry; 
-import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.jobs.SimpleJob; import com.bakdata.conquery.models.jobs.UpdateFilterSearchJob; @@ -31,7 +28,7 @@ @Getter @ToString(onlyExplicitlyIncluded = true) @RequiredArgsConstructor -public abstract class Namespace extends IdResolveContext { +public abstract class Namespace { private final ObjectMapper preprocessMapper; @@ -80,10 +77,6 @@ public void remove() { storage.removeStorage(); } - public CentralRegistry getCentralRegistry() { - return getStorage().getCentralRegistry(); - } - public int getNumberOfEntities() { return getStorage().getNumberOfEntities(); } @@ -92,25 +85,17 @@ public PreviewConfig getPreviewConfig() { return getStorage().getPreviewConfig(); } - @Override - public CentralRegistry findRegistry(DatasetId dataset) throws NoSuchElementException { - if (!this.getDataset().getId().equals(dataset)) { - throw new NoSuchElementException("Wrong dataset: '" + dataset + "' (expected: '" + this.getDataset().getId() + "')"); - } - return storage.getCentralRegistry(); - } - public void updateInternToExternMappings() { - storage.getAllConcepts().stream() - .flatMap(c -> c.getConnectors().stream()) - .flatMap(con -> con.getSelects().stream()) - .filter(MappableSingleColumnSelect.class::isInstance) - .map(MappableSingleColumnSelect.class::cast) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); - - storage.getSecondaryIds().stream() - .filter(desc -> desc.getMapping() != null) - .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping()::init))); + storage.getAllConcepts() + .flatMap(c -> c.getConnectors().stream()) + .flatMap(con -> con.getSelects().stream()) + .filter(MappableSingleColumnSelect.class::isInstance) + .map(MappableSingleColumnSelect.class::cast) + .forEach((s) -> 
jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s::loadMapping))); + + storage.getSecondaryIds() + .filter(desc -> desc.getMapping() != null) + .forEach((s) -> jobManager.addSlowJob(new SimpleJob("Update internToExtern Mappings [" + s.getId() + "]", s.getMapping().resolve()::init))); } /** diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java index d365519aa9..8aed99c098 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/ShardWorkers.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.models.worker; -import java.util.NoSuchElementException; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -8,22 +7,23 @@ import java.util.concurrent.atomic.AtomicInteger; import com.bakdata.conquery.commands.ShardNode; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.config.ThreadPoolDefinition; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.WorkerId; import com.bakdata.conquery.models.jobs.SimpleJob; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.ObjectMapper; +import io.dropwizard.core.setup.Environment; import io.dropwizard.lifecycle.Managed; -import 
jakarta.validation.Validator; import lombok.Getter; import lombok.NonNull; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; /** @@ -32,25 +32,20 @@ * Each Shard contains one {@link Worker} per {@link Dataset}. */ @Slf4j -public class ShardWorkers extends IdResolveContext implements Managed { - @Getter @Setter - private AtomicInteger nextWorker = new AtomicInteger(0); +public class ShardWorkers implements NamespacedStorageProvider, Managed { @Getter private final ConcurrentHashMap workers = new ConcurrentHashMap<>(); @JsonIgnore private final transient ConcurrentMap dataset2Worker = new ConcurrentHashMap<>(); - /** * Shared ExecutorService among Workers for Jobs. */ private final ThreadPoolExecutor jobsThreadPool; private final ThreadPoolDefinition queryThreadPoolDefinition; - private final InternalMapperFactory internalMapperFactory; - private final int entityBucketSize; - private final int secondaryIdSubPlanRetention; + private final AtomicInteger nextWorker = new AtomicInteger(0); public ShardWorkers(ThreadPoolDefinition queryThreadPoolDefinition, InternalMapperFactory internalMapperFactory, int entityBucketSize, int secondaryIdSubPlanRetention) { @@ -66,54 +61,40 @@ public ShardWorkers(ThreadPoolDefinition queryThreadPoolDefinition, InternalMapp jobsThreadPool.prestartAllCoreThreads(); } - public Worker createWorker(WorkerStorage storage, boolean failOnError) { + public Worker createWorker(WorkerStorage storage, boolean failOnError, Environment environment) { - final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(this); - final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(this); + final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(storage); + final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(storage); final Worker worker = - new Worker(queryThreadPoolDefinition, storage, jobsThreadPool, 
failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention); + new Worker(queryThreadPoolDefinition, storage, jobsThreadPool, failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention, environment); addWorker(worker); return worker; } - public Worker createWorker(Dataset dataset, StoreFactory storageConfig, @NonNull String name, Validator validator, boolean failOnError) { - - final ObjectMapper persistenceMapper = internalMapperFactory.createWorkerPersistenceMapper(this); + private void addWorker(Worker worker) { + nextWorker.incrementAndGet(); + workers.put(worker.getInfo().getId(), worker); + dataset2Worker.put(worker.getStorage().getDataset().getId(), worker); + } - final ObjectMapper communicationMapper = internalMapperFactory.createWorkerCommunicationMapper(this); + public Worker createWorker(Dataset dataset, StoreFactory storageConfig, @NonNull String name, Environment environment, boolean failOnError) { final Worker worker = - Worker.newWorker(dataset, queryThreadPoolDefinition, jobsThreadPool, storageConfig, name, failOnError, entityBucketSize, persistenceMapper, communicationMapper, secondaryIdSubPlanRetention); + Worker.newWorker(dataset, queryThreadPoolDefinition, jobsThreadPool, storageConfig, name, failOnError, entityBucketSize, internalMapperFactory, secondaryIdSubPlanRetention, environment); addWorker(worker); return worker; } - private void addWorker(Worker worker) { - nextWorker.incrementAndGet(); - workers.put(worker.getInfo().getId(), worker); - dataset2Worker.put(worker.getStorage().getDataset().getId(), worker); - } - public Worker getWorker(WorkerId worker) { return Objects.requireNonNull(workers.get(worker)); } - - @Override - public CentralRegistry findRegistry(DatasetId dataset) { - if (!dataset2Worker.containsKey(dataset)) { - throw new NoSuchElementException(String.format("Did not find Dataset[%s] in [%s]", dataset, dataset2Worker.keySet())); - } - - return 
dataset2Worker.get(dataset).getStorage().getCentralRegistry(); - } - public void removeWorkerFor(DatasetId dataset) { final Worker worker = dataset2Worker.get(dataset); @@ -133,7 +114,7 @@ removed from dataset2Worker (which is used in deserialization of NamespacedIds, removed.remove(); } catch(Exception e) { - log.error("Failed to remove storage "+removed, e); + log.error("Failed to remove storage {}", removed, e); } } @@ -161,4 +142,14 @@ public void stop() { w.close(); } } + + @Override + public NamespacedStorage getStorage(DatasetId datasetId) { + return dataset2Worker.get(datasetId).getStorage(); + } + + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(NamespacedStorageProvider.class, this); + } } diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java b/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java deleted file mode 100644 index 4f1d3799b3..0000000000 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/SingletonNamespaceCollection.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.bakdata.conquery.models.worker; - -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - -@RequiredArgsConstructor -public class SingletonNamespaceCollection extends IdResolveContext { - - @NonNull - private final CentralRegistry registry; - - @Override - public CentralRegistry findRegistry(DatasetId dataset) { - return registry; - } -} diff --git a/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java b/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java index 3e20e742fc..d24ec985d5 100644 --- a/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java +++ b/backend/src/main/java/com/bakdata/conquery/models/worker/Worker.java @@ -4,11 
+4,12 @@ import java.io.IOException; import java.util.concurrent.ExecutorService; -import com.bakdata.conquery.io.jackson.serializer.NsIdRef; import com.bakdata.conquery.io.mina.MessageSender; import com.bakdata.conquery.io.mina.NetworkSession; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.io.storage.WorkerStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; +import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.StoreFactory; import com.bakdata.conquery.models.config.ThreadPoolDefinition; import com.bakdata.conquery.models.datasets.Dataset; @@ -18,7 +19,9 @@ import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.BucketManager; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.models.jobs.JobManager; import com.bakdata.conquery.models.messages.namespaces.NamespaceMessage; import com.bakdata.conquery.models.messages.network.MessageToManagerNode; @@ -26,6 +29,7 @@ import com.bakdata.conquery.models.messages.network.specific.ForwardToNamespace; import com.bakdata.conquery.models.query.QueryExecutor; import com.fasterxml.jackson.databind.ObjectMapper; +import io.dropwizard.core.setup.Environment; import lombok.Getter; import lombok.NonNull; import lombok.Setter; @@ -41,8 +45,6 @@ public class Worker implements MessageSender.Transforming getConcept() { return concept; } @DELETE public void removeConcept() { - processor.deleteConcept(concept); + processor.deleteConcept(concept.getId()); } } \ No newline at end of file diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java 
b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java index ee81d656cb..6427619cde 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetProcessor.java @@ -73,6 +73,7 @@ public class AdminDatasetProcessor { public synchronized Dataset addDataset(Dataset dataset) throws IOException { final String name = dataset.getName(); + if (datasetRegistry.get(new DatasetId(name)) != null) { throw new WebApplicationException("Dataset already exists", Response.Status.CONFLICT); } @@ -86,12 +87,12 @@ public synchronized Dataset addDataset(Dataset dataset) throws IOException { public synchronized void deleteDataset(Dataset dataset) { final Namespace namespace = datasetRegistry.get(dataset.getId()); - if (!namespace.getStorage().getTables().isEmpty()) { + if (namespace.getStorage().getTables().findAny().isPresent()) { throw new WebApplicationException( String.format( "Cannot delete dataset `%s`, because it still has tables: `%s`", dataset.getId(), - namespace.getStorage().getTables().stream() + namespace.getStorage().getTables() .map(Table::getId) .map(Objects::toString) .collect(Collectors.joining(",")) @@ -109,7 +110,7 @@ public synchronized void deleteDataset(Dataset dataset) { */ public synchronized void addSecondaryId(Namespace namespace, SecondaryIdDescription secondaryId) { final Dataset dataset = namespace.getDataset(); - secondaryId.setDataset(dataset); + secondaryId.setDataset(dataset.getId()); if (namespace.getStorage().getSecondaryId(secondaryId.getId()) != null) { throw new WebApplicationException("SecondaryId already exists", Response.Status.CONFLICT); @@ -125,12 +126,12 @@ public synchronized void addSecondaryId(Namespace namespace, SecondaryIdDescript * Delete SecondaryId if it does not have any dependents. 
*/ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secondaryId) { - final Namespace namespace = datasetRegistry.get(secondaryId.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(secondaryId.getDataset()); // Before we commit this deletion, we check if this SecondaryId still has dependent Columns. - final List dependents = namespace.getStorage().getTables().stream() + final List dependents = namespace.getStorage().getTables() .map(Table::getColumns).flatMap(Arrays::stream) - .filter(column -> secondaryId.equals(column.getSecondaryId())) + .filter(column -> secondaryId.getId().equals(column.getSecondaryId())) .toList(); if (!dependents.isEmpty()) { @@ -157,10 +158,11 @@ public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secon public synchronized void addTable(@NonNull Table table, Namespace namespace) { Dataset dataset = namespace.getDataset(); + DatasetId datasetId = dataset.getId(); if (table.getDataset() == null) { - table.setDataset(dataset); + table.setDataset(datasetId); } - else if (!table.getDataset().equals(dataset)) { + else if (!table.getDataset().equals(datasetId)) { throw new IllegalArgumentException(); } @@ -181,7 +183,7 @@ else if (!table.getDataset().equals(dataset)) { * Therefore, the concept will be deleted first then added */ public synchronized void updateConcept(@NonNull Dataset dataset, @NonNull Concept concept) { - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); if (!datasetRegistry.get(dataset.getId()).getStorage().hasConcept(concept.getId())) { throw new NotFoundException("Can't find the concept in the dataset " + concept.getId()); } @@ -194,14 +196,14 @@ public synchronized void updateConcept(@NonNull Dataset dataset, @NonNull Concep * Add the concept to the dataset if it does not exist yet */ public synchronized void addConcept(@NonNull Dataset dataset, @NonNull Concept concept, boolean force) { - concept.setDataset(dataset); + 
concept.setDataset(dataset.getId()); ValidatorHelper.failOnError(log, environment.getValidator().validate(concept)); if (datasetRegistry.get(dataset.getId()).getStorage().hasConcept(concept.getId())) { if (!force) { throw new WebApplicationException("Can't replace already existing concept " + concept.getId(), Response.Status.CONFLICT); } - deleteConcept(concept); + deleteConcept(concept.getId()); log.info("Force deleted previous concept: {}", concept.getId()); } @@ -210,6 +212,15 @@ public synchronized void addConcept(@NonNull Dataset dataset, @NonNull Concept deleteTable(Table table, boolean force) { - final Namespace namespace = datasetRegistry.get(table.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(table.getDataset()); - final List> dependentConcepts = namespace.getStorage().getAllConcepts().stream().flatMap(c -> c.getConnectors().stream()) - .filter(con -> con.getTable().equals(table)) + TableId tableId = table.getId(); + final List> dependentConcepts = namespace.getStorage().getAllConcepts().flatMap(c -> c.getConnectors().stream()) + .filter(con -> con.getResolvedTableId().equals(tableId)) .map(Connector::getConcept) .collect(Collectors.toList()); if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } - namespace.getStorage().getAllImports().stream() - .filter(imp -> imp.getTable().equals(table)) + namespace.getStorage().getAllImports() + .filter(imp -> imp.getTable().equals(tableId)) .forEach(this::deleteImport); - namespace.getStorage().removeTable(table.getId()); + namespace.getStorage().removeTable(tableId); storageListener.onRemoveTable(table); } @@ -303,13 +306,10 @@ public synchronized List deleteTable(Table table, boolean force) { } /** - * Deletes a concept. + * Deletes an import. 
*/ - public synchronized void deleteConcept(Concept concept) { - final Namespace namespace = datasetRegistry.get(concept.getDataset().getId()); - - namespace.getStorage().removeConcept(concept.getId()); - storageListener.onDeleteConcept(concept); + public synchronized void deleteImport(Import imp) { + this.importHandler.deleteImport(imp); } /** @@ -339,9 +339,9 @@ public void addInternToExternMapping(Namespace namespace, InternToExternMapper i } public List deleteInternToExternMapping(InternToExternMapper internToExternMapper, boolean force) { - final Namespace namespace = datasetRegistry.get(internToExternMapper.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(internToExternMapper.getDataset()); - final Set> dependentConcepts = namespace.getStorage().getAllConcepts().stream() + final Set> dependentConcepts = namespace.getStorage().getAllConcepts() .filter( c -> c.getSelects().stream() .filter(MappableSingleColumnSelect.class::isInstance) @@ -354,7 +354,7 @@ public List deleteInternToExternMapping(InternToExternMapper internTo if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } namespace.getStorage().removeInternToExternMapper(internToExternMapper.getId()); @@ -368,7 +368,7 @@ public void clearIndexCache() { } public void addSearchIndex(Namespace namespace, SearchIndex searchIndex) { - searchIndex.setDataset(namespace.getDataset()); + searchIndex.setDataset(namespace.getDataset().getId()); ValidatorHelper.failOnError(log, environment.getValidator().validate(searchIndex)); @@ -381,9 +381,9 @@ public void addSearchIndex(Namespace namespace, SearchIndex searchIndex) { } public List deleteSearchIndex(SearchIndex searchIndex, boolean force) { - final Namespace namespace = datasetRegistry.get(searchIndex.getDataset().getId()); + final Namespace namespace = datasetRegistry.get(searchIndex.getDataset()); - final List> dependentConcepts = 
namespace.getStorage().getAllConcepts().stream() + final List> dependentConcepts = namespace.getStorage().getAllConcepts() .filter( c -> c.getConnectors().stream() .map(Connector::getFilters) @@ -392,13 +392,13 @@ public List deleteSearchIndex(SearchIndex searchIndex, boolean force) .map(SelectFilter.class::cast) .map(SelectFilter::getTemplate) .filter(Objects::nonNull) - .anyMatch(searchIndex::equals) + .anyMatch(searchIndex.getId()::equals) ) .toList(); if (force || dependentConcepts.isEmpty()) { for (Concept concept : dependentConcepts) { - deleteConcept(concept); + deleteConcept(concept.getId()); } namespace.getStorage().removeSearchIndex(searchIndex.getId()); diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java index 9c7cce00e9..ff91326468 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetResource.java @@ -200,13 +200,16 @@ public void setStructure(@NotNull @Valid StructureNode[] structure) { @GET @Path("tables") public List listTables() { - return namespace.getStorage().getTables().stream().map(Table::getId).collect(Collectors.toList()); + return namespace.getStorage().getTables().map(Table::getId).collect(Collectors.toList()); } @GET @Path("concepts") public List listConcepts() { - return namespace.getStorage().getAllConcepts().stream().map(Concept::getId).collect(Collectors.toList()); + return namespace.getStorage() + .getAllConcepts() + .map(Concept::getId) + .collect(Collectors.toList()); } @DELETE diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java index 3090a7d6af..fee4ed1366 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminDatasetsResource.java @@ -2,10 +2,6 @@ import java.util.List; import java.util.stream.Collectors; - -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import jakarta.inject.Inject; import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; @@ -14,6 +10,10 @@ import jakarta.ws.rs.POST; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; + +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; @@ -29,7 +29,7 @@ public class AdminDatasetsResource { @SneakyThrows @POST @Consumes(ExtraMimeTypes.JSON_STRING) - public void addDataset(@Valid @NotNull Dataset dataset) { + public void addDataset(@NotNull @Valid Dataset dataset) { processor.addDataset(dataset); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java index b02fa9bd33..daf27288a3 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminProcessor.java @@ -11,6 +11,8 @@ import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; +import jakarta.validation.Validator; import com.bakdata.conquery.commands.ManagerNode; import com.bakdata.conquery.io.jackson.Jackson; @@ -37,7 +39,6 @@ import com.google.common.collect.Multimap; import com.univocity.parsers.csv.CsvWriter; import 
groovy.lang.GroovyShell; -import jakarta.validation.Validator; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -90,19 +91,15 @@ public synchronized void addRole(Role role) throws JSONException { public void deleteRole(Role role) { log.info("Deleting {}", role); - for (User user : storage.getAllUsers()) { - user.removeRole(role); - } + storage.getAllUsers().forEach(user -> user.removeRole(role)); - for (Group group : storage.getAllGroups()) { - group.removeRole(role); - } + storage.getAllGroups().forEach(group -> group.removeRole(role)); storage.removeRole(role.getId()); } public SortedSet getAllRoles() { - return new TreeSet<>(storage.getAllRoles()); + return storage.getAllRoles().collect(Collectors.toCollection(TreeSet::new)); } @@ -129,13 +126,11 @@ public void deletePermission(PermissionOwner owner, ConqueryPermission permis public TreeSet getAllUsers() { - return new TreeSet<>(storage.getAllUsers()); + return storage.getAllUsers().collect(Collectors.toCollection(TreeSet::new)); } public synchronized void deleteUser(User user) { - for (Group group : storage.getAllGroups()) { - group.removeMember(user); - } + storage.getAllGroups().forEach(group -> group.removeMember(user)); storage.removeUser(user.getId()); log.trace("Removed user {} from the storage.", user.getId()); } @@ -158,7 +153,7 @@ public void addUser(User user) { } public TreeSet getAllGroups() { - return new TreeSet<>(storage.getAllGroups()); + return storage.getAllGroups().collect(Collectors.toCollection(TreeSet::new)); } public void addGroups(List groups) { @@ -215,7 +210,7 @@ public String getPermissionOverviewAsCSV() { /** * Renders the permission overview for certain {@link User} in form of a CSV. 
*/ - public String getPermissionOverviewAsCSV(Collection users) { + public String getPermissionOverviewAsCSV(Stream users) { final StringWriter sWriter = new StringWriter(); final CsvWriter writer = config.getCsv().createWriter(sWriter); final List scope = config @@ -224,9 +219,9 @@ public String getPermissionOverviewAsCSV(Collection users) { // Header writeAuthOverviewHeader(writer, scope); // Body - for (User user : users) { - writeAuthOverviewUser(writer, scope, user, storage, config); - } + users.forEach(user -> + writeAuthOverviewUser(writer, scope, user, storage, config) + ); return sWriter.toString(); } @@ -261,7 +256,7 @@ private static void writeAuthOverviewUser(CsvWriter writer, List scope, * Renders the permission overview for all users in a certain {@link Group} in form of a CSV. */ public String getPermissionOverviewAsCSV(Group group) { - return getPermissionOverviewAsCSV(group.getMembers().stream().map(storage::getUser).collect(Collectors.toList())); + return getPermissionOverviewAsCSV(group.getMembers().stream().map(storage::getUser)); } public boolean isBusy() { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java index ef315cf67e..5ed9d37cde 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminResource.java @@ -106,13 +106,13 @@ public Stream getQueries(@Auth Subject currentUser, @QueryParam return storage.getAllExecutions() - .stream() + .filter(t -> t.getCreationTime().toLocalDate().isAfter(since) || t.getCreationTime().toLocalDate().isEqual(since)) .limit(limit) .map(t -> { try { if (t.isInitialized()) { - final Namespace namespace = processor.getDatasetRegistry().get(t.getDataset().getId()); + final Namespace namespace = processor.getDatasetRegistry().get(t.getDataset()); return 
t.buildStatusFull(currentUser, namespace); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java index f10c8369d1..e8f63e59c1 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/AdminTablesResource.java @@ -4,15 +4,6 @@ import java.util.List; import java.util.stream.Collectors; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Import; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; -import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; -import com.bakdata.conquery.models.worker.Namespace; import jakarta.annotation.PostConstruct; import jakarta.inject.Inject; import jakarta.ws.rs.Consumes; @@ -25,6 +16,15 @@ import jakarta.ws.rs.QueryParam; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.Response.Status; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ImportId; +import com.bakdata.conquery.models.worker.Namespace; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.Setter; @@ -84,8 +84,7 @@ public Response remove(@QueryParam("force") @DefaultValue("false") boolean force public List listImports() { return namespace.getStorage() .getAllImports() - .stream() - .filter(imp 
-> imp.getTable().equals(table)) + .filter(imp -> imp.getTable().equals(table.getId())) .map(Import::getId) .collect(Collectors.toList()); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java index 17c89ccdbf..554582e1c9 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/admin/rest/UIProcessor.java @@ -1,6 +1,5 @@ package com.bakdata.conquery.resources.admin.rest; -import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; @@ -12,6 +11,8 @@ import java.util.TreeSet; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; +import jakarta.inject.Inject; import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.storage.MetaStorage; @@ -43,7 +44,6 @@ import com.bakdata.conquery.resources.admin.ui.model.TableStatistics; import com.bakdata.conquery.resources.admin.ui.model.UIContext; import com.google.common.cache.CacheStats; -import jakarta.inject.Inject; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -53,21 +53,13 @@ * Wrapper processor that transforms internal representations of the {@link AdminProcessor} into * objects that are more convenient to handle with freemarker. 
*/ +@Getter @Slf4j @RequiredArgsConstructor(onConstructor_ = {@Inject}) public class UIProcessor { - @Getter private final AdminProcessor adminProcessor; - public DatasetRegistry getDatasetRegistry() { - return adminProcessor.getDatasetRegistry(); - } - - public MetaStorage getStorage() { - return adminProcessor.getStorage(); - } - public UIContext getUIContext(String csrfToken) { return new UIContext(adminProcessor.getNodeProvider(), csrfToken); } @@ -82,7 +74,7 @@ public CacheStats getIndexServiceStatistics() { public FrontendAuthOverview getAuthOverview() { Collection overview = new TreeSet<>(); - for (User user : getStorage().getAllUsers()) { + for (User user : getStorage().getAllUsers().toList()) { Collection userGroups = AuthorizationHelper.getGroupsOf(user, getStorage()); Set effectiveRoles = user.getRoles().stream() .map(getStorage()::getRole) @@ -99,6 +91,9 @@ public FrontendAuthOverview getAuthOverview() { return FrontendAuthOverview.builder().overview(overview).build(); } + public MetaStorage getStorage() { + return adminProcessor.getStorage(); + } public FrontendRoleContent getRoleContent(Role role) { return FrontendRoleContent.builder() @@ -110,6 +105,15 @@ public FrontendRoleContent getRoleContent(Role role) { .build(); } + private SortedSet wrapInFEPermission(Collection permissions) { + TreeSet fePermissions = new TreeSet<>(); + + for (ConqueryPermission permission : permissions) { + fePermissions.add(FrontendPermission.from(permission)); + } + return fePermissions; + } + private Map, List>> preparePermissionTemplate() { Map, List>> permissionTemplateMap = new HashMap<>(); @@ -132,30 +136,20 @@ private Map, List>> preparePermissionTemplate( } public List getUsers(Role role) { - Collection user = getStorage().getAllUsers(); - return user.stream().filter(u -> u.getRoles().contains(role.getId())).sorted().collect(Collectors.toList()); + return getStorage().getAllUsers().filter(u -> 
u.getRoles().contains(role.getId())).sorted().collect(Collectors.toList()); } private List getGroups(Role role) { - Collection groups = getStorage().getAllGroups(); - return groups.stream() + return getStorage().getAllGroups() .filter(g -> g.getRoles().contains(role.getId())) .sorted() .collect(Collectors.toList()); } - private SortedSet wrapInFEPermission(Collection permissions) { - TreeSet fePermissions = new TreeSet<>(); - - for (ConqueryPermission permission : permissions) { - fePermissions.add(FrontendPermission.from(permission)); - } - return fePermissions; - } - public FrontendUserContent getUserContent(User user) { - final Collection availableGroups = new ArrayList<>(getStorage().getAllGroups()); - availableGroups.removeIf(g -> g.containsMember(user)); + final Collection availableGroups = getStorage().getAllGroups() + .filter(group -> !group.containsMember(user)) + .toList(); return FrontendUserContent .builder() @@ -163,32 +157,33 @@ public FrontendUserContent getUserContent(User user) { .groups(AuthorizationHelper.getGroupsOf(user, getStorage())) .availableGroups(availableGroups) .roles(user.getRoles().stream().map(getStorage()::getRole).collect(Collectors.toList())) - .availableRoles(getStorage().getAllRoles()) + .availableRoles(getStorage().getAllRoles().collect(Collectors.toCollection(TreeSet::new))) .permissions(wrapInFEPermission(user.getPermissions())) .permissionTemplateMap(preparePermissionTemplate()) .build(); } - public FrontendGroupContent getGroupContent(Group group) { + Set memberIds = group.getMembers(); + Set members = memberIds.stream().map(getStorage()::getUser).collect(Collectors.toCollection(TreeSet::new)); + Collection + availableMembers = + getStorage().getAllUsers().filter(user -> !memberIds.contains(user.getId())).collect(Collectors.toCollection(TreeSet::new)); - Set membersIds = group.getMembers(); - ArrayList availableMembers = new ArrayList<>(getStorage().getAllUsers()); - availableMembers.removeIf(u -> 
membersIds.contains(u.getId())); return FrontendGroupContent .builder() .owner(group) - .members(membersIds.stream().map(getStorage()::getUser).collect(Collectors.toList())) + .members(members) .availableMembers(availableMembers) .roles(group.getRoles().stream().map(getStorage()::getRole).collect(Collectors.toList())) - .availableRoles(getStorage().getAllRoles()) + .availableRoles(getStorage().getAllRoles().collect(Collectors.toCollection(TreeSet::new))) .permissions(wrapInFEPermission(group.getPermissions())) .permissionTemplateMap(preparePermissionTemplate()) .build(); } public TableStatistics getTableStatistics(Table table) { - final NamespaceStorage storage = getDatasetRegistry().get(table.getDataset().getId()).getStorage(); + final NamespaceStorage storage = getDatasetRegistry().get(table.getDataset()).getStorage(); List imports = table.findImports(storage).collect(Collectors.toList()); final long entries = imports.stream().mapToLong(Import::getNumberOfEntries).sum(); @@ -205,31 +200,27 @@ public TableStatistics getTableStatistics(Table table) { .mapToLong(imp -> calculateCBlocksSizeBytes(imp, storage.getAllConcepts())) .sum(), imports, - storage.getAllConcepts().stream() + storage.getAllConcepts() .map(Concept::getConnectors) .flatMap(Collection::stream) - .filter(conn -> conn.getTable().equals(table)) + .filter(conn -> conn.getResolvedTableId().equals(table.getId())) .map(Connector::getConcept).collect(Collectors.toSet()) ); } - public ImportStatistics getImportStatistics(Import imp) { - final NamespaceStorage storage = getDatasetRegistry().get(imp.getDataset().getId()).getStorage(); - - final long cBlockSize = calculateCBlocksSizeBytes(imp, storage.getAllConcepts()); - - return new ImportStatistics(imp, cBlockSize); + public DatasetRegistry getDatasetRegistry() { + return adminProcessor.getDatasetRegistry(); } - public static long calculateCBlocksSizeBytes(Import imp, Collection> concepts) { + public static long calculateCBlocksSizeBytes(Import imp, Stream> 
concepts) { // CBlocks are created per (per Bucket) Import per Connector targeting this table // Since the overhead of a single CBlock is minor, we gloss over the fact, that there are multiple and assume it is only a single very large one. - return concepts.stream() + return concepts .filter(TreeConcept.class::isInstance) .flatMap(concept -> ((TreeConcept) concept).getConnectors().stream()) - .filter(con -> con.getTable().equals(imp.getTable())) + .filter(con -> con.getResolvedTableId().equals(imp.getTable())) .mapToLong(con -> { // Per event an int array is stored marking the path to the concept child. final double avgDepth = con.getConcept() @@ -242,4 +233,12 @@ public static long calculateCBlocksSizeBytes(Import imp, Collection new TableInfos( table.getId(), table.getName(), @@ -88,18 +88,17 @@ public View getDataset(@PathParam(DATASET) Dataset dataset) { table.findImports(namespace.getStorage()).mapToLong(Import::getNumberOfEntries).sum() )) .collect(Collectors.toList()), - namespace.getStorage().getAllConcepts(), + namespace.getStorage().getAllConcepts().toList(), // Total size of CBlocks namespace .getStorage().getTables() - .stream() .flatMap(table -> table.findImports(namespace.getStorage())) .mapToLong(imp -> calculateCBlocksSizeBytes( imp, namespace.getStorage().getAllConcepts() )) .sum(), // total size of entries - namespace.getStorage().getAllImports().stream().mapToLong(Import::estimateMemoryConsumption).sum() + namespace.getStorage().getAllImports().mapToLong(Import::estimateMemoryConsumption).sum() ) ); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java index ddd919ed27..7655cc7b13 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptResource.java @@ -4,14 +4,6 @@ import java.util.List; import java.util.stream.Collectors; - 
-import com.bakdata.conquery.apiv1.frontend.FrontendList; -import com.bakdata.conquery.io.jersey.ExtraMimeTypes; -import com.bakdata.conquery.models.auth.permissions.Ability; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.resources.hierarchies.HAuthorized; -import com.fasterxml.jackson.annotation.JsonCreator; import jakarta.inject.Inject; import jakarta.servlet.http.HttpServletResponse; import jakarta.validation.constraints.NotNull; @@ -25,6 +17,14 @@ import jakarta.ws.rs.core.EntityTag; import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; + +import com.bakdata.conquery.apiv1.frontend.FrontendList; +import com.bakdata.conquery.io.jersey.ExtraMimeTypes; +import com.bakdata.conquery.models.auth.permissions.Ability; +import com.bakdata.conquery.models.datasets.concepts.Concept; +import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; +import com.bakdata.conquery.resources.hierarchies.HAuthorized; +import com.fasterxml.jackson.annotation.JsonCreator; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.ToString; diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java index 908aa69235..71014612a8 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ConceptsProcessor.java @@ -13,6 +13,8 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import jakarta.inject.Inject; +import jakarta.validation.Validator; import com.bakdata.conquery.apiv1.IdLabel; import com.bakdata.conquery.apiv1.frontend.FrontendList; @@ -47,8 +49,6 @@ import com.google.common.collect.Iterators; import 
it.unimi.dsi.fastutil.objects.Object2LongMap; import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap; -import jakarta.inject.Inject; -import jakarta.validation.Validator; import lombok.Getter; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -143,12 +143,12 @@ public FrontendPreviewConfig getEntityPreviewFrontendConfig(Dataset dataset) { return new FrontendPreviewConfig( previewConfig.getAllConnectors() .stream() - .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), namespace.getCentralRegistry().resolve(id).getTable().getLabel())) + .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), id.resolve().getResolvedTable().getLabel())) .collect(Collectors.toSet()), previewConfig.getDefaultConnectors() .stream() - .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), namespace.getCentralRegistry().resolve(id).getTable().getLabel())) + .map(id -> new FrontendPreviewConfig.Labelled(id.toString(), id.resolve().getResolvedTable().getLabel())) .collect(Collectors.toSet()), previewConfig.resolveSearchFilters(), previewConfig.resolveSearchConcept() @@ -164,7 +164,7 @@ public ResolvedFilterValues resolveFilterValues(SelectFilter searchable, List // search in the full text engine final Set openSearchTerms = new HashSet<>(searchTerms); - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); final List out = new ArrayList<>(); @@ -230,7 +230,7 @@ public AutoCompleteResult autocompleteTextFilter( } private Cursor listAllValues(SelectFilter searchable) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); /* Don't worry, I am as confused as you are! For some reason, flatMapped streams in conjunction with distinct will be evaluated full before further operation. 
@@ -255,7 +255,7 @@ private Cursor listAllValues(SelectFilter searchable) { } private int countAllValues(SelectFilter searchable) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); return namespace.getFilterSearch().getTotal(searchable); } @@ -265,7 +265,7 @@ private int countAllValues(SelectFilter searchable) { * Is used by the serach cache to load missing items */ private List autocompleteTextFilter(SelectFilter searchable, String text) { - final Namespace namespace = namespaces.get(searchable.getDataset().getId()); + final Namespace namespace = namespaces.get(searchable.getDataset()); // Note that FEValues is equals/hashcode only on value: // The different sources might contain duplicate FEValue#values which we exploit: diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java b/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java index f7b9b5f40f..895e5a220c 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/EntityPreviewRequest.java @@ -2,12 +2,11 @@ import java.time.LocalDate; import java.util.List; +import jakarta.validation.constraints.NotEmpty; -import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection; import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.concepts.Connector; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.fasterxml.jackson.annotation.JsonCreator; -import jakarta.validation.constraints.NotEmpty; import lombok.AllArgsConstructor; import lombok.Data; @@ -17,9 +16,8 @@ public class EntityPreviewRequest { private String idKind; //TODO I think ID is fallback, but i dont currently know. 
private final String entityId; private final Range time; - @NsIdRefCollection @NotEmpty - private final List sources; + private final List sources; //TODO uncomment, when frontend is adapted to support this // @ValidationMethod(message = "Time must be closed.") diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java index 58ad274147..76e6aba671 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java @@ -5,7 +5,6 @@ import java.util.List; import java.util.Optional; import java.util.OptionalInt; - import jakarta.inject.Inject; import jakarta.validation.Valid; import jakarta.ws.rs.BadRequestException; @@ -44,7 +43,7 @@ public class FilterResource extends HAuthorized { @POST @Path("resolve") public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues filterValues) { - subject.isPermitted(filter.getDataset(), Ability.READ); + subject.isPermitted(filter.getDataset().resolve(), Ability.READ); subject.isPermitted(filter.getConnector().findConcept(), Ability.READ); return processor.resolveFilterValues((SelectFilter) filter, filterValues.values()); @@ -54,7 +53,7 @@ public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues f @POST @Path("autocomplete") public ConceptsProcessor.AutoCompleteResult autocompleteTextFilter(@Valid FilterResource.AutocompleteRequest request) { - subject.isPermitted(filter.getDataset(), Ability.READ); + subject.isPermitted(filter.getDataset().resolve(), Ability.READ); subject.isPermitted(filter.getConnector().findConcept(), Ability.READ); if (!(filter instanceof SelectFilter)) { diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java index facc8705ee..9bbcbeb664 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/QueryResource.java @@ -112,7 +112,7 @@ public void cancel(@Auth Subject subject, @PathParam(QUERY) ManagedExecution que subject.authorize(query.getDataset(), Ability.READ); subject.authorize(query, Ability.CANCEL); - processor.cancel(subject, query.getDataset(), query); + processor.cancel(subject, query); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java index c99e4b5bee..ad732fe173 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultArrowResource.java @@ -7,14 +7,6 @@ import java.net.URL; import java.util.Optional; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.arrow.ResultArrowProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -26,6 +18,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.arrow.ResultArrowProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -48,7 +48,7 @@ public Response getFile( ) { checkSingleTableResult(query); - log.info("Result for {} download on dataset {} by subject {} ({}).", query.getId(), query.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", query.getId(), query.getDataset(), subject.getId(), subject.getName()); return processor.createResultFile(subject, query, pretty, limit); } @@ -84,7 +84,7 @@ public Response getStream( @QueryParam("limit") OptionalLong limit ) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset(), subject.getId(), subject.getName()); return processor.createResultStream(subject, execution, pretty.orElse(false), limit); } } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java index caf4b1d33c..1e0b522e4f 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultCsvResource.java @@ -7,14 +7,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.csv.ResultCsvProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ 
-26,6 +18,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.csv.ResultCsvProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -59,7 +59,7 @@ public Response getAsCsv( ) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution, execution.getDataset(), subject.getId(), subject.getName()); return processor.createResult(subject, (E) execution, pretty, determineCharset(userAgent, queryCharset), limit); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java index 5fa2bdf217..eab229807b 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultExcelResource.java @@ -6,14 +6,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.apiv1.AdditionalMediaTypes; -import com.bakdata.conquery.io.result.excel.ResultExcelProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import 
jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -25,6 +17,14 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.apiv1.AdditionalMediaTypes; +import com.bakdata.conquery.io.result.excel.ResultExcelProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -47,8 +47,7 @@ public Response get( @QueryParam("pretty") @DefaultValue("true") boolean pretty, @QueryParam("limit") OptionalLong limit) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset() - .getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset(), subject.getId(), subject.getName()); return processor.createResult(subject, (E) execution, pretty, limit); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java index 74709908e7..55839554e5 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/api/ResultParquetResource.java @@ -7,13 +7,6 @@ import java.net.MalformedURLException; import java.net.URL; import java.util.OptionalLong; - -import com.bakdata.conquery.io.result.parquet.ResultParquetProcessor; -import com.bakdata.conquery.models.auth.entities.Subject; -import com.bakdata.conquery.models.execution.ManagedExecution; -import 
com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.resources.ResourceConstants; -import io.dropwizard.auth.Auth; import jakarta.inject.Inject; import jakarta.ws.rs.DefaultValue; import jakarta.ws.rs.GET; @@ -25,6 +18,13 @@ import jakarta.ws.rs.core.HttpHeaders; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.core.UriBuilder; + +import com.bakdata.conquery.io.result.parquet.ResultParquetProcessor; +import com.bakdata.conquery.models.auth.entities.Subject; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.query.SingleTableResult; +import com.bakdata.conquery.resources.ResourceConstants; +import io.dropwizard.auth.Auth; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -58,7 +58,7 @@ public Response getFile( @QueryParam("limit") OptionalLong limit) { checkSingleTableResult(execution); - log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset().getId(), subject.getId(), subject.getName()); + log.info("Result for {} download on dataset {} by subject {} ({}).", execution.getId(), execution.getDataset(), subject.getId(), subject.getName()); return processor.createResultFile(subject, execution, pretty, limit); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java index 792779e24e..87f1059e4a 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HConnectors.java @@ -7,8 +7,8 @@ import jakarta.ws.rs.Path; import jakarta.ws.rs.PathParam; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.Table; +import com.bakdata.conquery.models.datasets.concepts.Connector; import com.google.common.collect.MoreCollectors; import 
lombok.Getter; import lombok.Setter; @@ -28,7 +28,7 @@ public void init() { super.init(); connector = concept.getConnectors() .stream() - .filter(con -> con.getTable().equals(table)) + .filter(con -> con.getResolvedTable().equals(table)) .collect(MoreCollectors.toOptional()) .orElseThrow(() -> new NotFoundException(String.format("Could not find Connector for Table[%s] in Concept[%s]", connector, concept))); } diff --git a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java index dfeb09fe1a..c5942c8db4 100644 --- a/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java +++ b/backend/src/main/java/com/bakdata/conquery/resources/hierarchies/HDatasets.java @@ -20,7 +20,7 @@ public abstract class HDatasets extends HAuthorized { @Inject - protected DatasetRegistry datasetRegistry; + protected DatasetRegistry datasetRegistry; @PathParam(DATASET) private Dataset dataset; diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java index 27a35bcc70..1bc33168cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java @@ -12,9 +12,12 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; +import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.select.concept.ConceptColumnSelect; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeNode; +import 
com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.queryplan.DateAggregationAction; import com.bakdata.conquery.sql.conversion.NodeConverter; import com.bakdata.conquery.sql.conversion.SharedAliases; @@ -58,38 +61,6 @@ public CQConceptConverter() { ); } - @Override - public Class getConversionClass() { - return CQConcept.class; - } - - @Override - public ConversionContext convert(CQConcept cqConcept, ConversionContext context) { - - TablePath tablePath = new TablePath(cqConcept, context); - List convertedCQTables = cqConcept.getTables().stream() - .flatMap(cqTable -> convertCqTable(tablePath, cqConcept, cqTable, context).stream()) - .toList(); - - QueryStep joinedStep = QueryStepJoiner.joinSteps(convertedCQTables, ConqueryJoinType.OUTER_JOIN, DateAggregationAction.MERGE, context); - QueryStep lastConceptStep = finishConceptConversion(joinedStep, cqConcept, tablePath, context); - return context.withQueryStep(lastConceptStep); - } - - private Optional convertCqTable(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext context) { - CQTableContext tableContext = createTableContext(tablePath, cqConcept, cqTable, context); - Optional lastQueryStep = Optional.empty(); - for (ConnectorCte queryStep : connectorCTEs) { - Optional convertedStep = queryStep.convert(tableContext, lastQueryStep); - if (convertedStep.isEmpty()) { - continue; - } - lastQueryStep = convertedStep; - tableContext = tableContext.withPrevious(lastQueryStep.get()); - } - return lastQueryStep; - } - private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcept cqConcept, TablePath tablePath, ConversionContext context) { ConceptSqlTables universalTables = tablePath.createConceptTables(predecessor); @@ -100,7 +71,10 @@ private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcep SelectContext selectContext = 
SelectContext.create(ids, validityDate, universalTables, context); List converted = cqConcept.getSelects().stream() - .map(select -> select.createConverter().conceptSelect(select, selectContext)) + .map(selectId -> { + Select select = selectId.resolve(); + return select.createConverter().conceptSelect(select, selectContext); + }) .toList(); List queriesToJoin = new ArrayList<>(); @@ -136,41 +110,9 @@ private static QueryStep finishConceptConversion(QueryStep predecessor, CQConcep .build(); } - private CQTableContext createTableContext(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { - - SqlIdColumns ids = convertIds(cqConcept, cqTable, conversionContext); - ConnectorSqlTables connectorTables = tablePath.getConnectorTables(cqTable); - Optional tablesValidityDate = convertValidityDate(cqTable, connectorTables.getLabel(), conversionContext); - - // convert filters - SqlFunctionProvider functionProvider = conversionContext.getSqlDialect().getFunctionProvider(); - List allSqlFiltersForTable = new ArrayList<>(); - cqTable.getFilters().stream() - .map(filterValue -> filterValue.convertToSqlFilter(ids, conversionContext, connectorTables)) - .forEach(allSqlFiltersForTable::add); - collectConditionFilters(cqConcept.getElements(), cqTable, functionProvider).ifPresent(allSqlFiltersForTable::add); - getDateRestriction(conversionContext, tablesValidityDate).ifPresent(allSqlFiltersForTable::add); - - // convert selects - SelectContext selectContext = SelectContext.create(ids, tablesValidityDate, connectorTables, conversionContext); - List allSelectsForTable = new ArrayList<>(); - ConnectorSqlSelects conceptColumnSelect = createConceptColumnConnectorSqlSelects(cqConcept, selectContext); - allSelectsForTable.add(conceptColumnSelect); - cqTable.getSelects().stream().map(select -> select.createConverter().connectorSelect(select, selectContext)).forEach(allSelectsForTable::add); - - return CQTableContext.builder() - .ids(ids) - 
.validityDate(tablesValidityDate) - .sqlSelects(allSelectsForTable) - .sqlFilters(allSqlFiltersForTable) - .connectorTables(connectorTables) - .conversionContext(conversionContext) - .build(); - } - public static SqlIdColumns convertIds(CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { - Table table = cqTable.getConnector().getTable(); + Table table = cqTable.getConnector().resolve().getResolvedTable(); Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(table, conversionContext.getConfig()); if (cqConcept.isExcludeFromSecondaryId() @@ -180,7 +122,7 @@ public static SqlIdColumns convertIds(CQConcept cqConcept, CQTable cqTable, Conv return new SqlIdColumns(primaryColumn).withAlias(); } - Column secondaryIdColumn = table.findSecondaryIdColumn(conversionContext.getSecondaryIdDescription()); + Column secondaryIdColumn = table.findSecondaryIdColumn(conversionContext.getSecondaryIdDescription().getId()); Preconditions.checkArgument( secondaryIdColumn != null, @@ -244,7 +186,7 @@ private static Stream collectConditions(CQTable cqTable, Concept if (!(conceptElement instanceof ConceptTreeChild child)) { return Stream.empty(); } - WhereCondition childCondition = child.getCondition().convertToSqlCondition(CTConditionContext.create(cqTable.getConnector(), functionProvider)); + WhereCondition childCondition = child.getCondition().convertToSqlCondition(CTConditionContext.create(cqTable.getConnector().resolve(), functionProvider)); return Stream.concat( collectConditions(cqTable, child.getParent(), functionProvider), Stream.of(childCondition) @@ -252,8 +194,8 @@ private static Stream collectConditions(CQTable cqTable, Concept } private static Optional convertConnectorCondition(CQTable cqTable, SqlFunctionProvider functionProvider) { - return Optional.ofNullable(cqTable.getConnector().getCondition()) - .map(condition -> condition.convertToSqlCondition(CTConditionContext.create(cqTable.getConnector(), functionProvider))); + return 
Optional.ofNullable(cqTable.getConnector().resolve().getCondition()) + .map(condition -> condition.convertToSqlCondition(CTConditionContext.create(cqTable.getConnector().resolve(), functionProvider))); } private static Optional getDateRestriction(ConversionContext context, Optional validityDate) { @@ -279,10 +221,75 @@ private static Optional getDateRestriction(ConversionContext context private static ConnectorSqlSelects createConceptColumnConnectorSqlSelects(CQConcept cqConcept, SelectContext selectContext) { return cqConcept.getSelects().stream() + .map(SelectId::resolve) .filter(select -> select instanceof ConceptColumnSelect) .findFirst() .map(select -> select.createConverter().connectorSelect(select, selectContext)) .orElse(ConnectorSqlSelects.none()); } + @Override + public Class getConversionClass() { + return CQConcept.class; + } + + @Override + public ConversionContext convert(CQConcept cqConcept, ConversionContext context) { + + TablePath tablePath = new TablePath(cqConcept, context); + List convertedCQTables = cqConcept.getTables().stream() + .flatMap(cqTable -> convertCqTable(tablePath, cqConcept, cqTable, context).stream()) + .toList(); + + QueryStep joinedStep = QueryStepJoiner.joinSteps(convertedCQTables, ConqueryJoinType.OUTER_JOIN, DateAggregationAction.MERGE, context); + QueryStep lastConceptStep = finishConceptConversion(joinedStep, cqConcept, tablePath, context); + return context.withQueryStep(lastConceptStep); + } + + private Optional convertCqTable(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext context) { + CQTableContext tableContext = createTableContext(tablePath, cqConcept, cqTable, context); + Optional lastQueryStep = Optional.empty(); + for (ConnectorCte queryStep : connectorCTEs) { + Optional convertedStep = queryStep.convert(tableContext, lastQueryStep); + if (convertedStep.isEmpty()) { + continue; + } + lastQueryStep = convertedStep; + tableContext = tableContext.withPrevious(lastQueryStep.get()); + } + 
return lastQueryStep; + } + + private CQTableContext createTableContext(TablePath tablePath, CQConcept cqConcept, CQTable cqTable, ConversionContext conversionContext) { + + SqlIdColumns ids = convertIds(cqConcept, cqTable, conversionContext); + ConnectorSqlTables connectorTables = tablePath.getConnectorTables(cqTable); + Optional tablesValidityDate = convertValidityDate(cqTable, connectorTables.getLabel(), conversionContext); + + // convert filters + SqlFunctionProvider functionProvider = conversionContext.getSqlDialect().getFunctionProvider(); + List allSqlFiltersForTable = new ArrayList<>(); + cqTable.getFilters().stream() + .map(filterValue -> filterValue.convertToSqlFilter(ids, conversionContext, connectorTables)) + .forEach(allSqlFiltersForTable::add); + collectConditionFilters(cqConcept.getElements().stream().>map(ConceptElementId::resolve).toList(), cqTable, functionProvider).ifPresent(allSqlFiltersForTable::add); + getDateRestriction(conversionContext, tablesValidityDate).ifPresent(allSqlFiltersForTable::add); + + // convert selects + SelectContext selectContext = SelectContext.create(ids, tablesValidityDate, connectorTables, conversionContext); + List allSelectsForTable = new ArrayList<>(); + ConnectorSqlSelects conceptColumnSelect = createConceptColumnConnectorSqlSelects(cqConcept, selectContext); + allSelectsForTable.add(conceptColumnSelect); + cqTable.getSelects().stream().map(SelectId::resolve).map(select -> select.createConverter().connectorSelect(select, selectContext)).forEach(allSelectsForTable::add); + + return CQTableContext.builder() + .ids(ids) + .validityDate(tablesValidityDate) + .sqlSelects(allSelectsForTable) + .sqlFilters(allSqlFiltersForTable) + .connectorTables(connectorTables) + .conversionContext(conversionContext) + .build(); + } + } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java index 114692d118..4c53669a2a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CTConditionContext.java @@ -15,8 +15,8 @@ public class CTConditionContext { public static CTConditionContext create(Connector connector, SqlFunctionProvider functionProvider) { return new CTConditionContext( - connector.getTable(), - connector.getColumn(), + connector.getResolvedTable(), + connector.getColumn() != null ? connector.getColumn().resolve() : null, functionProvider ); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java index 8885fbcc9e..2c7a1bdea5 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/TablePath.java @@ -15,6 +15,7 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.datasets.concepts.select.Select; +import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; import com.bakdata.conquery.sql.conversion.cqelement.intervalpacking.IntervalPackingCteStep; import com.bakdata.conquery.sql.conversion.model.CteStep; @@ -49,13 +50,13 @@ public ConnectorSqlTables getConnectorTables(CQTable cqTable) { private static ConnectorSqlTables createConnectorTables(CQConcept cqConcept, CQTable cqTable, ConversionContext context) { - String conceptConnectorLabel = context.getNameGenerator().conceptConnectorName(cqConcept, cqTable.getConnector(), 
context.getSqlPrintSettings() + String conceptConnectorLabel = context.getNameGenerator().conceptConnectorName(cqConcept, cqTable.getConnector().resolve(), context.getSqlPrintSettings() .getLocale()); TablePathInfo tableInfo = collectConnectorTables(cqConcept, cqTable, context); Map cteNameMap = CteStep.createCteNameMap(tableInfo.getMappings().keySet(), conceptConnectorLabel, context.getNameGenerator()); return new ConnectorSqlTables( - cqTable.getConnector(), + cqTable.getConnector().resolve(), conceptConnectorLabel, tableInfo.getRootTable(), cteNameMap, @@ -82,10 +83,10 @@ public ConceptSqlTables createConceptTables(QueryStep predecessor) { private static TablePathInfo collectConnectorTables(CQConcept cqConcept, CQTable cqTable, ConversionContext context) { TablePathInfo tableInfo = new TablePathInfo(); - tableInfo.setRootTable(cqTable.getConnector().getTable().getName()); + tableInfo.setRootTable(cqTable.getConnector().resolve().getResolvedTableId().getTable()); tableInfo.addWithDefaultMapping(MANDATORY_STEPS); - boolean eventDateSelectsPresent = cqTable.getSelects().stream().anyMatch(Select::isEventDateSelect); + boolean eventDateSelectsPresent = cqTable.getSelects().stream().map(SelectId::resolve).anyMatch(Select::isEventDateSelect); // no validity date aggregation possible nor necessary if (cqTable.findValidityDate() == null || (!cqConcept.isAggregateEventDates() && !eventDateSelectsPresent)) { return tableInfo; @@ -122,7 +123,7 @@ private TablePathInfo collectConceptTables(QueryStep predecessor) { tableInfo.addRootTableMapping(UNIVERSAL_SELECTS); // no event date selects present - if (cqConcept.getSelects().stream().noneMatch(Select::isEventDateSelect)) { + if (cqConcept.getSelects().stream().map(SelectId::resolve).noneMatch(Select::isEventDateSelect)) { return tableInfo; } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java index f765f7cd84..1dcd4286cc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java @@ -132,9 +132,10 @@ public ColumnDateRange forValidityDate(ValidityDate validityDate, CDateRange dat public ColumnDateRange forArbitraryDateRange(DaterangeSelectOrFilter daterangeSelectOrFilter) { String tableName = daterangeSelectOrFilter.getTable().getName(); if (daterangeSelectOrFilter.getEndColumn() != null) { - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn(), daterangeSelectOrFilter.getEndColumn()); + return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve()); } - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getColumn(), daterangeSelectOrFilter.getColumn()); + Column column = daterangeSelectOrFilter.getColumn().resolve(); + return ofStartAndEnd(tableName, column, column); } @Override @@ -313,19 +314,20 @@ private ColumnDateRange toColumnDateRange(CDateRange dateRestriction) { private ColumnDateRange toColumnDateRange(ValidityDate validityDate) { - String tableName = validityDate.getConnector().getTable().getName(); + String tableName = validityDate.getConnector().getResolvedTableId().getTable(); Column startColumn; Column endColumn; // if no end column is present, the only existing column is both start and end of the date range if (validityDate.getEndColumn() == null) { - startColumn = validityDate.getColumn(); - endColumn = validityDate.getColumn(); + Column column = validityDate.getColumn().resolve(); + startColumn = column; + endColumn = column; } else { - startColumn = validityDate.getStartColumn(); - endColumn = validityDate.getEndColumn(); + startColumn = validityDate.getStartColumn().resolve(); + endColumn = 
validityDate.getEndColumn().resolve(); } return ofStartAndEnd(tableName, startColumn, endColumn); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java index 96ed3e3f7e..8b1d166155 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java @@ -123,9 +123,9 @@ public ColumnDateRange forValidityDate(ValidityDate validityDate, CDateRange dat public ColumnDateRange forArbitraryDateRange(DaterangeSelectOrFilter daterangeSelectOrFilter) { String tableName = daterangeSelectOrFilter.getTable().getName(); if (daterangeSelectOrFilter.getEndColumn() != null) { - return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn(), daterangeSelectOrFilter.getEndColumn()); + return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve()); } - return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn()); + return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn().resolve()); } @Override @@ -310,11 +310,11 @@ private ColumnDateRange toColumnDateRange(CDateRange dateRestriction) { } private ColumnDateRange toColumnDateRange(ValidityDate validityDate) { - String tableName = validityDate.getConnector().getTable().getName(); + String tableName = validityDate.getConnector().getResolvedTableId().getTable(); if (validityDate.getEndColumn() != null) { - return ofStartAndEnd(tableName, validityDate.getStartColumn(), validityDate.getEndColumn()); + return ofStartAndEnd(tableName, validityDate.getStartColumn().resolve(), validityDate.getEndColumn().resolve()); } - return ofSingleColumn(tableName, validityDate.getColumn()); + return ofSingleColumn(tableName, 
validityDate.getColumn().resolve()); } private ColumnDateRange ofSingleColumn(String tableName, Column column) { diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java index 6c8e6fd6a7..664abdbe64 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountQuartersSqlAggregator.java @@ -42,14 +42,14 @@ public ConnectorSqlSelects connectorSelect(CountQuartersSelect countQuartersSele CommonAggregationSelect countAggregationSelect; if (countQuartersSelect.isSingleColumnDaterange()) { - Column countColumn = countQuartersSelect.getColumn(); + Column countColumn = countQuartersSelect.getColumn().resolve(); countAggregationSelect = countColumn.getType() == MajorTypeId.DATE_RANGE ? 
createSingleDaterangeColumnAggregationSelect(countColumn, alias, tables, functionProvider, stratificationFunctions) : createSingleDateColumnAggregationSelect(countColumn, alias, tables, functionProvider); } else { - Column startColumn = countQuartersSelect.getStartColumn(); - Column endColumn = countQuartersSelect.getEndColumn(); + Column startColumn = countQuartersSelect.getStartColumn().resolve(); + Column endColumn = countQuartersSelect.getEndColumn().resolve(); countAggregationSelect = createTwoDateColumnAggregationSelect(startColumn, endColumn, alias, tables, functionProvider, stratificationFunctions); } @@ -73,14 +73,14 @@ public SqlFilters convertToSqlFilter(CountQuartersFilter countQuartersFilter, Fi CommonAggregationSelect countAggregationSelect; if (countQuartersFilter.isSingleColumnDaterange()) { - Column countColumn = countQuartersFilter.getColumn(); + Column countColumn = countQuartersFilter.getColumn().resolve(); countAggregationSelect = countColumn.getType() == MajorTypeId.DATE_RANGE ? 
createSingleDaterangeColumnAggregationSelect(countColumn, alias, tables, functionProvider, stratificationFunctions) : createSingleDateColumnAggregationSelect(countColumn, alias, tables, functionProvider); } else { - Column startColumn = countQuartersFilter.getStartColumn(); - Column endColumn = countQuartersFilter.getEndColumn(); + Column startColumn = countQuartersFilter.getStartColumn().resolve(); + Column endColumn = countQuartersFilter.getEndColumn().resolve(); countAggregationSelect = createTwoDateColumnAggregationSelect(startColumn, endColumn, alias, tables, functionProvider, stratificationFunctions); } ConnectorSqlSelects selects = ConnectorSqlSelects.builder() diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java index 501340d79f..9e519583e2 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/CountSqlAggregator.java @@ -39,7 +39,7 @@ public ConnectorSqlSelects connectorSelect(CountSelect countSelect, SelectContex ConnectorSqlTables tables = selectContext.getTables(); CountType countType = CountType.fromBoolean(countSelect.isDistinct()); - Column countColumn = countSelect.getColumn(); + Column countColumn = countSelect.getColumn().resolve(); String alias = selectContext.getNameGenerator().selectName(countSelect); CommonAggregationSelect countAggregationSelect = createCountAggregationSelect(countColumn, countType, alias, tables); @@ -59,7 +59,7 @@ public SqlFilters convertToSqlFilter(CountFilter countFilter, FilterContext countAggregationSelect = createCountAggregationSelect(countColumn, countType, alias, tables); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java index ff7aba4043..c72832e7da 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/DateDistanceSqlAggregator.java @@ -37,7 +37,7 @@ public class DateDistanceSqlAggregator implements SelectConverter selectContext) { - Column column = select.getColumn(); + Column column = select.getColumn().resolve(); String alias = selectContext.getNameGenerator().selectName(select); ConnectorSqlTables tables = selectContext.getTables(); ConversionContext conversionContext = selectContext.getConversionContext(); @@ -60,7 +60,7 @@ public ConnectorSqlSelects connectorSelect(DateDistanceSelect select, SelectCont @Override public SqlFilters convertToSqlFilter(DateDistanceFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String alias = filterContext.getNameGenerator().selectName(filter); ConnectorSqlTables tables = filterContext.getTables(); ConversionContext conversionContext = filterContext.getConversionContext(); @@ -80,7 +80,7 @@ public SqlFilters convertToSqlFilter(DateDistanceFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java index a1505b3447..a2810beab0 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/FlagSqlAggregator.java @@ -9,6 +9,7 @@ 
import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.FlagFilter; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.FlagSelect; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext; @@ -99,7 +100,7 @@ private static Map> createFlagRootSelectMap .entrySet().stream() .collect(Collectors.toMap( Map.Entry::getKey, - entry -> new ExtractingSqlSelect<>(rootTable, entry.getValue().getName(), Boolean.class) + entry -> new ExtractingSqlSelect<>(rootTable, entry.getValue().resolve().getName(), Boolean.class) )); } @@ -170,9 +171,10 @@ public SqlFilters convertToSqlFilter(FlagFilter flagFilter, FilterContext getRequiredColumns(Map flags, Set selectedFlags) { + private static List getRequiredColumns(Map flags, Set selectedFlags) { return selectedFlags.stream() .map(flags::get) + .map(ColumnId::resolve) .toList(); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java index 8dbceba3be..5974b7536a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SumFilter; import com.bakdata.conquery.models.datasets.concepts.select.connector.specific.SumSelect; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import 
com.bakdata.conquery.sql.conversion.cqelement.concept.ConceptCteStep; import com.bakdata.conquery.sql.conversion.cqelement.concept.ConnectorSqlTables; import com.bakdata.conquery.sql.conversion.cqelement.concept.FilterContext; @@ -97,15 +98,15 @@ private enum SumDistinctCteStep implements CteStep { @Override public ConnectorSqlSelects connectorSelect(SumSelect sumSelect, SelectContext selectContext) { - Column sumColumn = sumSelect.getColumn(); - Column subtractColumn = sumSelect.getSubtractColumn(); - List distinctByColumns = sumSelect.getDistinctByColumn(); + Column sumColumn = sumSelect.getColumn().resolve(); + Column subtractColumn = sumSelect.getSubtractColumn() != null ? sumSelect.getSubtractColumn().resolve() : null; + List distinctByColumns = sumSelect.getDistinctByColumn().stream().map(ColumnId::resolve).toList(); NameGenerator nameGenerator = selectContext.getNameGenerator(); String alias = nameGenerator.selectName(sumSelect); ConnectorSqlTables tables = selectContext.getTables(); CommonAggregationSelect sumAggregationSelect; - if (distinctByColumns != null && !distinctByColumns.isEmpty()) { + if (!distinctByColumns.isEmpty()) { SqlIdColumns ids = selectContext.getIds(); sumAggregationSelect = createDistinctSumAggregationSelect(sumColumn, distinctByColumns, alias, ids, tables, nameGenerator); ExtractingSqlSelect finalSelect = createFinalSelect(sumAggregationSelect, tables); @@ -129,16 +130,16 @@ public ConnectorSqlSelects connectorSelect(SumSelect sumSelect, SelectContext sumFilter, FilterContext filterContext) { - Column sumColumn = sumFilter.getColumn(); - Column subtractColumn = sumFilter.getSubtractColumn(); - List distinctByColumns = sumFilter.getDistinctByColumn(); + Column sumColumn = sumFilter.getColumn().resolve(); + Column subtractColumn = sumFilter.getSubtractColumn() != null ? 
sumFilter.getSubtractColumn().resolve() : null; + List distinctByColumns = sumFilter.getDistinctByColumn().stream().map(ColumnId::resolve).toList(); String alias = filterContext.getNameGenerator().selectName(sumFilter); ConnectorSqlTables tables = filterContext.getTables(); CommonAggregationSelect sumAggregationSelect; ConnectorSqlSelects selects; - if (distinctByColumns != null && !distinctByColumns.isEmpty()) { + if (!distinctByColumns.isEmpty()) { sumAggregationSelect = createDistinctSumAggregationSelect(sumColumn, distinctByColumns, alias, filterContext.getIds(), tables, filterContext.getNameGenerator()); selects = ConnectorSqlSelects.builder() @@ -168,19 +169,20 @@ public SqlFilters convertToSqlFilter(SumFilter sumFilter, FilterContext filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Class numberClass = NumberMapUtil.getType(column); Field field = DSL.field(DSL.name(tableName, columnName), numberClass); - Column subtractColumn = filter.getSubtractColumn(); + ColumnId subtractColumn = filter.getSubtractColumn(); if (subtractColumn == null) { return new SumCondition(field, filterContext.getValue()).condition(); } - String subtractColumnName = subtractColumn.getName(); - String subtractTableName = subtractColumn.getTable().getName(); + Column resolvedSubtractionColumn = subtractColumn.resolve(); + String subtractColumnName = resolvedSubtractionColumn.getName(); + String subtractTableName = resolvedSubtractionColumn.getTable().getName(); Field subtractField = DSL.field(DSL.name(subtractTableName, subtractColumnName), numberClass); return new SumCondition(field.minus(subtractField), filterContext.getValue()).condition(); } diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java 
b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java index abfa39614e..65e3ac4980 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/AbstractSelectFilterConverter.java @@ -17,7 +17,7 @@ public SqlFilters convertToSqlFilter(F filter, FilterContext filterContext) { ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>( filterContext.getTables().getPredecessor(ConceptCteStep.PREPROCESSING), - filter.getColumn().getName(), + filter.getColumn().getColumn(), String.class ); @@ -39,7 +39,7 @@ public SqlFilters convertToSqlFilter(F filter, FilterContext filterContext) { @Override public Condition convertForTableExport(F filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Field field = DSL.field(DSL.name(tableName, columnName), String.class); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java index 16fe976764..cefab8290a 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/filter/NumberFilterConverter.java @@ -20,7 +20,7 @@ public class NumberFilterConverter> im @Override public SqlFilters convertToSqlFilter(NumberFilter filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); ConnectorSqlTables tables = filterContext.getTables(); Class numberClass = NumberMapUtil.getType(column); @@ -39,7 +39,7 @@ public SqlFilters convertToSqlFilter(NumberFilter filter, 
FilterContext filter, FilterContext filterContext) { - Column column = filter.getColumn(); + Column column = filter.getColumn().resolve(); String tableName = column.getTable().getName(); String columnName = column.getName(); Field field = DSL.field(DSL.name(tableName, columnName), Number.class); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java index 8de2c9361f..1bcda25c97 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/ConceptColumnSelectConverter.java @@ -43,7 +43,7 @@ public ConnectorSqlSelects connectorSelect(ConceptColumnSelect select, SelectCon if (connector.getColumn() == null) { return ConnectorSqlSelects.none(); } - ExtractingSqlSelect connectorColumn = new ExtractingSqlSelect<>(connector.getTable().getName(), connector.getColumn().getName(), Object.class); + ExtractingSqlSelect connectorColumn = new ExtractingSqlSelect<>(connector.getResolvedTableId().getTable(), connector.getColumn().getColumn(), Object.class); ExtractingSqlSelect qualified = connectorColumn.qualify(selectContext.getTables().getPredecessor(ConceptCteStep.EVENT_FILTER)); return ConnectorSqlSelects.builder() .preprocessingSelect(connectorColumn) @@ -117,14 +117,14 @@ private static QueryStep createConnectorColumnSelectQuery( String tableName = selectContext.getTables() .getConnectorTables() .stream() - .filter(tables -> Objects.equals(tables.getRootTable(), connector.getTable().getName())) + .filter(tables -> Objects.equals(tables.getRootTable(), connector.getResolvedTableId().getTable())) .findFirst() .map(tables -> tables.cteName(ConceptCteStep.EVENT_FILTER)) - .orElse(connector.getTable().getName()); + .orElse(connector.getResolvedTableId().getTable()); Table 
connectorTable = DSL.table(DSL.name(tableName)); SqlIdColumns ids = selectContext.getIds().qualify(connectorTable.getName()); - Field connectorColumn = DSL.field(DSL.name(connectorTable.getName(), connector.getColumn().getName())); + Field connectorColumn = DSL.field(DSL.name(connectorTable.getName(), connector.getColumn().resolve().getName())); Field casted = selectContext.getFunctionProvider().cast(connectorColumn, SQLDataType.VARCHAR).as(alias); FieldWrapper connectorSelect = new FieldWrapper<>(casted); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java index 5cde2abe5b..2c86a617e1 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/DistinctSelectConverter.java @@ -68,7 +68,7 @@ public ConnectorSqlSelects connectorSelect(DistinctSelect distinctSelect, Select String alias = selectContext.getNameGenerator().selectName(distinctSelect); ConnectorSqlTables tables = selectContext.getTables(); - FieldWrapper preprocessingSelect = new FieldWrapper<>(field(name(tables.getRootTable(), distinctSelect.getColumn().getName())).as(alias)); + FieldWrapper preprocessingSelect = new FieldWrapper<>(field(name(tables.getRootTable(), distinctSelect.getColumn().getColumn())).as(alias)); QueryStep distinctSelectCte = createDistinctSelectCte(preprocessingSelect, alias, selectContext); QueryStep aggregatedCte = createAggregationCte(selectContext, preprocessingSelect, distinctSelectCte, alias); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java index 64b19538bb..6eab341418 100644 --- 
a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/FirstValueSelectConverter.java @@ -8,7 +8,7 @@ public class FirstValueSelectConverter implements SelectConverter selectContext) { return ValueSelectUtil.createValueSelect( - select.getColumn(), + select.getColumn().resolve(), selectContext.getNameGenerator().selectName(select), (valueField, orderByFields) -> selectContext.getFunctionProvider().first(valueField, orderByFields), selectContext diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java index 06a27c7484..4cb0e45afc 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/LastValueSelectConverter.java @@ -8,7 +8,7 @@ public class LastValueSelectConverter implements SelectConverter selectContext) { return ValueSelectUtil.createValueSelect( - select.getColumn(), + select.getColumn().resolve(), selectContext.getNameGenerator().selectName(select), (valueField, orderByFields) -> selectContext.getFunctionProvider().last(valueField, orderByFields), selectContext diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java index 208860a993..100c74327d 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/select/RandomValueSelectConverter.java @@ -13,7 +13,7 @@ public ConnectorSqlSelects connectorSelect(RandomValueSelect select, SelectConte ConnectorSqlTables 
tables = selectContext.getTables(); String rootTableName = tables.getRootTable(); - String columnName = select.getColumn().getName(); + String columnName = select.getColumn().getColumn(); ExtractingSqlSelect rootSelect = new ExtractingSqlSelect<>(rootTableName, columnName, Object.class); String alias = selectContext.getNameGenerator().selectName(select); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java index e5d9ee6197..8b59f9c70e 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/SecondaryIdQueryConverter.java @@ -18,7 +18,7 @@ public ConversionContext convert(SecondaryIdQuery query, ConversionContext conte ConversionContext withConvertedQuery = context.getNodeConversions().convert( query.getQuery(), - context.withSecondaryIdDescription(query.getSecondaryId()) + context.withSecondaryIdDescription(query.getSecondaryId().resolve()) ); Preconditions.checkArgument(withConvertedQuery.getFinalQuery() != null, "The SecondaryIdQuery's query should be converted by now."); diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java index a519d5e8a1..83570651e4 100644 --- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java +++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/query/TableExportQueryConverter.java @@ -13,6 +13,7 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.models.common.daterange.CDateRange; import com.bakdata.conquery.models.datasets.Column; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import 
com.bakdata.conquery.sql.conversion.NodeConverter; import com.bakdata.conquery.sql.conversion.SharedAliases; import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext; @@ -49,7 +50,7 @@ public Class getConversionClass() { public ConversionContext convert(TableExportQuery tableExportQuery, ConversionContext context) { QueryStep convertedPrerequisite = convertPrerequisite(tableExportQuery, context); - Map positions = tableExportQuery.getPositions(); + Map positions = tableExportQuery.getPositions(); CDateRange dateRestriction = CDateRange.of(tableExportQuery.getDateRange()); List convertedTables = tableExportQuery.getTables().stream() @@ -105,12 +106,12 @@ private static QueryStep convertTable( CQConcept concept, CDateRange dateRestriction, QueryStep convertedPrerequisite, - Map positions, + Map positions, ConversionContext context ) { - Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(cqTable.getConnector().getTable(), context.getConfig()); + Field primaryColumn = TablePrimaryColumnUtil.findPrimaryColumn(cqTable.getConnector().resolve().getResolvedTable(), context.getConfig()); SqlIdColumns ids = new SqlIdColumns(primaryColumn); - String conceptConnectorName = context.getNameGenerator().conceptConnectorName(concept, cqTable.getConnector(), context.getSqlPrintSettings().getLocale()); + String conceptConnectorName = context.getNameGenerator().conceptConnectorName(concept, cqTable.getConnector().resolve(), context.getSqlPrintSettings().getLocale()); Optional validityDate = convertTablesValidityDate(cqTable, conceptConnectorName, context); List> exportColumns = initializeFields(cqTable, positions); @@ -142,22 +143,22 @@ private static Optional convertTablesValidityDate(CQTable table return Optional.of(ColumnDateRange.of(asStringExpression).asValidityDateRange(alias)); } - private static List> initializeFields(CQTable cqTable, Map positions) { + private static List> initializeFields(CQTable cqTable, Map positions) { Field[] exportColumns = 
createPlaceholders(positions, cqTable); - for (Column column : cqTable.getConnector().getTable().getColumns()) { + for (Column column : cqTable.getConnector().resolve().getResolvedTable().getColumns()) { // e.g. date column(s) are handled separately and not part of positions - if (!positions.containsKey(column)) { + if (!positions.containsKey(column.getId())) { continue; } - int position = positions.get(column) - 1; + int position = positions.get(column.getId()) - 1; exportColumns[position] = createColumnSelect(column, position); } return Arrays.stream(exportColumns).map(FieldWrapper::new).collect(Collectors.toList()); } - private static Field[] createPlaceholders(Map positions, CQTable cqTable) { + private static Field[] createPlaceholders(Map positions, CQTable cqTable) { Field[] exportColumns = new Field[positions.size() + 1]; exportColumns[0] = createSourceInfoSelect(cqTable); @@ -165,7 +166,7 @@ private static Field[] createPlaceholders(Map positions, CQT // if columns have the same computed position, they can share a common name because they will be unioned over multiple tables anyway positions.forEach((column, position) -> { int shifted = position - 1; - Field columnSelect = DSL.inline(null, Object.class).as("%s-%d".formatted(column.getName(), shifted)); + Field columnSelect = DSL.inline(null, Object.class).as("%s-%d".formatted(column.getColumn(), shifted)); exportColumns[shifted] = columnSelect; }); @@ -173,7 +174,7 @@ private static Field[] createPlaceholders(Map positions, CQT } private static Field createSourceInfoSelect(CQTable cqTable) { - String tableName = cqTable.getConnector().getTable().getName(); + String tableName = cqTable.getConnector().resolve().getResolvedTableId().getTable(); return DSL.val(tableName).as(SharedAliases.SOURCE.getAlias()); } @@ -191,7 +192,7 @@ private static Table joinConnectorTableWithPrerequisite( ConversionContext context ) { SqlFunctionProvider functionProvider = context.getSqlDialect().getFunctionProvider(); - Table 
connectorTable = DSL.table(DSL.name(cqTable.getConnector().getTable().getName())); + Table connectorTable = DSL.table(DSL.name(cqTable.getConnector().resolve().getResolvedTableId().getTable())); Table convertedPrerequisiteTable = DSL.table(DSL.name(convertedPrerequisite.getCteName())); ColumnDateRange validityDate = functionProvider.forValidityDate(cqTable.findValidityDate()); diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java index 0b1911c529..98c00a29ad 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/PermissionCleanupTask.java @@ -2,10 +2,12 @@ import java.io.PrintWriter; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.models.auth.entities.PermissionOwner; @@ -50,11 +52,12 @@ public void execute(Map> parameters, PrintWriter output) th * * @return The number of deleted permissions. 
*/ - public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Iterable> owners) { + public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Stream> owners) { int countDeleted = 0; // Do the loop-di-loop - for (PermissionOwner owner : owners) { - Set permissions = owner.getPermissions(); + for (Iterator> it = owners.iterator(); it.hasNext(); ) { + PermissionOwner owner = it.next(); + Set permissions = owner.getPermissions(); for (Permission permission : permissions) { WildcardPermission wpermission = getAsWildcardPermission(permission); if (wpermission == null) { @@ -90,7 +93,7 @@ public static int deleteQueryPermissionsWithMissingRef(MetaStorage storage, Iter countDeleted++; } - } + } return countDeleted; } @@ -113,7 +116,8 @@ private static WildcardPermission getAsWildcardPermission(Permission permission) */ public static & Owned, ID extends Id> int deletePermissionsOfOwnedInstances(MetaStorage storage, String permissionDomain, IdUtil.Parser idParser, Function instanceStorageExtractor) { int countDeleted = 0; - for (User user : storage.getAllUsers()) { + for (Iterator it = storage.getAllUsers().iterator(); it.hasNext(); ) { + User user = it.next(); Set permissions = user.getPermissions(); for (Permission permission : permissions) { WildcardPermission wpermission = getAsWildcardPermission(permission); @@ -157,7 +161,7 @@ public static & Owned, ID extends Id> int del } - } + } return countDeleted; diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java index fe45b4ceec..6aa161b446 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/QueryCleanupTask.java @@ -65,7 +65,7 @@ public void execute(Map> parameters, PrintWriter output) th throw new IllegalArgumentException("Query Expiration may not be null"); } - log.info("Starting deletion of queries 
older than {} of {}", queryExpiration, storage.getAllExecutions().size()); + log.info("Starting deletion of queries older than {} of {}", queryExpiration, storage.getAllExecutions().count()); // Iterate for as long as no changes are needed (this is because queries can be referenced by other queries) while (true) { @@ -73,7 +73,7 @@ public void execute(Map> parameters, PrintWriter output) th final Set toDelete = new HashSet<>(); - for (ManagedExecution execution : storage.getAllExecutions()) { + for (ManagedExecution execution : storage.getAllExecutions().toList()) { // Gather all referenced queries via reused checker. requiredQueries.addAll(execution.getSubmitted().collectRequiredQueries()); @@ -114,8 +114,8 @@ public void execute(Map> parameters, PrintWriter output) th // remove all queries referenced in reused queries. final Collection referenced = requiredQueries.stream() - .map(storage::getExecution) - .collect(Collectors.toSet()); + .map(storage::getExecution) + .collect(Collectors.toSet()); toDelete.removeAll(referenced); diff --git a/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java b/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java index fb5d423dc2..0a38378390 100644 --- a/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java +++ b/backend/src/main/java/com/bakdata/conquery/tasks/ReloadMetaStorageTask.java @@ -26,11 +26,11 @@ public void execute(Map> parameters, PrintWriter output) th output.println("BEGIN reloading MetaStorage."); { - final int allUsers = storage.getAllUsers().size(); - final int allExecutions = storage.getAllExecutions().size(); - final int allFormConfigs = storage.getAllFormConfigs().size(); - final int allGroups = storage.getAllGroups().size(); - final int allRoles = storage.getAllRoles().size(); + final long allUsers = storage.getAllUsers().count(); + final long allExecutions = storage.getAllExecutions().count(); + final long allFormConfigs = 
storage.getAllFormConfigs().count(); + final long allGroups = storage.getAllGroups().count(); + final long allRoles = storage.getAllRoles().count(); log.debug("BEFORE: Have {} Users, {} Groups, {} Roles, {} Executions, {} FormConfigs.", allUsers, allGroups, allRoles, allExecutions, allFormConfigs); @@ -40,11 +40,11 @@ public void execute(Map> parameters, PrintWriter output) th output.println("DONE reloading MetaStorage within %s.".formatted(timer.elapsed())); { - final int allUsers = storage.getAllUsers().size(); - final int allExecutions = storage.getAllExecutions().size(); - final int allFormConfigs = storage.getAllFormConfigs().size(); - final int allGroups = storage.getAllGroups().size(); - final int allRoles = storage.getAllRoles().size(); + final long allUsers = storage.getAllUsers().count(); + final long allExecutions = storage.getAllExecutions().count(); + final long allFormConfigs = storage.getAllFormConfigs().count(); + final long allGroups = storage.getAllGroups().count(); + final long allRoles = storage.getAllRoles().count(); log.debug("AFTER: Have {} Users, {} Groups, {} Roles, {} Executions, {} FormConfigs.", allUsers, allGroups, allRoles, allExecutions, allFormConfigs); diff --git a/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java b/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java index a76e0040c2..33bd31a411 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/util/AuthUtil.java @@ -21,7 +21,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage // Remove form configurations int countForms = 0; - for (FormConfig formConfig : storage.getAllFormConfigs()) { + for (FormConfig formConfig : storage.getAllFormConfigs().toList()) { if (!user.isOwner(formConfig)) { continue; } @@ -32,7 +32,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage // Remove executions int countExecs = 0; - for 
(ManagedExecution exec : storage.getAllExecutions()) { + for (ManagedExecution exec : storage.getAllExecutions().toList()) { if (!user.isOwner(exec)) { continue; } @@ -43,7 +43,7 @@ public synchronized void cleanUpUserAndBelongings(User user, MetaStorage storage log.debug("Removed {} form configs and {} executions for user '{}'", countForms, countExecs, user); - for (Group group : storage.getAllGroups()) { + for (Group group : storage.getAllGroups().toList()) { if (group.containsMember(user)) { group.removeMember(user); group.updateStorage(); diff --git a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java index 901394f33a..15cd949262 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java +++ b/backend/src/main/java/com/bakdata/conquery/util/QueryUtils.java @@ -28,13 +28,14 @@ import com.bakdata.conquery.models.auth.permissions.ConqueryPermission; import com.bakdata.conquery.models.common.CDateSet; import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.ids.NamespacedId; import com.bakdata.conquery.models.identifiable.ids.NamespacedIdentifiable; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.query.NamespacedIdentifiableHolding; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.QueryExecutionContext; @@ -75,6 +76,100 @@ public static String createTotalDefaultMultiLabel(List elements, Stri return 
elements.stream().map(elt -> elt.defaultLabel(locale)).collect(Collectors.joining(delimiter)); } + public static void generateConceptReadPermissions(@NonNull QueryUtils.NamespacedIdentifiableCollector idCollector, @NonNull Collection collectPermissions){ + idCollector.getIdentifiables().stream() + .filter(id -> id instanceof ConceptElement) + .map(ConceptElement.class::cast) + .>map(ConceptElement::getConcept) + .map(cId -> cId.createPermission(Ability.READ.asSet())) + .distinct() + .collect(Collectors.toCollection(() -> collectPermissions)); + } + + public static QueryExecutionContext determineDateAggregatorForContext(QueryExecutionContext ctx, Supplier>> altValidityDateAggregator) { + if (ctx.getQueryDateAggregator().isPresent()) { + return ctx; + } + return ctx.withQueryDateAggregator(altValidityDateAggregator.get()); + } + + public static String makeQueryLabel(final Visitable query, PrintSettings cfg, ManagedExecutionId id) { + final StringBuilder sb = new StringBuilder(); + + final Map, List> sortedContents = + Visitable.stream(query) + .collect(Collectors.groupingBy(Visitable::getClass)); + + int sbStartSize = sb.length(); + + // Check for CQExternal + List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); + if (!externals.isEmpty()) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); + } + + // Check for CQReused + if (sortedContents.containsKey(CQReusedQuery.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); + } + + + // Check for CQConcept + if (sortedContents.containsKey(CQConcept.class)) { + if (!sb.isEmpty()) { + sb.append(" "); + } + // Track length of text we are appending for concepts. 
+ final AtomicInteger length = new AtomicInteger(); + + sortedContents.get(CQConcept.class) + .stream() + .map(CQConcept.class::cast) + + .map(c -> makeLabelWithRootAndChild(c, cfg)) + .filter(Predicate.not(Strings::isNullOrEmpty)) + .distinct() + + .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) + .forEach(label -> sb.append(label).append(" ")); + + // Last entry will output one Space that we don't want + if (!sb.isEmpty()) { + sb.deleteCharAt(sb.length() - 1); + } + + // If not all Concept could be included in the name, point that out + if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { + sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); + } + } + + + // Fallback to id if nothing could be extracted from the query description + if (sbStartSize == sb.length()) { + sb.append(id.getExecution()); + } + + return sb.toString(); + } + + private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { + String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); + + if (label == null) { + label = cqConcept.getConcept().getLabel(); + } + + // Concat everything with dashes + return label.replace(" ", "-"); + } + /** * Checks if the query requires to resolve external ids. * @@ -137,7 +232,6 @@ public Optional getOnlyReused() { } } - /** * Collects all {@link NamespacedIdentifiable} provided by a user from a * {@link Visitable}. @@ -159,10 +253,10 @@ public void accept(Visitable element) { * Collects all {@link NamespacedId} references provided by a user from a * {@link Visitable}. 
*/ + @Getter public static class AvailableSecondaryIdCollector implements QueryVisitor { - @Getter - private final Set ids = new HashSet<>(); + private final Set ids = new HashSet<>(); @Override public void accept(Visitable element) { @@ -174,7 +268,7 @@ public void accept(Visitable element) { } for (Connector connector : cqConcept.getConcept().getConnectors()) { - for (Column column : connector.getTable().getColumns()) { + for (Column column : connector.getResolvedTable().getColumns()) { if(column.getSecondaryId() == null){ continue; } @@ -185,101 +279,4 @@ public void accept(Visitable element) { } } } - - public static void generateConceptReadPermissions(@NonNull QueryUtils.NamespacedIdentifiableCollector idCollector, @NonNull Collection collectPermissions){ - idCollector.getIdentifiables().stream() - .filter(id -> id instanceof ConceptElement) - .map(ConceptElement.class::cast) - .map(ConceptElement::getConcept) - .map(cId -> cId.createPermission(Ability.READ.asSet())) - .distinct() - .collect(Collectors.toCollection(() -> collectPermissions)); - } - - - - public static QueryExecutionContext determineDateAggregatorForContext(QueryExecutionContext ctx, Supplier>> altValidityDateAggregator) { - if (ctx.getQueryDateAggregator().isPresent()) { - return ctx; - } - return ctx.withQueryDateAggregator(altValidityDateAggregator.get()); - } - - public static String makeQueryLabel(final Visitable query, PrintSettings cfg, ManagedExecutionId id) { - final StringBuilder sb = new StringBuilder(); - - final Map, List> sortedContents = - Visitable.stream(query) - .collect(Collectors.groupingBy(Visitable::getClass)); - - int sbStartSize = sb.length(); - - // Check for CQExternal - List externals = sortedContents.getOrDefault(CQExternal.class, Collections.emptyList()); - if (!externals.isEmpty()) { - if (!sb.isEmpty()) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).external()); - } - - // Check for CQReused - if 
(sortedContents.containsKey(CQReusedQuery.class)) { - if (!sb.isEmpty()) { - sb.append(" "); - } - sb.append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).reused()); - } - - - // Check for CQConcept - if (sortedContents.containsKey(CQConcept.class)) { - if (!sb.isEmpty()) { - sb.append(" "); - } - // Track length of text we are appending for concepts. - final AtomicInteger length = new AtomicInteger(); - - sortedContents.get(CQConcept.class) - .stream() - .map(CQConcept.class::cast) - - .map(c -> makeLabelWithRootAndChild(c, cfg)) - .filter(Predicate.not(Strings::isNullOrEmpty)) - .distinct() - - .takeWhile(elem -> length.addAndGet(elem.length()) < MAX_CONCEPT_LABEL_CONCAT_LENGTH) - .forEach(label -> sb.append(label).append(" ")); - - // Last entry will output one Space that we don't want - if (!sb.isEmpty()) { - sb.deleteCharAt(sb.length() - 1); - } - - // If not all Concept could be included in the name, point that out - if (length.get() > MAX_CONCEPT_LABEL_CONCAT_LENGTH) { - sb.append(" ").append(C10N.get(CQElementC10n.class, I18n.LOCALE.get()).furtherConcepts()); - } - } - - - // Fallback to id if nothing could be extracted from the query description - if (sbStartSize == sb.length()) { - sb.append(id.getExecution()); - } - - return sb.toString(); - } - - - private static String makeLabelWithRootAndChild(CQConcept cqConcept, PrintSettings cfg) { - String label = cqConcept.getUserOrDefaultLabel(cfg.getLocale()); - - if (label == null) { - label = cqConcept.getConcept().getLabel(); - } - - // Concat everything with dashes - return label.replace(" ", "-"); - } } diff --git a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java index 3f1716a3f6..6dbb8e568d 100644 --- a/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java +++ b/backend/src/main/java/com/bakdata/conquery/util/io/IdColumnUtil.java @@ -52,7 +52,7 @@ public static IdPrinter getIdPrinter(Subject 
owner, ManagedExecution execution, .findFirst() .orElseThrow(); - if (owner.isPermitted(execution.getDataset(), Ability.PRESERVE_ID)) { + if (owner.isPermitted(execution.getDataset().resolve(), Ability.PRESERVE_ID)) { // todo(tm): The integration of ids in the sql connector needs to be properly managed return new FullIdPrinter(namespace.getStorage().getIdMapping(), size, pos); } diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl index 59d2a6858c..428201bd66 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/concept.html.ftl @@ -6,11 +6,11 @@ <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.dataset.label, "Concepts", c.label] + labels=["Datasets", c.dataset.resolve().label, "Concepts", c.label] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.dataset.id}", - "/admin-ui/datasets/${c.dataset.id}#Concepts" + "/admin-ui/datasets/${c.dataset}", + "/admin-ui/datasets/${c.dataset}#Concepts" ] /> <@infoCard.infoCard @@ -59,7 +59,7 @@ "${descriptionHeader}": x.description!"" } ) - link="/admin-ui/datasets/${c.dataset.id}/connectors/" + link="/admin-ui/datasets/${c.dataset}/connectors/" /> diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl index 6b00c5d789..ad055abc11 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/connector.html.ftl @@ -11,28 +11,28 @@ "${idHeader}": x.id, "name": x.name, "${labelHeader}": x.label, - "${requiredColumnsHeader}": x.requiredColumns?sort_by("name")?join(', ') + "${requiredColumnsHeader}": x.requiredColumns?sort?join(', 
') }) /> <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.concept.dataset.label, "Concept", c.concept.label, "Connector", c.label] + labels=["Datasets", c.concept.dataset.resolve().label, "Concept", c.concept.label, "Connector", c.label] links=[ "/admin-ui/datasets", - "/admin-ui/datasets/${c.dataset.id}", - "/admin-ui/datasets/${c.dataset.id}#Concepts", - "/admin-ui/datasets/${c.dataset.id}/concepts/${c.concept.id}" - "/admin-ui/datasets/${c.dataset.id}/concepts/${c.concept.id}#Connectors" + "/admin-ui/datasets/${c.dataset}", + "/admin-ui/datasets/${c.dataset}#Concepts", + "/admin-ui/datasets/${c.dataset}/concepts/${c.concept.id}" + "/admin-ui/datasets/${c.dataset}/concepts/${c.concept.id}#Connectors" ] /> <@infoCard.infoCard class="d-inline-flex mt-2" labels=["ID", "Label", "Validity Dates", "Table"] - values=[c.id, c.label, c.validityDates?join(', '), c.table.name] - links={"Table": "/admin-ui/datasets/${c.dataset.id}/tables/${c.table.id}"} + values=[c.id, c.label, c.validityDates?join(', '), c.getResolvedTable().name] + links={"Table": "/admin-ui/datasets/${c.dataset}/tables/${c.getResolvedTable().id}"} /> <@accordion.accordionGroup> <#assign idHeader = "id"> diff --git a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl index 6ece489be3..2f073b17d2 100644 --- a/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl +++ b/backend/src/main/resources/com/bakdata/conquery/resources/admin/ui/table.html.ftl @@ -5,7 +5,7 @@ <#import "templates/table.html.ftl" as table> <#macro deleteTagButton id> - + <#macro columnInfoRender id> @@ -22,11 +22,11 @@ <@layout.layout> <@breadcrumbs.breadcrumbs - labels=["Datasets", c.table.dataset.label, "Tables", c.table.label] + labels=["Datasets", c.table.dataset.resolve().label, "Tables", c.table.label] links=[ "/admin-ui/datasets", - 
"/admin-ui/datasets/${c.table.dataset.id}", - "/admin-ui/datasets/${c.table.dataset.id}#Tables" + "/admin-ui/datasets/${c.table.dataset}", + "/admin-ui/datasets/${c.table.dataset}#Tables" ] /> <@infoCard.infoCard @@ -41,7 +41,7 @@ <@table.table columns=["id", "name", "numberOfEntries", "actions"] items=c.imports?sort_by("name") - link="/admin-ui/datasets/${c.table.dataset.id}/tables/${c.table.id}/import/" + link="/admin-ui/datasets/${c.table.dataset}/tables/${c.table.id}/import/" deleteButton=deleteTagButton /> @@ -49,7 +49,7 @@ <@table.table columns=["id", "name"] items=c.concepts?sort_by("name") - link="/admin-ui/datasets/${c.table.dataset.id}/concepts/" + link="/admin-ui/datasets/${c.table.dataset}/concepts/" /> <@accordion.accordion summary="Columns" infoText="${c.table.columns?size} entries"> diff --git a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java index f233059782..84e797f740 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/StoredQueriesProcessorTest.java @@ -52,7 +52,8 @@ import com.bakdata.conquery.models.query.resultinfo.ResultInfo; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; -import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.MetaStorageExtension; +import com.bakdata.conquery.util.extensions.UserExtension; import com.google.common.collect.ImmutableList; import io.dropwizard.core.setup.Environment; import io.dropwizard.jersey.validation.Validators; @@ -61,16 +62,28 @@ import lombok.SneakyThrows; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class StoredQueriesProcessorTest { + public static final ConqueryConfig CONFIG = new 
ConqueryConfig(); + public static final UriBuilder URI_BUILDER = UriBuilder.fromPath("http://localhost"); + public static final IndexService INDEX_SERVICE = new IndexService(CONFIG.getCsv().createCsvParserSettings(), "empty"); private static final Environment ENVIRONMENT = new Environment("StoredQueriesProcessorTest"); + @RegisterExtension + private static final MetaStorageExtension STORAGE_EXTENTION = new MetaStorageExtension(ENVIRONMENT.metrics()); + public static final MetaStorage STORAGE = STORAGE_EXTENTION.getMetaStorage(); + @RegisterExtension + private static final UserExtension USER_0_EXTENSIONS = new UserExtension(STORAGE, "0"); + @RegisterExtension + private static final UserExtension USER_1_EXTENSIONS = new UserExtension(STORAGE, "1"); + private static final User[] USERS = new User[]{ + USER_0_EXTENSIONS.getUser(), + USER_1_EXTENSIONS.getUser() + }; + private static final Validator VALIDATOR = Validators.newValidator(); - public static final NonPersistentStoreFactory NON_PERSISTENT_STORE_FACTORY = new NonPersistentStoreFactory(); - public static final ConqueryConfig CONFIG = new ConqueryConfig().withStorage(NON_PERSISTENT_STORE_FACTORY); - private static final MetaStorage STORAGE = NON_PERSISTENT_STORE_FACTORY.createMetaStorage(); public static final InternalMapperFactory INTERNAL_MAPPER_FACTORY = new InternalMapperFactory(CONFIG, VALIDATOR); - public static final IndexService INDEX_SERVICE = new IndexService(CONFIG.getCsv().createCsvParserSettings(), "empty"); private static final DatasetRegistry DATASET_REGISTRY = new DatasetRegistry<>( @@ -80,54 +93,58 @@ public class StoredQueriesProcessorTest { new ClusterNamespaceHandler(new ClusterState(), CONFIG, INTERNAL_MAPPER_FACTORY), INDEX_SERVICE ); - private static final QueryProcessor QUERY_PROCESSOR = new QueryProcessor(DATASET_REGISTRY, STORAGE, CONFIG, VALIDATOR); - + private static final QueryProcessor processor = new QueryProcessor(DATASET_REGISTRY, STORAGE, CONFIG, VALIDATOR); + private static final 
ExcelResultProvider EXCEL_RESULT_PROVIDER = new ExcelResultProvider(); + private static final CsvResultProvider CSV_RESULT_PROVIDER = new CsvResultProvider(); + private static final ArrowResultProvider ARROW_RESULT_PROVIDER = new ArrowResultProvider(); + private static final ParquetResultProvider PARQUET_RESULT_PROVIDER = new ParquetResultProvider(); private static final Dataset DATASET_0 = new Dataset() {{ setName("dataset0"); }}; private static final Dataset DATASET_1 = new Dataset() {{ setName("dataset1"); }}; + private static ManagedExecutionId QUERY_ID_0; + private static ManagedExecutionId QUERY_ID_1; + private static ManagedExecutionId QUERY_ID_2; + private static ManagedExecutionId QUERY_ID_3; + private static ManagedExecutionId QUERY_ID_4; + private static ManagedExecutionId QUERY_ID_5; + private static ManagedExecutionId QUERY_ID_6; + private static ManagedExecutionId QUERY_ID_7; + private static ManagedExecutionId QUERY_ID_8; + private static ManagedExecutionId QUERY_ID_9; + private static ManagedExecutionId QUERY_ID_10; + private static List QUERIES; - private static final ManagedExecutionId QUERY_ID_0 = createExecutionId(DATASET_0, "0"); - private static final ManagedExecutionId QUERY_ID_1 = createExecutionId(DATASET_1, "1"); - private static final ManagedExecutionId QUERY_ID_2 = createExecutionId(DATASET_0, "2"); - private static final ManagedExecutionId QUERY_ID_3 = createExecutionId(DATASET_0, "3"); - private static final ManagedExecutionId QUERY_ID_4 = createExecutionId(DATASET_0, "4"); - private static final ManagedExecutionId QUERY_ID_5 = createExecutionId(DATASET_0, "5"); - private static final ManagedExecutionId QUERY_ID_6 = createExecutionId(DATASET_0, "6"); - private static final ManagedExecutionId QUERY_ID_7 = createExecutionId(DATASET_0, "7"); - private static final ManagedExecutionId QUERY_ID_8 = createExecutionId(DATASET_0, "8"); - private static final ManagedExecutionId QUERY_ID_9 = createExecutionId(DATASET_0, "9"); - private static 
final ManagedExecutionId QUERY_ID_10 = createExecutionId(DATASET_0, "10"); - public static final UriBuilder URI_BUILDER = UriBuilder.fromPath("http://localhost"); + @BeforeAll + public static void beforeAll() throws IOException { + new AuthorizationController(STORAGE, CONFIG, new Environment(StoredQueriesProcessorTest.class.getSimpleName()), null); - private static final ExcelResultProvider EXCEL_RESULT_PROVIDER = new ExcelResultProvider(); - private static final CsvResultProvider CSV_RESULT_PROVIDER = new CsvResultProvider(); - private static final ArrowResultProvider ARROW_RESULT_PROVIDER = new ArrowResultProvider(); - private static final ParquetResultProvider PARQUET_RESULT_PROVIDER = new ParquetResultProvider(); + DATASET_REGISTRY.createNamespace(DATASET_0, STORAGE, ENVIRONMENT); + DATASET_REGISTRY.createNamespace(DATASET_1, STORAGE, ENVIRONMENT); - private static ManagedExecutionId createExecutionId(Dataset dataset0, String s) { - StringBuilder idBuilder = new StringBuilder("00000000-0000-0000-0000-000000000000"); - idBuilder.replace(idBuilder.length() - s.length(), idBuilder.length(), s); - return new ManagedExecutionId(dataset0.getId(), UUID.fromString(idBuilder.toString())); - } + QUERY_ID_0 = createExecutionId(DATASET_0, "0"); + QUERY_ID_1 = createExecutionId(DATASET_1, "1"); + QUERY_ID_2 = createExecutionId(DATASET_0, "2"); + QUERY_ID_3 = createExecutionId(DATASET_0, "3"); + QUERY_ID_4 = createExecutionId(DATASET_0, "4"); + QUERY_ID_5 = createExecutionId(DATASET_0, "5"); + QUERY_ID_6 = createExecutionId(DATASET_0, "6"); + QUERY_ID_7 = createExecutionId(DATASET_0, "7"); + QUERY_ID_8 = createExecutionId(DATASET_0, "8"); + QUERY_ID_9 = createExecutionId(DATASET_0, "9"); + QUERY_ID_10 = createExecutionId(DATASET_0, "10"); - private static final User[] USERS = new User[]{ - mockUser(0, List.of(QUERY_ID_0, QUERY_ID_1, QUERY_ID_2, QUERY_ID_4, QUERY_ID_7, QUERY_ID_9, QUERY_ID_10)), - mockUser(1, List.of(QUERY_ID_3, QUERY_ID_4)) - }; + User user0 = 
USER_0_EXTENSIONS.getUser(); + for (ManagedExecutionId id : List.of(QUERY_ID_4, QUERY_ID_7, QUERY_ID_9, QUERY_ID_10)) { - private static List queries; + user0.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, id)); + } - @BeforeAll - public static void beforeAll() throws IOException { - DATASET_REGISTRY.createNamespace(DATASET_0, STORAGE, ENVIRONMENT); - DATASET_REGISTRY.createNamespace(DATASET_1, STORAGE, ENVIRONMENT); - new AuthorizationController(STORAGE, CONFIG, new Environment(StoredQueriesProcessorTest.class.getSimpleName()), null); - queries = ImmutableList.of( + QUERIES= ImmutableList.of( mockManagedConceptQueryFrontEnd(USERS[0], QUERY_ID_0, NEW, DATASET_0, 100L), // included mockManagedConceptQueryFrontEnd(USERS[0], QUERY_ID_1, NEW, DATASET_1, 100L), // not included: wrong dataset mockManagedForm(USERS[0], QUERY_ID_2, NEW, DATASET_0), // not included: not a ManagedQuery @@ -145,47 +162,6 @@ public static void beforeAll() throws IOException { ); } - - @Test - public void getQueriesFiltered() { - List infos = QUERY_PROCESSOR.getQueriesFiltered(DATASET_0.getId(), URI_BUILDER, USERS[0], queries, true) - .collect(Collectors.toList()); - - assertThat(infos) - .containsExactly( - makeState(QUERY_ID_0, USERS[0], USERS[0], NEW, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_4, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_7, USERS[1], USERS[0], DONE, "SECONDARY_ID_QUERY", new SecondaryIdDescriptionId(DATASET_0.getId(), "sid"), 100L), - makeState(QUERY_ID_9, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), - makeState(QUERY_ID_10, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 2_000_000L) - - ); - } - - private static User mockUser(int id, List allowedQueryIds) { - final User user = new User("user" + id, null, STORAGE); - - STORAGE.addUser(user); - - for (ManagedExecutionId queryId : allowedQueryIds) { - user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR,queryId)); - } - - 
return user; - - } - - private static ManagedForm mockManagedForm(User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset) { - ManagedInternalForm managedInternalForm = new ManagedInternalForm<>(new ExportForm(), user, dataset, STORAGE, DATASET_REGISTRY) { - { - setCreationTime(LocalDateTime.MIN); - setQueryId(id.getExecution()); - } - }; - setState(execState, managedInternalForm.getId()); - return managedInternalForm; - } - private static void setState(ExecutionState execState, ManagedExecutionId id) { if (execState != NEW) { DistributedExecutionManager.DistributedState state = new DistributedExecutionManager.DistributedState(); @@ -196,6 +172,13 @@ private static void setState(ExecutionState execState, ManagedExecutionId id) { } } + private static ManagedExecutionId createExecutionId(Dataset dataset0, String s) { + StringBuilder idBuilder = new StringBuilder("00000000-0000-0000-0000-000000000000"); + idBuilder.replace(idBuilder.length() - s.length(), idBuilder.length(), s); + + return new ManagedExecutionId(dataset0.getId(), UUID.fromString(idBuilder.toString())); + } + private static ManagedQuery mockManagedConceptQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, Dataset dataset, long resultCount) { return mockManagedQuery( new ConceptQuery( @@ -209,20 +192,19 @@ private static ManagedQuery mockManagedConceptQueryFrontEnd(User user, ManagedEx execState, dataset, resultCount ); } - private static ManagedQuery mockManagedSecondaryIdQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, CQElement root, Dataset dataset){ - final SecondaryIdQuery sid = new SecondaryIdQuery(); - sid.setSecondaryId(new SecondaryIdDescription() {{ - setDataset(dataset); - setName("sid"); - }}); - sid.setRoot(root); - return mockManagedQuery(sid, user, id, execState, dataset, 100L); + private static ManagedForm mockManagedForm(User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset){ + return 
new ManagedInternalForm<>(new ExportForm(), user.getId(), dataset.getId(), STORAGE, DATASET_REGISTRY) { + { + setState(execState, id); + setCreationTime(LocalDateTime.MIN); + setQueryId(id.getExecution()); + } + }; } - private static ManagedQuery mockManagedQuery(Query queryDescription, User user, ManagedExecutionId id, ExecutionState execState, final Dataset dataset, final long resultCount) { - ManagedQuery managedQuery = new ManagedQuery(queryDescription, user, dataset, STORAGE, DATASET_REGISTRY) { + ManagedQuery managedQuery = new ManagedQuery(queryDescription, user.getId(), dataset.getId(), STORAGE, DATASET_REGISTRY) { { setCreationTime(LocalDateTime.MIN); setQueryId(id.getExecution()); @@ -241,11 +223,40 @@ public List getResultInfos() { return managedQuery; } + private static ManagedQuery mockManagedSecondaryIdQueryFrontEnd(User user, ManagedExecutionId id, ExecutionState execState, CQElement root, Dataset dataset){ + final SecondaryIdQuery sIdQ = new SecondaryIdQuery(); + SecondaryIdDescription sId = new SecondaryIdDescription() {{ + setDataset(dataset.getId()); + setName("sid"); + }}; + sIdQ.setSecondaryId(sId.getId()); + sIdQ.setRoot(root); + + return mockManagedQuery(sIdQ, user, id, execState, dataset, 100L); + } + + @Test + public void getQueriesFiltered() { + + List infos = processor.getQueriesFiltered(DATASET_0.getId(), URI_BUILDER, USERS[0], QUERIES.stream(), true) + .collect(Collectors.toList()); + + assertThat(infos) + .containsExactly( + makeState(QUERY_ID_0, USERS[0], USERS[0], NEW, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_4, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_7, USERS[1], USERS[0], DONE, "SECONDARY_ID_QUERY", new SecondaryIdDescriptionId(DATASET_0.getId(), "sid"), 100L), + makeState(QUERY_ID_9, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 100L), + makeState(QUERY_ID_10, USERS[1], USERS[0], DONE, "CONCEPT_QUERY", null, 2_000_000L) + + ); + } + @SneakyThrows private static ExecutionStatus 
makeState(ManagedExecutionId id, User owner, User callingUser, ExecutionState state, String typeLabel, SecondaryIdDescriptionId secondaryId, Long resultCount) { OverviewExecutionStatus status = new OverviewExecutionStatus(); - final ManagedQuery execMock = new ManagedQuery(null, owner, DATASET_0, STORAGE, DATASET_REGISTRY) { + final ManagedQuery execMock = new ManagedQuery(null, owner.getId(), DATASET_0.getId(), STORAGE, DATASET_REGISTRY) { { setQueryId(id.getExecution()); setLastResultCount(resultCount); @@ -263,13 +274,14 @@ public List getResultInfos() { status.setPristineLabel(true); status.setCreatedAt(LocalDateTime.MIN.atZone(ZoneId.systemDefault())); status.setOwner(owner.getId()); + status.setOwnerName(owner.getLabel()); status.setShared(false); status.setOwn(owner.equals(callingUser)); status.setId(id); status.setStatus(state); status.setQueryType(typeLabel); status.setNumberOfResults(resultCount); - status.setSecondaryId(secondaryId); // This is probably not interesting on the overview (only if there is an filter for the search) + status.setSecondaryId(secondaryId); // This is probably not interesting on the overview (only if there is a filter for the search) if(state.equals(DONE)) { List resultUrls = new ArrayList<>(); resultUrls.addAll(EXCEL_RESULT_PROVIDER.generateResultURLs(execMock, URI_BUILDER.clone(), true)); diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java index f287060707..d1af7170eb 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java +++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java @@ -2,7 +2,8 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.when; import java.net.URL; 
import java.time.ZoneId; @@ -18,7 +19,6 @@ import com.bakdata.conquery.apiv1.forms.export_form.AbsoluteMode; import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; import com.bakdata.conquery.apiv1.forms.export_form.RelativeMode; -import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.jackson.MutableInjectableValues; @@ -39,11 +39,12 @@ import com.bakdata.conquery.models.forms.frontendconfiguration.FormConfigProcessor; import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner; import com.bakdata.conquery.models.forms.frontendconfiguration.FormType; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.worker.DatasetRegistry; -import com.bakdata.conquery.models.worker.IdResolveContext; import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; @@ -59,6 +60,7 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInstance; import org.junit.jupiter.api.TestInstance.Lifecycle; +import org.mockito.Mockito; /** @@ -69,14 +71,11 @@ public class FormConfigTest { private final ConqueryConfig config = new ConqueryConfig(); - - private MetaStorage storage; - - private FormConfigProcessor processor; - private Validator validator = Validators.newValidatorFactory().getValidator(); - + private final Validator validator = Validators.newValidatorFactory().getValidator(); private final Dataset dataset = new Dataset("test"); private final Dataset dataset1 = new 
Dataset("test1"); + private MetaStorage storage; + private FormConfigProcessor processor; private DatasetId datasetId; private DatasetId datasetId1; private ExportForm form; @@ -90,22 +89,16 @@ public void setupTestClass() throws Exception { datasetId1 = dataset1.getId(); // Mock DatasetRegistry for translation - DatasetRegistry namespacesMock = mock(DatasetRegistry.class); - - doAnswer(invocation -> { - throw new UnsupportedOperationException("Not yet implemented"); - }).when(namespacesMock).getOptional(any()); + DatasetRegistry namespacesMock = Mockito.mock(DatasetRegistry.class); doAnswer(invocation -> { final DatasetId id = invocation.getArgument(0); - Namespace namespaceMock = mock(LocalNamespace.class); + Namespace namespaceMock = Mockito.mock(LocalNamespace.class); if (id.equals(datasetId)) { when(namespaceMock.getDataset()).thenReturn(dataset); - } - else if (id.equals(datasetId1)) { + } else if (id.equals(datasetId1)) { when(namespaceMock.getDataset()).thenReturn(dataset1); - } - else { + } else { throw new IllegalStateException("Unknown dataset id."); } return namespaceMock; @@ -117,20 +110,17 @@ else if (id.equals(datasetId1)) { storage = new NonPersistentStoreFactory().createMetaStorage(); ((MutableInjectableValues) FormConfigProcessor.getMAPPER().getInjectableValues()) - .add(IdResolveContext.class, namespacesMock); + .add(NamespacedStorageProvider.class, namespacesMock); processor = new FormConfigProcessor(validator, storage, namespacesMock); AuthorizationController controller = new AuthorizationController(storage, config, new Environment(this.getClass().getSimpleName()), null); controller.start(); + } @BeforeEach public void setupTest() { - - user = new User("test", "test", storage); - storage.addUser(user); - - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), user, dataset, null, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), dataset.getId(), storage, null); 
managedQuery.setQueryId(UUID.randomUUID()); form = new ExportForm(); @@ -138,6 +128,11 @@ public void setupTest() { form.setTimeMode(mode); form.setQueryGroupId(managedQuery.getId()); mode.setForm(form); + + + user = new User("test", "test", storage); + user.setMetaStorage(storage); + storage.addUser(user); } @AfterEach @@ -157,7 +152,7 @@ public void addConfigWithoutTranslation() { processor.addConfig(user, dataset, formConfig); - assertThat(storage.getAllFormConfigs()).containsExactly(formConfig.intern(user, dataset.getId())); + assertThat(storage.getAllFormConfigs()).containsExactly(formConfig.intern(user.getId(), dataset.getId())); } @Test @@ -168,6 +163,7 @@ public void deleteConfig() { ObjectMapper mapper = FormConfigProcessor.getMAPPER(); FormConfig formConfig = new FormConfig(form.getClass().getAnnotation(CPSType.class).id(), mapper.valueToTree(form)); formConfig.setDataset(dataset.getId()); + formConfig.setOwner(user.getId()); user.addPermission(formConfig.createPermission(AbilitySets.FORM_CONFIG_CREATOR)); storage.addFormConfig(formConfig); @@ -190,7 +186,7 @@ public void getConfig() { JsonNode values = mapper.valueToTree(form); FormConfig formConfig = new FormConfig(form.getClass().getAnnotation(CPSType.class).id(), values); formConfig.setDataset(dataset.getId()); - formConfig.setOwner(user); + formConfig.setOwner(user.getId()); user.addPermission(formConfig.createPermission(Ability.READ.asSet())); storage.addFormConfig(formConfig); @@ -338,13 +334,16 @@ public void patchConfig() { patchedFormExpected.setLabel("newTestLabel"); patchedFormExpected.setShared(true); patchedFormExpected.setTags(new String[]{"tag1", "tag2"}); - patchedFormExpected.setOwner(user); + patchedFormExpected.setOwner(user.getId()); patchedFormExpected.setValues(new ObjectNode(mapper.getNodeFactory(), Map.of("test-Node", new TextNode("test-text")))); + final String[] fieldsToIgnore = new String[] {FormConfig.Fields.creationTime, "cachedId", "metaStorage", "nsIdResolver"}; final 
FormConfigId formId = config.getId(); - assertThat(storage.getFormConfig(formId)).usingRecursiveComparison() - .ignoringFields("cachedId", FormConfig.Fields.creationTime) - .isEqualTo(patchedFormExpected); + assertThat(storage.getFormConfig(formId)) + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(fieldsToIgnore) + .isEqualTo(patchedFormExpected); assertThat(storage.getGroup(group1.getId()).getPermissions()).contains(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); assertThat(storage.getGroup(group2.getId()).getPermissions()).doesNotContain(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); @@ -363,7 +362,7 @@ public void patchConfig() { patchedFormExpected.setShared(false); assertThat(storage.getFormConfig(formId)).usingRecursiveComparison() - .ignoringFields("cachedId", FormConfig.Fields.creationTime) + .ignoringFields(fieldsToIgnore) .isEqualTo(patchedFormExpected); assertThat(storage.getGroup(group1.getId()).getPermissions()).doesNotContain(FormConfigPermission.onInstance(AbilitySets.SHAREHOLDER, formId)); diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java index 018d1d9aef..d201265ca3 100644 --- a/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java +++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/TestForm.java @@ -11,11 +11,11 @@ import com.bakdata.conquery.apiv1.query.QueryDescription; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import 
com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.QueryResolveContext; import com.bakdata.conquery.models.query.Visitable; import com.bakdata.conquery.models.worker.DatasetRegistry; @@ -25,7 +25,7 @@ public abstract class TestForm extends Form implements InternalForm { @Override - public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { + public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry datasetRegistry) { return new ManagedInternalForm<>(this, user, submittedDataset, storage, datasetRegistry); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java b/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java index aa939d6fde..a145aa9810 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/DownloadLinkGeneration.java @@ -2,12 +2,15 @@ import static org.assertj.core.api.Assertions.assertThat; +import java.io.InputStream; import java.net.URI; import java.util.Set; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.execution.ResultAsset; +import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.integration.common.IntegrationUtils; +import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; @@ -32,8 +35,8 @@ public void execute(StandaloneSupport conquery) throws Exception { final User user = new User("testU", "testU", storage); - final String testJson = 
In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(conquery.getDataset(), testJson); + final InputStream testJson = In.resource("/tests/query/SIMPLE_TREECONCEPT_QUERY/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().asStream(); + final QueryTest test = (QueryTest) new JsonIntegrationTest(testJson).getTestSpec(); storage.updateUser(user); @@ -41,8 +44,11 @@ public void execute(StandaloneSupport conquery) throws Exception { ValidatorHelper.failOnError(log, conquery.getValidator().validate(test)); test.importRequiredData(conquery); + // Parse the query in the context of the conquery instance, not the test, to have the IdResolver properly set + Query query = ConqueryTestSpec.parseSubTree(conquery, test.getRawQuery(), Query.class, false); + // Create execution for download - ManagedQuery exec = new ManagedQuery(test.getQuery(), user, conquery.getDataset(), storage, conquery.getDatasetRegistry()); + ManagedQuery exec = new ManagedQuery(query, user.getId(), conquery.getDataset().getId(), storage, conquery.getDatasetRegistry()); exec.setLastResultCount(100L); storage.addExecution(exec); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java index e37943068e..f9ec952a71 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/IntegrationTests.java @@ -7,7 +7,14 @@ import java.io.InputStream; import java.net.URI; import java.nio.file.Files; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; import 
java.util.stream.Stream; @@ -25,6 +32,7 @@ import com.bakdata.conquery.models.config.DatabaseConfig; import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.config.SqlConnectorConfig; +import com.bakdata.conquery.models.config.XodusStoreFactory; import com.bakdata.conquery.util.support.ConfigOverride; import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; @@ -43,9 +51,9 @@ public class IntegrationTests { public static final ObjectMapper MAPPER; - private static final ObjectWriter CONFIG_WRITER; public static final String JSON_TEST_PATTERN = ".*\\.test\\.json$"; public static final String SQL_TEST_PATTERN = ".*\\.json$"; + private static final ObjectWriter CONFIG_WRITER; static { @@ -59,15 +67,13 @@ public class IntegrationTests { CONFIG_WRITER = MAPPER.writerFor(ConqueryConfig.class); } - + @Getter + public final ConqueryConfig config = new ConqueryConfig(); private final Map reusedInstances = new HashMap<>(); - private final String defaultTestRoot; private final String defaultTestRootPackage; @Getter private final File workDir; - @Getter - public final ConqueryConfig config = new ConqueryConfig(); @SneakyThrows(IOException.class) public IntegrationTests(String defaultTestRoot, String defaultTestRootPackage) { @@ -77,6 +83,31 @@ public IntegrationTests(String defaultTestRoot, String defaultTestRootPackage) { ConfigOverride.configurePathsAndLogging(this.config, this.workDir); } + private static DynamicContainer toDynamicContainer(ResourceTree currentDir, List list) { + list.sort(Comparator.comparing(DynamicNode::getDisplayName)); + return dynamicContainer( + currentDir.getName(), + URI.create("classpath:/" + currentDir.getFullName() + "/"), + list.stream() + ); + } + + private static DynamicTest wrapError(Resource resource, String name, Exception e) { + return DynamicTest.dynamicTest( + name, + resource.getURI(), + () -> { + throw e; + } + ); + } + + private static ResourceTree 
scanForResources(String testRoot, String pattern) { + ResourceTree tree = new ResourceTree(null, null); + tree.addAll(CPSTypeIdResolver.SCAN_RESULT.getResourcesMatchingPattern(Pattern.compile("^" + testRoot + pattern))); + return tree; + } + public List jsonTests() { TestDataImporter testImporter = new WorkerTestDataImporter(); final String testRoot = Objects.requireNonNullElse(System.getenv(TestTags.TEST_DIRECTORY_ENVIRONMENT_VARIABLE), defaultTestRoot); @@ -85,6 +116,12 @@ public List jsonTests() { return collectTestTree(tree, testRoot, testImporter, dialect); } + @SneakyThrows + public Stream sqlProgrammaticTests(DatabaseConfig databaseConfig, TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { + this.config.setSqlConnectorConfig(sqlConfig); + return programmaticTests(testDataImporter, StandaloneSupport.Mode.SQL); + } + @SneakyThrows public Stream programmaticTests(TestDataImporter testImporter, StandaloneSupport.Mode mode) { String regexFilter = System.getenv(TestTags.TEST_PROGRAMMATIC_REGEX_FILTER); @@ -121,13 +158,15 @@ public Stream programmaticTests(TestDataImporter testImporter, Stan .map(programmaticIntegrationTest -> createDynamicProgrammaticTestNode(programmaticIntegrationTest, testImporter)); } - @SneakyThrows - public Stream sqlProgrammaticTests(DatabaseConfig databaseConfig, TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { - this.config.setSqlConnectorConfig(sqlConfig); - return programmaticTests(testDataImporter, StandaloneSupport.Mode.SQL); + private DynamicTest createDynamicProgrammaticTestNode(ProgrammaticIntegrationTest test, TestDataImporter testImporter) { + return DynamicTest.dynamicTest( + test.getClass().getSimpleName(), + //classpath URI + URI.create("classpath:/" + test.getClass().getName().replace('.', '/') + ".java"), + new IntegrationTest.Wrapper(test.getClass().getSimpleName(), this, test, testImporter) + ); } - @SneakyThrows public List sqlQueryTests(DatabaseConfig databaseConfig, 
TestSqlConnectorConfig sqlConfig, TestDataImporter testDataImporter) { this.config.setSqlConnectorConfig(sqlConfig); @@ -150,15 +189,6 @@ private List collectTestTree(ResourceTree tree, String testRoot, Te .collect(Collectors.toList()); } - private DynamicTest createDynamicProgrammaticTestNode(ProgrammaticIntegrationTest test, TestDataImporter testImporter) { - return DynamicTest.dynamicTest( - test.getClass().getSimpleName(), - //classpath URI - URI.create("classpath:/" + test.getClass().getName().replace('.', '/') + ".java"), - new IntegrationTest.Wrapper(test.getClass().getSimpleName(), this, test, testImporter) - ); - } - private DynamicNode collectTests(ResourceTree currentDir, TestDataImporter testImporter, Dialect sqlDialect) { if (currentDir.getValue() != null) { Optional dynamicTest = readTest(currentDir.getValue(), currentDir.getName(), testImporter, sqlDialect); @@ -173,15 +203,6 @@ private DynamicNode collectTests(ResourceTree currentDir, TestDataImporter testI return toDynamicContainer(currentDir, list); } - private static DynamicContainer toDynamicContainer(ResourceTree currentDir, List list) { - list.sort(Comparator.comparing(DynamicNode::getDisplayName)); - return dynamicContainer( - currentDir.getName(), - URI.create("classpath:/" + currentDir.getFullName() + "/"), - list.stream() - ); - } - private Optional readTest(Resource resource, String name, TestDataImporter testImporter, Dialect sqlDialect) { try (InputStream in = resource.open()) { JsonIntegrationTest test = new JsonIntegrationTest(in); @@ -198,16 +219,6 @@ private Optional readTest(Resource resource, String name, TestDataI } } - private static DynamicTest wrapError(Resource resource, String name, Exception e) { - return DynamicTest.dynamicTest( - name, - resource.getURI(), - () -> { - throw e; - } - ); - } - private DynamicTest wrapTest(Resource resource, String name, JsonIntegrationTest test, TestDataImporter testImporter) { String testLabel = 
Optional.ofNullable(test.getTestSpec().getLabel()) // If no label was defined use the filename part before the first dot @@ -236,22 +247,24 @@ public synchronized TestConquery getCachedConqueryInstance(File workDir, Conquer // This should be fast enough and a stable comparison String confString = CONFIG_WRITER.writeValueAsString(conf); if (!reusedInstances.containsKey(confString)) { - // For the overriden config we must override the ports so there are no clashes + + // For the overriden config we must override the ports and storage path (xodus) so there are no clashes // We do it here so the config "hash" is not influenced by the port settings ConfigOverride.configureRandomPorts(conf); + + if (conf.getStorage() instanceof XodusStoreFactory storeFactory) { + ConfigOverride.configureWorkdir(storeFactory, workDir.toPath().resolve(String.valueOf(confString.hashCode()))); + } + log.trace("Creating a new test conquery instance for test {}", conf); TestConquery conquery = new TestConquery(workDir, conf, testDataImporter); reusedInstances.put(confString, conquery); + + // Start the fresh instance conquery.beforeAll(); } TestConquery conquery = reusedInstances.get(confString); return conquery; } - private static ResourceTree scanForResources(String testRoot, String pattern) { - ResourceTree tree = new ResourceTree(null, null); - tree.addAll(CPSTypeIdResolver.SCAN_RESULT.getResourcesMatchingPattern(Pattern.compile("^" + testRoot + pattern))); - return tree; - } - } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java b/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java index 15428973db..5ea49928d5 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/AutoConceptUtil.java @@ -43,11 +43,11 @@ public static TreeConcept createConcept(Table table) { final TreeConcept concept = new TreeConcept(); 
concept.setName(table.getName() + CONCEPT_NAME_SUFFIX); - // Prepare connnector + // Prepare connector final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(concept); connector.setName(CONNECTOR_NAME); - connector.setTable(table); + connector.setTable(table.getId()); // Prepare selects List getAutoSelectsForColumn(Column column) { final String prefix = column.getName() + "_"; // Create basic single column selects - final LastValueSelect last = new LastValueSelect(column, null); + final LastValueSelect last = new LastValueSelect(column.getId(), null); last.setName(prefix + LastValueSelect.class.getAnnotation(CPSType.class).id()); - last.setColumn(column); + last.setColumn(column.getId()); - final FirstValueSelect first = new FirstValueSelect(column, null); + final FirstValueSelect first = new FirstValueSelect(column.getId(), null); first.setName(prefix + FirstValueSelect.class.getAnnotation(CPSType.class).id()); - first.setColumn(column); + first.setColumn(column.getId()); - final DistinctSelect distinct = new DistinctSelect(column, null); + final DistinctSelect distinct = new DistinctSelect(column.getId(), null); distinct.setName(prefix + DistinctSelect.class.getAnnotation(CPSType.class).id()); - distinct.setColumn(column); + distinct.setColumn(column.getId()); return List.of( last, diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java index 552299152c..eb9f10097d 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/IntegrationUtils.java @@ -63,7 +63,7 @@ public static void importPermissionConstellation(MetaStorage storage, Role[] rol public static Query parseQuery(StandaloneSupport support, JsonNode rawQuery) throws JSONException, IOException { - return ConqueryTestSpec.parseSubTree(support, 
rawQuery, Query.class); + return ConqueryTestSpec.parseSubTree(support, rawQuery, Query.class, true); } /** diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java b/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java index f99d089b9f..9cd0d2ede6 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/LoadingUtil.java @@ -18,10 +18,12 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import jakarta.ws.rs.client.Client; import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.client.Invocation; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.query.ConceptQuery; @@ -48,6 +50,7 @@ import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.resources.ResourceConstants; import com.bakdata.conquery.resources.admin.rest.AdminDatasetResource; +import com.bakdata.conquery.resources.admin.rest.AdminDatasetsResource; import com.bakdata.conquery.resources.hierarchies.HierarchyHelper; import com.bakdata.conquery.util.io.ConqueryMDC; import com.bakdata.conquery.util.support.StandaloneSupport; @@ -64,7 +67,21 @@ @UtilityClass public class LoadingUtil { - public static void importPreviousQueries(StandaloneSupport support, RequiredData content, User user) throws IOException, JSONException { + public static void importDataset(Client client, UriBuilder adminUriBuilder, Dataset dataset) { + + final URI uri = HierarchyHelper.hierarchicalPath(adminUriBuilder, AdminDatasetsResource.class, "addDataset") + .build(); + + final Invocation.Builder request = client.target(uri).request(MediaType.APPLICATION_JSON_TYPE); + try (final Response response = request.post(Entity.json(dataset))) { + + assertThat(response.getStatusInfo().getFamily()) 
+ .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + + public static void importPreviousQueries(StandaloneSupport support, RequiredData content, User user) throws IOException { // Load previous query results if available int id = 1; for (ResourceFile queryResults : content.getPreviousQueryResults()) { @@ -76,7 +93,7 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData ConceptQuery query = new ConceptQuery(new CQExternal(Arrays.asList("ID", "DATE_SET"), data, false)); ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedExecution managed = executionManager.createExecution(query, queryId, user, support.getNamespace(), false); + ManagedExecution managed = executionManager.createExecution(query, queryId, user.getId(), support.getNamespace(), false); user.addPermission(managed.createPermission(AbilitySets.QUERY_CREATOR)); @@ -87,11 +104,13 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData for (JsonNode queryNode : content.getPreviousQueries()) { - Query query = ConqueryTestSpec.parseSubTree(support, queryNode, Query.class); + Query query = ConqueryTestSpec.parseSubTree(support, queryNode, Query.class, false); + + // Since we don't submit the query but injecting it into the manager we need to set the id resolver UUID queryId = new UUID(0L, id++); ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedExecution managed = executionManager.createExecution(query, queryId, user, support.getNamespace(), false); + ManagedExecution managed = executionManager.createExecution(query, queryId, user.getId(), support.getNamespace(), false); user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, managed.getId())); @@ -109,13 +128,13 @@ public static void importPreviousQueries(StandaloneSupport support, RequiredData public static void 
importTables(StandaloneSupport support, List tables, boolean autoConcept) throws JSONException { for (RequiredTable rTable : tables) { - final Table table = rTable.toTable(support.getDataset(), support.getNamespace().getStorage().getCentralRegistry()); + final Table table = rTable.toTable(support.getDataset(), support.getNamespace().getStorage()); uploadTable(support, table); if (autoConcept) { final TreeConcept concept = AutoConceptUtil.createConcept(table); - uploadConcept(support, table.getDataset(), concept); + uploadConcept(support, table.getDataset().resolve(), concept); } } } @@ -133,6 +152,25 @@ private static void uploadTable(StandaloneSupport support, Table table) { } } + public static void uploadConcept(StandaloneSupport support, Dataset dataset, Concept concept) { + final URI uri = HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addConcept") + .buildFromMap(Map.of(ResourceConstants.DATASET, dataset.getId().toString())); + + final Invocation.Builder request = support.getClient().target(uri).request(MediaType.APPLICATION_JSON_TYPE); + try (final Response response = request.post(Entity.json(concept))) { + + assertThat(response.getStatusInfo().getFamily()) + .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + + public static void importTableContents(StandaloneSupport support, Collection tables) throws Exception { + List cqpps = generateCqpp(support, tables); + + importCqppFiles(support, cqpps); + } + public static List generateCqpp(StandaloneSupport support, Collection tables) throws Exception { List preprocessedFiles = new ArrayList<>(); List descriptions = new ArrayList<>(); @@ -174,6 +212,16 @@ public static List generateCqpp(StandaloneSupport support, Collection cqppFiles) { + for (File cqpp : cqppFiles) { + uploadCqpp(support, cqpp, false, Response.Status.Family.SUCCESSFUL); + } + + support.waitUntilWorkDone(); + + + } + 
public static void uploadCqpp(StandaloneSupport support, File cqpp, boolean update, Response.Status.Family expectedResponseFamily) { if(update) { assertThat(cqpp).exists(); @@ -210,22 +258,6 @@ public static void uploadCqpp(StandaloneSupport support, File cqpp, boolean upda } } - public static void importCqppFiles(StandaloneSupport support, List cqppFiles) { - for (File cqpp : cqppFiles) { - uploadCqpp(support, cqpp, false, Response.Status.Family.SUCCESSFUL); - } - - support.waitUntilWorkDone(); - - - } - - public static void importTableContents(StandaloneSupport support, Collection tables) throws Exception { - List cqpps = generateCqpp(support, tables); - - importCqppFiles(support, cqpps); - } - public static void importConcepts(StandaloneSupport support, ArrayNode rawConcepts) throws JSONException, IOException { Dataset dataset = support.getDataset(); @@ -233,7 +265,7 @@ public static void importConcepts(StandaloneSupport support, ArrayNode rawConcep support, rawConcepts, Concept.class, - c -> c.setDataset(support.getDataset()) + c -> c.setDataset(support.getDataset().getDataset()) ); for (Concept concept : concepts) { @@ -241,39 +273,25 @@ public static void importConcepts(StandaloneSupport support, ArrayNode rawConcep } } - public static void uploadConcept(StandaloneSupport support, Dataset dataset, Concept concept) { - final URI uri = HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addConcept") - .buildFromMap(Map.of(ResourceConstants.DATASET, dataset.getId().toString())); + public static void updateConcepts(StandaloneSupport support, ArrayNode rawConcepts, @NonNull Response.Status.Family expectedResponseFamily) + throws IOException { + List> concepts = getConcepts(support, rawConcepts); + for (Concept concept : concepts) { + updateConcept(support, concept, expectedResponseFamily); + } - final Invocation.Builder request = support.getClient().target(uri).request(MediaType.APPLICATION_JSON_TYPE); - try (final 
Response response = request.post(Entity.json(concept))) { - assertThat(response.getStatusInfo().getFamily()) - .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) - .isEqualTo(Response.Status.Family.SUCCESSFUL); - } } - private static List> getConcepts(StandaloneSupport support, ArrayNode rawConcepts) throws IOException { return ConqueryTestSpec.parseSubTreeList( support, rawConcepts, Concept.class, - c -> c.setDataset(support.getDataset()) + c -> c.setDataset(support.getDataset().getDataset()) ); } - public static void updateConcepts(StandaloneSupport support, ArrayNode rawConcepts, @NonNull Response.Status.Family expectedResponseFamily) - throws IOException { - List> concepts = getConcepts(support, rawConcepts); - for (Concept concept : concepts) { - updateConcept(support, concept, expectedResponseFamily); - } - - - } - private static void updateConcept(@NonNull StandaloneSupport support, @NonNull Concept concept, @NonNull Response.Status.Family expectedResponseFamily) { final URI conceptURI = @@ -300,7 +318,7 @@ public static void importIdMapping(StandaloneSupport support, RequiredData conte } try (InputStream in = content.getIdMapping().stream()) { - support.getDatasetsProcessor().setIdMapping(in, support.getNamespace()); + support.getAdminDatasetsProcessor().setIdMapping(in, support.getNamespace()); } } @@ -309,10 +327,9 @@ public static Map importSecondaryIds(StandaloneS for (RequiredSecondaryId required : secondaryIds) { final SecondaryIdDescription description = - required.toSecondaryId(support.getDataset(), support.getDatasetRegistry().findRegistry(support.getDataset().getId())); + required.toSecondaryId(support.getDataset()); - support.getDatasetsProcessor() - .addSecondaryId(support.getNamespace(), description); + uploadSecondaryId(support, description); out.put(description.getName(), description); } @@ -320,6 +337,27 @@ public static Map importSecondaryIds(StandaloneS return out; } + private static void 
uploadSecondaryId(@NonNull StandaloneSupport support, @NonNull SecondaryIdDescription secondaryIdDescription) { + final URI + conceptURI = + HierarchyHelper.hierarchicalPath(support.defaultAdminURIBuilder(), AdminDatasetResource.class, "addSecondaryId") + .buildFromMap(Map.of( + ResourceConstants.DATASET, support.getDataset().getId() + )); + + final Invocation.Builder request = support.getClient() + .target(conceptURI) + .request(MediaType.APPLICATION_JSON); + try (final Response response = request + .post(Entity.entity(secondaryIdDescription, MediaType.APPLICATION_JSON_TYPE))) { + + + assertThat(response.getStatusInfo().getFamily()) + .describedAs(new LazyTextDescription(() -> response.readEntity(String.class))) + .isEqualTo(Response.Status.Family.SUCCESSFUL); + } + } + public static void importInternToExternMappers(StandaloneSupport support, List internToExternMappers) { for (InternToExternMapper internToExternMapper : internToExternMappers) { uploadInternalToExternalMappings(support, internToExternMapper); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java index 3d187f970a..4bf413bb23 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredColumn.java @@ -1,17 +1,17 @@ package com.bakdata.conquery.integration.common; import javax.annotation.Nullable; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.events.MajorTypeId; -import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import 
com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId; import com.bakdata.conquery.models.preproc.outputs.CopyOutput; import com.bakdata.conquery.models.preproc.outputs.OutputDescription; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import org.assertj.core.util.Strings; @@ -43,7 +43,7 @@ public OutputDescription createOutput() { return out; } - public Column toColumn(Table table, CentralRegistry storage) { + public Column toColumn(Table table, NamespacedStorageProvider idResolver) { Column col = new Column(); col.setName(name); col.setType(type); @@ -51,9 +51,10 @@ public Column toColumn(Table table, CentralRegistry storage) { col.setDescription(description); if (!Strings.isNullOrEmpty(secondaryId)) { - final SecondaryIdDescription description = storage.resolve(new SecondaryIdDescriptionId(table.getDataset().getId(), secondaryId)); + SecondaryIdDescriptionId secondaryIdDescriptionId = new SecondaryIdDescriptionId(table.getDataset(), secondaryId); + final SecondaryIdDescription description = secondaryIdDescriptionId.get(idResolver.getStorage(table.getDataset())); - col.setSecondaryId(description); + col.setSecondaryId(description.getId()); } return col; diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java index 6b2db6d950..7a11ec2aaf 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredSecondaryId.java @@ -2,17 +2,16 @@ import java.io.IOException; import java.util.Objects; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.datasets.Dataset; import 
com.bakdata.conquery.models.datasets.SecondaryIdDescription; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.InternToExternMapperId; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import jakarta.validation.constraints.NotEmpty; import lombok.Data; @Data @@ -25,14 +24,14 @@ public class RequiredSecondaryId { public final String mapping; - public SecondaryIdDescription toSecondaryId(Dataset dataset, CentralRegistry centralRegistry) { + public SecondaryIdDescription toSecondaryId(Dataset dataset) { final SecondaryIdDescription desc = new SecondaryIdDescription(); desc.setName(getName()); desc.setDescription(getDescription()); desc.setLabel(getLabel()); if (mapping != null) { - desc.setMapping(centralRegistry.resolve(InternToExternMapperId.Parser.INSTANCE.parsePrefixed(dataset.getName(), mapping))); + desc.setMapping(InternToExternMapperId.Parser.INSTANCE.parsePrefixed(dataset.getName(), mapping)); } return desc; diff --git a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java index 3de4cf93bf..87a05756c2 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/common/RequiredTable.java @@ -3,19 +3,19 @@ import java.io.IOException; import java.util.Arrays; import java.util.Objects; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotEmpty; +import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; 
-import com.bakdata.conquery.models.identifiable.CentralRegistry; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.databind.JsonMappingException; -import jakarta.validation.Valid; -import jakarta.validation.constraints.NotEmpty; -import jakarta.validation.constraints.NotNull; import lombok.Getter; import lombok.Setter; import net.minidev.json.annotate.JsonIgnore; @@ -24,39 +24,39 @@ @Setter public class RequiredTable { - @NotNull - @NotEmpty - private String name; - @NotNull - private ResourceFile csv; - @NotNull - @Valid - private RequiredColumn primaryColumn; - @NotEmpty - @Valid - private RequiredColumn[] columns; - @JsonIgnore - private String importName; + @NotNull + @NotEmpty + private String name; + @NotNull + private ResourceFile csv; + @NotNull + @Valid + private RequiredColumn primaryColumn; + @NotEmpty + @Valid + private RequiredColumn[] columns; + @JsonIgnore + private String importName; - public Table toTable(Dataset dataset, CentralRegistry centralRegistry) { - Table table = new Table(); - table.setPrimaryColumn(primaryColumn.toColumn(table, centralRegistry)); - table.setDataset(dataset); - table.setName(name); - table.setColumns(Arrays.stream(columns) - .map(col -> col.toColumn(table, centralRegistry)).toArray(Column[]::new)); + @JsonCreator + public static RequiredTable fromFile(String fileResource) throws JsonParseException, JsonMappingException, IOException { + return Jackson.MAPPER.readValue( + Objects.requireNonNull( + IntegrationTest.class.getResourceAsStream(fileResource), + fileResource + " not found" + ), + RequiredTable.class + ); + } - return table; - } + public Table toTable(Dataset dataset, NamespacedStorageProvider idResolver) { + Table table = new Table(); + table.setPrimaryColumn(primaryColumn.toColumn(table, idResolver)); + table.setDataset(dataset.getId()); + 
table.setName(name); + table.setColumns(Arrays.stream(columns) + .map(col -> col.toColumn(table, idResolver)).toArray(Column[]::new)); - @JsonCreator - public static RequiredTable fromFile(String fileResource) throws JsonParseException, JsonMappingException, IOException { - return Jackson.MAPPER.readValue( - Objects.requireNonNull( - IntegrationTest.class.getResourceAsStream(fileResource), - fileResource + " not found" - ), - RequiredTable.class - ); - } + return table; + } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java index f886d29a39..0cedb7574c 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/ConqueryTestSpec.java @@ -4,22 +4,21 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Consumer; - import javax.annotation.Nullable; import com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.io.cps.CPSBase; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.jackson.View; -import com.bakdata.conquery.models.config.ColumnConfig; +import com.bakdata.conquery.io.storage.FailingProvider; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.Dialect; import com.bakdata.conquery.models.config.IdColumnConfig; -import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.ids.IdUtil; +import com.bakdata.conquery.util.FailingMetaStorage; import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.bakdata.conquery.util.support.StandaloneSupport; import 
com.bakdata.conquery.util.support.TestSupport; @@ -37,67 +36,53 @@ @Setter @Getter -@JsonTypeInfo(use = JsonTypeInfo.Id.CUSTOM, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonTypeInfo(use = JsonTypeInfo.Id.CUSTOM, property = "type") @Slf4j @CPSBase public abstract class ConqueryTestSpec { + @Nullable + SqlSpec sqlSpec; private String label; - @Nullable private String description; - @Nullable private ConqueryConfig config; - - @Nullable - SqlSpec sqlSpec; - // default IdColumnConfig for SQL mode private IdColumnConfig idColumns = null; - public ConqueryConfig overrideConfig(ConqueryConfig config) { - - if (getConfig() != null) { - final ConqueryConfig conqueryConfig = getConfig().withStorage(new NonPersistentStoreFactory()); - conqueryConfig.setLoggingFactory(config.getLoggingFactory()); - return conqueryConfig; - } - - final IdColumnConfig idColumnConfig = idColumns != null ? idColumns : config.getIdColumns(); - return config.withIdColumns(idColumnConfig) - .withStorage(new NonPersistentStoreFactory()); - } - - public abstract void executeTest(StandaloneSupport support) throws Exception; - - public abstract void importRequiredData(StandaloneSupport support) throws Exception; - - - @Override - public String toString() { - return label; - } - - public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass) throws IOException, JSONException { - return parseSubTree(support, node, expectedClass, null); + public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass, boolean usePlaceholderResolvers) + throws IOException { + return parseSubTree(support, node, expectedClass, null, usePlaceholderResolvers); } - public static T parseSubTree(TestSupport support, JsonNode node, Class expectedClass, Consumer modifierBeforeValidation) throws IOException { - return parseSubTree(support, node, Jackson.MAPPER.getTypeFactory().constructParametricType(expectedClass, new JavaType[0]), modifierBeforeValidation); + public 
static T parseSubTree( + TestSupport support, + JsonNode node, + Class expectedClass, + Consumer modifierBeforeValidation, + boolean usePlaceholderResolvers + ) throws IOException { + return parseSubTree(support, node, Jackson.MAPPER.getTypeFactory() + .constructParametricType(expectedClass, new JavaType[0]), modifierBeforeValidation, usePlaceholderResolvers); } - public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType) throws IOException, JSONException { - return parseSubTree(support, node, expectedType, null); - } + public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, Consumer modifierBeforeValidation, + boolean usePlaceholderResolvers) throws IOException { + final ObjectMapper om = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + final ObjectMapper mapper = om.addHandler(new DatasetPlaceHolderFiller(support)); - public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, Consumer modifierBeforeValidation) throws IOException { - final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - support.getDataset().injectInto(mapper); - support.getNamespace().injectInto(mapper); - support.getMetaStorage().injectInto(mapper); support.getConfig().injectInto(mapper); - mapper.addHandler(new DatasetPlaceHolderFiller(support)); + support.getDataset().injectInto(mapper); + if (usePlaceholderResolvers) { + FailingProvider.INSTANCE.injectInto(mapper); + FailingMetaStorage.INSTANCE.injectInto(mapper); + } + else { + support.getMetaStorage().injectInto(mapper); + support.getNamespace().getStorage().injectInto(mapper); + } + T result = mapper.readerFor(expectedType).readValue(node); @@ -105,18 +90,27 @@ public static T parseSubTree(TestSupport support, JsonNode node, JavaType ex modifierBeforeValidation.accept(result); } - ValidatorHelper.failOnError(log, support.getValidator().validate(result)); + if (!usePlaceholderResolvers) { + // With placeholders the 
validation likely fails, so we skip it there + ValidatorHelper.failOnError(log, support.getValidator().validate(result)); + } return result; } + public static T parseSubTree(TestSupport support, JsonNode node, JavaType expectedType, boolean usePlaceholderResolvers) + throws IOException { + return parseSubTree(support, node, expectedType, null, usePlaceholderResolvers); + } + public static List parseSubTreeList(TestSupport support, ArrayNode node, Class expectedType, Consumer modifierBeforeValidation) throws IOException { - final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - support.getDataset().injectInto(mapper); - support.getNamespace().injectInto(mapper); - support.getMetaStorage().injectInto(mapper); - support.getConfig().injectInto(mapper); - mapper.addHandler(new DatasetPlaceHolderFiller(support)); + final ObjectMapper om = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + final ObjectMapper mapper = om.addHandler(new DatasetPlaceHolderFiller(support)); + + // Inject dataset, so that namespaced ids that are not prefixed with in the test-spec are get prefixed + support.getNamespace().getDataset().injectInto(mapper); + FailingProvider.INSTANCE.injectInto(mapper); + FailingMetaStorage.INSTANCE.injectInto(mapper); mapper.setConfig(mapper.getDeserializationConfig().withView(View.Api.class)); @@ -145,11 +139,32 @@ public static List parseSubTreeList(TestSupport support, ArrayNode node, modifierBeforeValidation.accept(value); } result.add(value); - ValidatorHelper.failOnError(log, support.getValidator().validate(value)); } return result; } + public ConqueryConfig overrideConfig(ConqueryConfig config) { + + if (getConfig() != null) { + final ConqueryConfig conqueryConfig = getConfig().withStorage(new NonPersistentStoreFactory()); + conqueryConfig.setLoggingFactory(config.getLoggingFactory()); + return conqueryConfig; + } + + final IdColumnConfig idColumnConfig = idColumns != null ? 
idColumns : config.getIdColumns(); + return config.withIdColumns(idColumnConfig) + .withStorage(new NonPersistentStoreFactory()); + } + + public abstract void executeTest(StandaloneSupport support) throws Exception; + + public abstract void importRequiredData(StandaloneSupport support) throws Exception; + + @Override + public String toString() { + return label; + } + public boolean isEnabled(Dialect sqlDialect) { return sqlSpec == null || sqlSpec.isEnabled() && sqlSpec.isAllowedTest(sqlDialect); } @@ -163,9 +178,9 @@ private static class DatasetPlaceHolderFiller extends DeserializationProblemHand private final TestSupport support; @Override - public Object handleWeirdStringValue(DeserializationContext ctxt, Class targetType, String valueToConvert, String failureMsg) throws IOException { - IdUtil.Parser parser = IdUtil.>>createParser((Class) targetType); - return parser.parsePrefixed(support.getDataset().getId().toString(), valueToConvert); + public Object handleWeirdStringValue(DeserializationContext ctxt, Class targetType, String valueToConvert, String failureMsg) { + IdUtil.Parser parser = IdUtil.>>createParser((Class) targetType); + return parser.parsePrefixed(support.getDataset().getId().getName(), valueToConvert); } } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java index eb5d2edcbe..46190a6e75 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/FormTest.java @@ -1,41 +1,32 @@ package com.bakdata.conquery.integration.json; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.util.List; import java.util.Locale; import java.util.Map; import java.util.OptionalLong; -import 
java.util.concurrent.TimeUnit; import jakarta.validation.Valid; import jakarta.validation.constraints.NotEmpty; import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.forms.Form; -import com.bakdata.conquery.integration.common.LoadingUtil; +import com.bakdata.conquery.integration.common.IntegrationUtils; import com.bakdata.conquery.integration.common.RequiredData; import com.bakdata.conquery.integration.common.ResourceFile; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.result.csv.CsvRenderer; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.forms.managed.ManagedForm; import com.bakdata.conquery.models.forms.managed.ManagedInternalForm; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.IdPrinter; -import com.bakdata.conquery.models.query.ExecutionManager; -import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.query.SingleTableResult; -import com.bakdata.conquery.models.query.resultinfo.ResultInfo; -import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.io.IdColumnUtil; import com.bakdata.conquery.util.support.StandaloneSupport; import com.fasterxml.jackson.annotation.JsonIgnore; @@ -77,68 +68,31 @@ public class FormTest extends ConqueryTestSpec { @JsonIgnore private Form form; - private static void importConcepts(StandaloneSupport support, ArrayNode rawConcepts) throws JSONException, IOException { - if (rawConcepts == null) { - return; - } - - Dataset dataset = 
support.getDataset(); - - List> concepts = parseSubTreeList( - support, - rawConcepts, - Concept.class, - c -> c.setDataset(support.getDataset()) - ); - - for (Concept concept : concepts) { - LoadingUtil.uploadConcept(support, dataset, concept); - } - } - @ValidationMethod(message = "Form test defines no concepts. Neither explicit nor automatic concepts") public boolean isWithConcepts() { return rawConcepts != null || content.isAutoConcept(); } - @Override - public void importRequiredData(StandaloneSupport support) throws Exception { - support.getTestImporter().importFormTestData(support, this); - log.info("{} PARSE JSON FORM DESCRIPTION", getLabel()); - form = parseForm(support); - } - - private Form parseForm(StandaloneSupport support) throws JSONException, IOException { - return parseSubTree(support, rawForm, Form.class); - } - @Override public void executeTest(StandaloneSupport support) throws Exception { - Namespace namespace = support.getNamespace(); - - assertThat(support.getValidator().validate(form)) - .describedAs("Form Validation Errors") - .isEmpty(); - ExecutionManager executionManager = support.getNamespace().getExecutionManager(); - ManagedInternalForm managedForm = (ManagedInternalForm) executionManager - .runQuery(namespace, form, support.getTestUser(), support.getConfig(), false); + final ManagedExecutionId managedExecutionId = IntegrationUtils.assertQueryResult(support, form, -1, ExecutionState.DONE, support.getTestUser(), 201); - namespace.getExecutionManager().awaitDone(managedForm, 1, TimeUnit.MINUTES); + log.info("{} QUERIES EXECUTED", getLabel()); - if (managedForm.getState() != ExecutionState.DONE) { - if (managedForm.getState() == ExecutionState.FAILED) { - fail(getLabel() + " Query failed"); - } - else { - fail(getLabel() + " not finished after 10 min"); - } - } + checkResults(support, (ManagedInternalForm) support.getMetaStorage().getExecution(managedExecutionId), support.getTestUser()); + } - log.info("{} QUERIES EXECUTED", getLabel()); 
+ @Override + public void importRequiredData(StandaloneSupport support) throws Exception { + support.getTestImporter().importFormTestData(support, this); + log.info("{} PARSE JSON FORM DESCRIPTION", getLabel()); + form = parseForm(support); + } - checkResults(support, managedForm, support.getTestUser()); + private Form parseForm(StandaloneSupport support) throws IOException { + return parseSubTree(support, rawForm, Form.class, true); } private void checkResults(StandaloneSupport standaloneSupport, ManagedInternalForm managedForm, User user) throws IOException { @@ -161,10 +115,8 @@ private void checkResults(StandaloneSupport standaloneSupport, ManagedInternalFo /** * The form produces only one result, so the result is directly requested. - * - * @see FormTest#checkMultipleResult(Map, ConqueryConfig, PrintSettings) */ - private void checkSingleResult(F managedForm, ConqueryConfig config, PrintSettings printSettings) + private & SingleTableResult> void checkSingleResult(F managedForm, ConqueryConfig config, PrintSettings printSettings) throws IOException { @@ -191,41 +143,4 @@ private void checkSingleResult(F man } - - /** - * Checks result of subqueries instead of form result. 
- * - * @see FormTest#checkSingleResult(ManagedForm, ConqueryConfig, PrintSettings) - */ - private void checkMultipleResult(Map> managedMapping, ConqueryConfig config, PrintSettings printSettings) throws IOException { - for (Map.Entry> managed : managedMapping.entrySet()) { - List resultInfos = managed.getValue().get(0).getResultInfos(); - log.info("{} CSV TESTING: {}", getLabel(), managed.getKey()); - - ByteArrayOutputStream output = new ByteArrayOutputStream(); - - final CsvWriter writer = config.getCsv().createWriter(output); - - CsvRenderer renderer = new CsvRenderer(writer, printSettings); - - renderer.toCSV( - config.getIdColumns().getIdResultInfos(), - resultInfos, - managed.getValue() - .stream() - .flatMap(managedQuery -> managedQuery.streamResults(OptionalLong.empty())), printSettings - ); - - writer.close(); - output.close(); - - assertThat(In.stream(new ByteArrayInputStream(output.toByteArray())).withUTF8().readLines()) - .as("Checking result " + managed.getKey()) - .containsExactlyInAnyOrderElementsOf( - In.stream(expectedCsv.get(managed.getKey()).stream()) - .withUTF8() - .readLines() - ); - } - } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java b/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java index ba050711b1..06be3476a6 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/WorkerTestDataImporter.java @@ -10,6 +10,7 @@ import com.bakdata.conquery.integration.common.RequiredTable; import com.bakdata.conquery.integration.json.filter.FilterTest; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; +import com.bakdata.conquery.models.identifiable.ids.specific.TableId; import com.bakdata.conquery.util.support.StandaloneSupport; public class WorkerTestDataImporter implements TestDataImporter { @@ -53,12 +54,18 @@ public void 
importFilterTestData(StandaloneSupport support, FilterTest test) thr importSearchIndexes(support, test.getSearchIndices()); importTables(support, content.getTables(), content.isAutoConcept()); + test.setConnector(ConqueryTestSpec.parseSubTree( - support, - test.getRawConnector(), - ConceptTreeConnector.class, - conn -> conn.setConcept(test.getConcept()) - )); + support, + test.getRawConnector(), + ConceptTreeConnector.class, + conn -> { + conn.setTable(new TableId(support.getDataset().getDataset(), FilterTest.TABLE_NAME)); + conn.setConcept(test.getConcept()); + }, + true + ) + ); test.getConcept().setConnectors(Collections.singletonList((ConceptTreeConnector) test.getConnector())); waitUntilDone(support, () -> LoadingUtil.uploadConcept(support, support.getDataset(), test.getConcept())); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java index dade5aca56..2c09a96303 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/json/filter/FilterTest.java @@ -6,7 +6,6 @@ import java.time.LocalDate; import java.util.Collections; import java.util.List; - import jakarta.validation.constraints.NotNull; import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; @@ -28,6 +27,7 @@ import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ConceptConfigurationException; import com.bakdata.conquery.models.exceptions.JSONException; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; @@ -46,6 +46,8 @@ @CPSType(id = "FILTER_TEST", base = ConqueryTestSpec.class) public class 
FilterTest extends AbstractQueryEngineTest { + public static final String CONCEPT_LABEL = "concept"; + public static final String TABLE_NAME = "table"; private ResourceFile expectedCsv; @NotNull @@ -79,22 +81,23 @@ public class FilterTest extends AbstractQueryEngineTest { @JsonIgnore private Connector connector; + @JsonIgnore private TreeConcept concept; @Override public void importRequiredData(StandaloneSupport support) throws Exception { - ((ObjectNode) rawContent.get("tables")).put("name", "table"); + ((ObjectNode) rawContent.get("tables")).put("name", TABLE_NAME); - content = parseSubTree(support, rawContent, RequiredData.class); + content = parseSubTree(support, rawContent, RequiredData.class, true); concept = new TreeConcept(); - concept.setLabel("concept"); + concept.setLabel(CONCEPT_LABEL); - concept.setDataset(support.getDataset()); + concept.setDataset(new DatasetId(support.getDataset().getId().getName())); rawConnector.put("name", "connector"); - rawConnector.put("table", "table"); + rawConnector.put(TABLE_NAME, TABLE_NAME); ((ObjectNode) rawConnector.get("filters")).put("name", "filter"); @@ -113,18 +116,18 @@ private Query parseQuery(StandaloneSupport support) throws JSONException, IOExce } - FilterValue result = parseSubTree(support, rawFilterValue, Jackson.MAPPER.getTypeFactory().constructType(FilterValue.class)); + FilterValue result = parseSubTree(support, rawFilterValue, Jackson.MAPPER.getTypeFactory().constructType(FilterValue.class), false); CQTable cqTable = new CQTable(); cqTable.setFilters(Collections.singletonList(result)); - cqTable.setConnector(connector); + cqTable.setConnector(connector.getId()); CQConcept cqConcept = new CQConcept(); cqTable.setConcept(cqConcept); - cqConcept.setElements(Collections.singletonList(concept)); + cqConcept.setElements(Collections.singletonList(concept.getId())); cqConcept.setTables(Collections.singletonList(cqTable)); if (dateRange != null) { @@ -134,15 +137,11 @@ private Query parseQuery(StandaloneSupport 
support) throws JSONException, IOExce return new ConceptQuery(cqConcept); } - @Override - public Query getQuery() { - return query; - } - @Override public void executeTest(StandaloneSupport standaloneSupport) throws IOException { try { - final FrontendFilterConfiguration.Top actual = connector.getFilters().iterator().next().createFrontendConfig(standaloneSupport.getConfig()); + final Connector internalConnector = standaloneSupport.getNamespace().getStorage().getAllConcepts().findFirst().get().getConnectors().get(0); + final FrontendFilterConfiguration.Top actual = internalConnector.getFilters().iterator().next().createFrontendConfig(standaloneSupport.getConfig()); if (expectedFrontendConfig != null) { log.info("Checking actual FrontendConfig: {}", actual); @@ -155,4 +154,9 @@ public void executeTest(StandaloneSupport standaloneSupport) throws IOException super.executeTest(standaloneSupport); } + + @Override + public Query getQuery() { + return query; + } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java index 603b94445f..9076db091a 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ConceptPermissionTest.java @@ -58,14 +58,14 @@ public void execute(StandaloneSupport conquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - // Id of the lone concept that is used in the test. - Concept conceptId = conquery.getNamespace().getStorage().getAllConcepts().iterator().next(); + // The lone concept that is used in the test. 
+ Concept concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next(); IntegrationUtils.assertQueryResult(conquery, query, -1, ExecutionState.FAILED, user, 403); // Add the necessary Permission { - final ConqueryPermission permission = conceptId.createPermission(Ability.READ.asSet()); + final ConqueryPermission permission = concept.createPermission(Ability.READ.asSet()); log.info("Adding the Permission[{}] to User[{}]", permission, user); user.addPermission(permission); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java index 476e6a4fae..9aa3cfacd6 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityExportTest.java @@ -12,6 +12,9 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.AdditionalMediaTypes; import com.bakdata.conquery.apiv1.execution.ResultAsset; @@ -22,11 +25,10 @@ import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.PreviewConfig; -import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.SelectId; import com.bakdata.conquery.models.query.ColumnDescriptor; import 
com.bakdata.conquery.models.query.preview.EntityPreviewStatus; @@ -40,9 +42,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.assertj.core.description.LazyTextDescription; @@ -123,17 +122,16 @@ public void execute(String name, TestConquery testConquery) throws Exception { final URI entityExport = HierarchyHelper.hierarchicalPath(conquery.defaultApiURIBuilder(), DatasetQueryResource.class, "getEntityData") .buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName())); - // Api uses NsIdRef so we have to use the real objects here. - final List allConnectors = conquery.getNamespaceStorage().getAllConcepts().stream() - .map(Concept::getConnectors) - .flatMap(List::stream) - .collect(Collectors.toList()); - final EntityPreviewStatus result; try (Response allEntityDataResponse = conquery.getClient().target(entityExport) .request(MediaType.APPLICATION_JSON_TYPE) .header("Accept-Language", "en-Us") - .post(Entity.json(new EntityPreviewRequest("ID", "1", dateRange, allConnectors)))) { + .post(Entity.json(new EntityPreviewRequest("ID", "1", dateRange, + List.of( + ConnectorId.Parser.INSTANCE.parse(dataset.getName() + ".tree1.connector"), + ConnectorId.Parser.INSTANCE.parse(dataset.getName() + ".tree2.connector") + ) + )))) { assertThat(allEntityDataResponse.getStatusInfo().getFamily()) .describedAs(new LazyTextDescription(() -> allEntityDataResponse.readEntity(String.class))) @@ -153,7 +151,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { new ColumnDescriptor( "Values", "Values", "Description", "LIST[STRING]", Set.of(new SemanticType.SelectResultT( - conquery.getNamespace().getCentralRegistry().resolve(valuesSelectId) + valuesSelectId )) ) ); @@ -182,7 +180,7 
@@ public void execute(String name, TestConquery testConquery) throws Exception { 9, ResultType.Primitive.INTEGER.typeInfo(), null, - Set.of(new SemanticType.SelectResultT(conquery.getDatasetRegistry().resolve(SelectId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.age")))) + Set.of(new SemanticType.SelectResultT(SelectId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.age"))) ), new EntityPreviewStatus.Info( "Values", @@ -190,14 +188,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { new ResultType.ListT(ResultType.Primitive.STRING).typeInfo(), null, Set.of( - new SemanticType.SelectResultT(conquery.getDatasetRegistry().resolve(valuesSelectId)) + new SemanticType.SelectResultT(valuesSelectId) ) ) ); - - assertThat(result.getColumnDescriptions()) .isNotNull() .isNotEmpty(); @@ -210,8 +206,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(t2values.get().getDescription()).isEqualTo("This is a column"); assertThat(t2values.get().getSemantics()) .contains( - new SemanticType.ConceptColumnT(conquery.getDatasetRegistry() - .resolve(ConceptId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree2"))) + new SemanticType.ConceptColumnT(ConceptId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree2")) ); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java index 7009dcf4d3..0b830e1628 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/EntityResolveTest.java @@ -8,6 +8,9 @@ import java.util.List; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue; 
import com.bakdata.conquery.integration.common.LoadingUtil; @@ -24,9 +27,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.assertj.core.description.LazyTextDescription; @@ -75,9 +75,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName())); // Api uses NsIdRef, so we have to use the real objects here. - final Filter filter = conquery.getDatasetRegistry().resolve( - FilterId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.values-filter") - ); + FilterId filterId = FilterId.Parser.INSTANCE.parsePrefixed(dataset.getName(), "tree1.connector.values-filter"); + Filter filter = filterId.get(conquery.getNamespaceStorage()); final List> result; @@ -87,8 +86,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .post(Entity.json( new FilterValue[]{ // Bit lazy, but this explicitly or's two filters - new FilterValue.CQMultiSelectFilter((Filter>) filter, Set.of("A1")), - new FilterValue.CQMultiSelectFilter((Filter>) filter, Set.of("B2")) + new FilterValue.CQMultiSelectFilter(filter.getId(), Set.of("A1")), + new FilterValue.CQMultiSelectFilter(filter.getId(), Set.of("B2")) } ))) { diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java index ef5723c00a..570ace712d 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ExternalFormBackendTest.java @@ -46,80 +46,83 @@ @Slf4j public class ExternalFormBackendTest 
implements ProgrammaticIntegrationTest { - public static final String FORM_BACKEND_ID = "mock"; private ClientAndServer formBackend; @Override public void execute(String name, TestConquery testConquery) throws Exception { - - final StandaloneSupport support = testConquery.getSupport(name); - - log.info("Test health"); - assertThat(testConquery.getStandaloneCommand() - .getManagerNode() - .getEnvironment() - .healthChecks() - .runHealthCheck(FORM_BACKEND_ID) - .isHealthy()) - .describedAs("Checking health of form backend").isTrue(); - - log.info("Get external form configs"); - final FormScanner formScanner = testConquery.getStandaloneCommand().getManagerNode().getFormScanner(); - formScanner.execute(Collections.emptyMap(), null); - - final String externalFormId = FormBackendConfig.createSubTypedId("SOME_EXTERNAL_FORM"); - assertThat(FormScanner.FRONTEND_FORM_CONFIGS.keySet()).contains(externalFormId); - - log.info("Get version info"); - final UriBuilder apiUriBuilder = testConquery.getSupport(name).defaultApiURIBuilder(); - final URI frontendConfigURI = HierarchyHelper.hierarchicalPath(apiUriBuilder.clone(), ConfigResource.class, "getFrontendConfig") - .build(); - final FrontendConfiguration - frontendConfiguration = - support.getClient().target(frontendConfigURI).request(MediaType.APPLICATION_JSON_TYPE).get().readEntity(FrontendConfiguration.class); - - assertThat(frontendConfiguration.versions()) - .describedAs("Checking health of form backend") - .contains(new VersionContainer(FORM_BACKEND_ID, "3.2.1-ge966c285", ZonedDateTime.parse("2007-08-31T16:47:00+00:00"))); // example value from OpenAPI Spec - - log.info("Send an external form"); - final User testUser = support.getTestUser(); - final ManagedExecutionId - managedExecutionId = - IntegrationUtils.assertQueryResult(support, String.format("{\"type\": \"%s\", \"testProp\": \"testVal\"}", externalFormId), -1, ExecutionState.DONE, testUser, 201); - - log.info("Request state"); - assert managedExecutionId != null; - 
final FullExecutionStatus executionStatus = IntegrationUtils.getExecutionStatus(support, managedExecutionId, testUser, 200); + try { + + final StandaloneSupport support = testConquery.getSupport(name); + + log.info("Test health"); + assertThat(testConquery.getStandaloneCommand() + .getManagerNode() + .getEnvironment() + .healthChecks() + .runHealthCheck(FORM_BACKEND_ID) + .isHealthy()) + .describedAs("Checking health of form backend").isTrue(); + + log.info("Get external form configs"); + final FormScanner formScanner = testConquery.getStandaloneCommand().getManagerNode().getFormScanner(); + formScanner.execute(Collections.emptyMap(), null); + + final String externalFormId = FormBackendConfig.createSubTypedId("SOME_EXTERNAL_FORM"); + assertThat(FormScanner.FRONTEND_FORM_CONFIGS.keySet()).contains(externalFormId); + + log.info("Get version info"); + final UriBuilder apiUriBuilder = testConquery.getSupport(name).defaultApiURIBuilder(); + final URI frontendConfigURI = HierarchyHelper.hierarchicalPath(apiUriBuilder.clone(), ConfigResource.class, "getFrontendConfig") + .build(); + final FrontendConfiguration + frontendConfiguration = + support.getClient().target(frontendConfigURI).request(MediaType.APPLICATION_JSON_TYPE).get().readEntity(FrontendConfiguration.class); + + assertThat(frontendConfiguration.versions()) + .describedAs("Checking health of form backend") + .contains(new VersionContainer(FORM_BACKEND_ID, "3.2.1-ge966c285", ZonedDateTime.parse("2007-08-31T16:47:00+00:00"))); // example value from OpenAPI Spec + + log.info("Send an external form"); + final User testUser = support.getTestUser(); + final ManagedExecutionId + managedExecutionId = + IntegrationUtils.assertQueryResult(support, String.format("{\"type\": \"%s\", \"testProp\": \"testVal\"}", externalFormId), -1, ExecutionState.DONE, testUser, 201); + + log.info("Request state"); + assert managedExecutionId != null; + final FullExecutionStatus executionStatus = IntegrationUtils.getExecutionStatus(support, 
managedExecutionId, testUser, 200); assertThat(executionStatus.getStatus()).isEqualTo(ExecutionState.DONE); - // Generate asset urls and check them in the status - final ManagedExecution storedExecution = testConquery.getSupport(name).getMetaStorage().getExecution(managedExecutionId); - final URI - downloadUrlAsset1 = - ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() - .get(0) - .getAssetId()); - final URI - downloadUrlAsset2 = - ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() - .get(1) - .getAssetId()); + // Generate asset urls and check them in the status + final ManagedExecution storedExecution = testConquery.getSupport(name).getMetaStorage().getExecution(managedExecutionId); + final URI + downloadUrlAsset1 = + ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() + .get(0) + .getAssetId()); + final URI + downloadUrlAsset2 = + ResultExternalResource.getDownloadURL(apiUriBuilder.clone(), (ExternalExecution) storedExecution, executionStatus.getResultUrls() + .get(1) + .getAssetId()); + - assertThat(executionStatus.getResultUrls()).containsExactly(new ResultAsset("Result", downloadUrlAsset1), new ResultAsset("Another Result", downloadUrlAsset2)); + assertThat(executionStatus.getResultUrls()).containsExactly(new ResultAsset("Result", downloadUrlAsset1), new ResultAsset("Another Result", downloadUrlAsset2)); - log.info("Download Result"); - final String - response = - support.getClient().target(executionStatus.getResultUrls().get(0).url()).request(TEXT_PLAIN_TYPE).get(String.class); + log.info("Download Result"); + final String + response = + support.getClient().target(executionStatus.getResultUrls().get(0).url()).request(TEXT_PLAIN_TYPE).get(String.class); - assertThat(response).isEqualTo("Hello"); + 
assertThat(response).isEqualTo("Hello"); - log.info("Stopping mock form backend server"); - formBackend.stop(); + log.info("Stopping mock form backend server"); + } finally { + formBackend.stop(); + } } @Override diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java index 3846fb4286..468e9e1361 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterAutocompleteTest.java @@ -3,6 +3,7 @@ import static com.bakdata.conquery.resources.ResourceConstants.*; import static org.assertj.core.api.Assertions.assertThat; +import java.io.File; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; @@ -11,6 +12,10 @@ import java.util.Optional; import java.util.OptionalInt; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.client.Invocation; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.FilterTemplate; import com.bakdata.conquery.apiv1.frontend.FrontendValue; @@ -19,6 +24,7 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.CSVConfig; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; @@ -31,10 +37,6 @@ import com.bakdata.conquery.resources.hierarchies.HierarchyHelper; import com.bakdata.conquery.util.support.StandaloneSupport; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.client.Invocation; -import jakarta.ws.rs.core.MediaType; 
-import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -56,6 +58,12 @@ public Set forModes() { return Set.of(StandaloneSupport.Mode.WORKER, StandaloneSupport.Mode.SQL); } + @Override + public ConqueryConfig overrideConfig(ConqueryConfig conf, File workdir) { + conf.getIndex().setEmptyLabel("emptyDefaultLabel"); + return conf; + } + @Override public void execute(StandaloneSupport conquery) throws Exception { final SelectFilter filter = setupSearch(conquery); @@ -71,7 +79,7 @@ public void execute(StandaloneSupport conquery) throws Exception { Map.of( DATASET, conquery.getDataset().getId(), CONCEPT, concept.getId(), - TABLE, filter.getConnector().getTable().getId(), + TABLE, filter.getConnector().getResolvedTable().getId(), FILTER, filter.getId() ) ); @@ -153,22 +161,30 @@ private static SelectFilter setupSearch(StandaloneSupport conquery) throws Ex final CSVConfig csvConf = conquery.getConfig().getCsv(); NamespaceStorage namespaceStorage = conquery.getNamespace().getStorage(); - final Concept concept = namespaceStorage.getAllConcepts().stream().filter(c -> c.getName().equals("geschlecht_select")).findFirst().orElseThrow(); + final Concept concept = namespaceStorage.getAllConcepts().filter(c -> c.getName().equals("geschlecht_select")).findFirst().orElseThrow(); final Connector connector = concept.getConnectors().iterator().next(); final SelectFilter filter = (SelectFilter) connector.getFilters().iterator().next(); // Copy search csv from resources to tmp folder. 
- final Path tmpCSv = Files.createTempFile("conquery_search", "csv"); + // TODO this file is not deleted at the end of this test + final Path tmpCsv = Files.createTempFile("conquery_search", "csv"); Files.write( - tmpCSv, + tmpCsv, String.join(csvConf.getLineSeparator(), RAW_LINES).getBytes(), StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.CREATE, StandardOpenOption.WRITE ); - final IndexService indexService = new IndexService(conquery.getConfig().getCsv().createCsvParserSettings(), "emptyDefaultLabel"); + IndexService indexService = conquery.getDatasetRegistry().getIndexService(); + + final FilterTemplate + filterTemplate = + new FilterTemplate(conquery.getDataset().getId(), "test", tmpCsv.toUri(), "id", "{{label}}", "Hello this is {{option}}", 2, true, indexService); + filter.setTemplate(filterTemplate.getId()); - filter.setTemplate(new FilterTemplate(conquery.getDataset(), "test", tmpCSv.toUri(), "id", "{{label}}", "Hello this is {{option}}", 2, true, indexService)); + // We need to persist the modification before we submit the update matching stats request + namespaceStorage.addSearchIndex(filterTemplate); + namespaceStorage.updateConcept(concept); final URI matchingStatsUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder() , AdminDatasetResource.class, "postprocessNamespace") diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java index 62a292c2f7..f136a67f4a 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/FilterResolutionTest.java @@ -9,7 +9,6 @@ import java.nio.file.StandardOpenOption; import java.util.List; import java.util.Map; - import jakarta.ws.rs.client.Entity; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; @@ -19,6 +18,7 @@ import 
com.bakdata.conquery.integration.IntegrationTest; import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.CSVConfig; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; @@ -66,8 +66,10 @@ public void execute(StandaloneSupport conquery) throws Exception { conquery.waitUntilWorkDone(); + // Prepare the concept by injecting a filter template - final Concept concept = conquery.getNamespace().getStorage().getAllConcepts().iterator().next(); + NamespaceStorage namespaceStorage = conquery.getNamespace().getStorage(); + final Concept concept = namespaceStorage.getAllConcepts().iterator().next(); final Connector connector = concept.getConnectors().iterator().next(); final SelectFilter filter = (SelectFilter) connector.getFilters().iterator().next(); @@ -79,7 +81,14 @@ public void execute(StandaloneSupport conquery) throws Exception { final IndexService indexService = new IndexService(conquery.getConfig().getCsv().createCsvParserSettings(), "emptyDefaultLabel"); - filter.setTemplate(new FilterTemplate(conquery.getDataset(), "test", tmpCSv.toUri(), "HEADER", "", "", 2, true, indexService)); + final FilterTemplate + filterTemplate = + new FilterTemplate(conquery.getDataset().getId(), "test", tmpCSv.toUri(), "HEADER", "", "", 2, true, indexService); + filter.setTemplate(filterTemplate.getId()); + + // We need to persist the modification before we submit the update matching stats request + namespaceStorage.addSearchIndex(filterTemplate); + namespaceStorage.updateConcept(concept); final URI matchingStatsUri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder() , AdminDatasetResource.class, "postprocessNamespace") @@ -101,7 +110,7 @@ public void execute(StandaloneSupport conquery) throws Exception { Map.of( DATASET, 
conquery.getDataset().getId(), CONCEPT, concept.getId(), - TABLE, filter.getConnector().getTable().getId(), + TABLE, filter.getConnector().getResolvedTable().getId(), FILTER, filter.getId() ) ); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java index 3291d3ac4f..8adcd61eae 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ImportUpdateTest.java @@ -5,6 +5,7 @@ import java.io.File; import java.util.List; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.query.Query; @@ -30,7 +31,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; @@ -77,9 +77,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(cqpps.size()).isEqualTo(tables.size()); LoadingUtil.importCqppFiles(conquery, List.of(cqpps.get(0))); - conquery.waitUntilWorkDone(); - } final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); @@ -92,8 +90,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(imp -> imp.getId().equals(importId1)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)) - .isNotEmpty(); + assertThat(namespace.getStorage().getImport(importId1)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -105,7 +103,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllCBlocks()) 
.describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())) + .filteredOn(block -> block.getBucket().getDataset().equals(dataset.getId())) .isNotEmpty(); assertThat(workerStorage.getAllBuckets()) @@ -187,8 +185,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(imp -> imp.getId().equals(importId1)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId1)) - .isNotEmpty(); + assertThat(namespace.getStorage().getImport(importId1)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -200,7 +198,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())) + .filteredOn(block -> block.getBucket().getDataset().equals(dataset.getId())) .isNotEmpty(); assertThat(workerStorage.getAllBuckets()) @@ -216,8 +214,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(namespace.getNumberOfEntities()).isEqualTo(9); // Issue a query and assert that it has more content. 
IntegrationUtils.assertQueryResult(conquery, query, 4L, ExecutionState.DONE, conquery.getTestUser(), 201); - - } } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java index fbaa0d43ef..c6573cb181 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/MetadataCollectionTest.java @@ -8,6 +8,7 @@ import com.bakdata.conquery.integration.json.ConqueryTestSpec; import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.models.common.daterange.CDateRange; +import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; @@ -34,7 +35,8 @@ public void execute(StandaloneSupport conquery) throws Exception { //ensure the metadata is collected DistributedNamespace namespace = (DistributedNamespace) conquery.getNamespace(); - namespace.getWorkerHandler().sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts())); + namespace.getWorkerHandler() + .sendToAll(new UpdateMatchingStatsMessage(conquery.getNamespace().getStorage().getAllConcepts().map(Concept::getId).toList())); conquery.waitUntilWorkDone(); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java index 9960d8cc94..0fab5f2626 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/RestartTest.java @@ -47,7 +47,6 @@ public void execute(String name, TestConquery testConquery) throws 
Exception { String testJson = In.resource("/tests/query/RESTART_TEST_DATA/SIMPLE_FRONTEND_Query.json").withUTF8().readAll(); Validator validator = Validators.newValidator(); - EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap(); ManagerNode manager = testConquery.getStandaloneCommand().getManagerNode(); AdminDatasetProcessor adminDatasetProcessor = manager.getAdmin().getAdminDatasetProcessor(); @@ -64,11 +63,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { test.executeTest(conquery); - final int numberOfExecutions = conquery.getMetaStorage().getAllExecutions().size(); + final long numberOfExecutions = conquery.getMetaStorage().getAllExecutions().count(); assertThat(numberOfExecutions).isEqualTo(1); // IDMapping Testing NamespaceStorage namespaceStorage = conquery.getNamespaceStorage(); + EntityIdMap entityIdMap = IdMapSerialisationTest.createTestPersistentMap(namespaceStorage); namespaceStorage.updateIdMapping(entityIdMap); @@ -81,8 +81,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Dataset dataset6 = adminDatasetProcessor.addDataset(TEST_DATASET_6); - - MetaStorage storage = conquery.getMetaStorage(); Role role = new Role("role", "ROLE", storage); @@ -147,9 +145,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Restart complete"); - DatasetRegistry datasetRegistry = support.getDatasetsProcessor().getDatasetRegistry(); + DatasetRegistry datasetRegistry = support.getDatasetRegistry(); - assertThat(support.getMetaStorage().getAllExecutions().size()).as("Executions after restart").isEqualTo(numberOfExecutions); + assertThat(support.getMetaStorage().getAllExecutions().count()).as("Executions after restart").isEqualTo(numberOfExecutions); List allQueries = IntegrationUtils.getAllQueries(support, 200); assertThat(allQueries).size().isEqualTo(1); diff --git 
a/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java index bef52ead96..65e8365625 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/ReusedQueryTest.java @@ -7,6 +7,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; import com.bakdata.conquery.apiv1.execution.FullExecutionStatus; import com.bakdata.conquery.apiv1.query.ConceptQuery; @@ -21,17 +23,16 @@ import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.json.TestDataImporter; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.permissions.Ability; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.Connector; -import com.bakdata.conquery.models.datasets.concepts.filters.Filter; import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.execution.ExecutionState; import com.bakdata.conquery.models.execution.ManagedExecution; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId; import com.bakdata.conquery.models.identifiable.ids.specific.FilterId; @@ -44,8 +45,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.client.Entity; 
-import jakarta.ws.rs.core.MediaType; import lombok.extern.slf4j.Slf4j; @@ -139,15 +138,16 @@ public void execute(String name, TestConquery testConquery) throws Exception { // We select only a single event of the query by the exact filtering. final CQConcept cqConcept = new CQConcept(); final ConceptId conceptId = new ConceptId(conquery.getDataset().getId(), "concept"); - final Concept concept = conquery.getNamespaceStorage().getConcept(conceptId); - cqConcept.setElements(List.of(concept)); + final NamespaceStorage namespaceStorage = conquery.getNamespaceStorage(); + final Concept concept = namespaceStorage.getConcept(conceptId); + cqConcept.setElements(List.of(concept.getId())); final CQTable cqTable = new CQTable(); cqTable.setConcept(cqConcept); - final CentralRegistry centralRegistry = conquery.getNamespaceStorage().getCentralRegistry(); - final Connector connector = centralRegistry.resolve(new ConnectorId(conceptId, "connector1")); - cqTable.setConnector(connector); - cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter((Filter>) centralRegistry.resolve(new FilterId(connector.getId(), "filter")), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d))))); + ConnectorId connector1 = new ConnectorId(conceptId, "connector1"); + final Connector connector = connector1.get(namespaceStorage); + cqTable.setConnector(connector.getId()); + cqTable.setFilters(List.of(new FilterValue.CQRealRangeFilter(new FilterId(connector.getId(), "filter"), new Range<>(BigDecimal.valueOf(1.01d), BigDecimal.valueOf(1.01d))))); cqConcept.setTables(List.of(cqTable)); cqConcept.setExcludeFromSecondaryId(false); @@ -195,9 +195,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { reusedDiffId.setRoot(new CQReusedQuery(execution1.getId())); // ignored is a single global value and therefore the same as by-PID - reusedDiffId.setSecondaryId(conquery.getNamespace() - .getStorage() - .getSecondaryId(new 
SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored"))); + reusedDiffId.setSecondaryId(new SecondaryIdDescriptionId(conquery.getDataset().getId(), "ignored")); final ManagedExecutionId executionId = @@ -217,7 +215,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { reused.setSecondaryId(query.getSecondaryId()); User shareHolder = new User("shareholder", "ShareHolder", conquery.getMetaStorage()); - conquery.getMetaProcessor().addUser(shareHolder); + conquery.getAdminProcessor().addUser(shareHolder); shareHolder.addPermissions(Set.of( dataset.createPermission(Set.of(Ability.READ)), diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java index fa395475f7..0b8e24398b 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/SecondaryIdEndpointTest.java @@ -5,6 +5,9 @@ import java.net.URI; import java.util.Map; import java.util.Set; +import jakarta.ws.rs.client.Entity; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.frontend.FrontendSecondaryId; import com.bakdata.conquery.integration.IntegrationTest; @@ -23,9 +26,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; -import jakarta.ws.rs.client.Entity; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -54,7 +54,7 @@ public void execute(StandaloneSupport conquery) throws Exception { final Set secondaryIds = fetchSecondaryIdDescriptions(conquery); log.info("{}", secondaryIds); - description.setDataset(conquery.getDataset()); + description.setDataset(conquery.getDataset().getId()); 
assertThat(secondaryIds) .extracting(FrontendSecondaryId::getId) .containsExactly(description.getId().toString()); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java index 0b1e1ee887..236f64ed93 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ConceptUpdateAndDeletionTest.java @@ -4,6 +4,7 @@ import static org.assertj.core.api.Assertions.assertThat; import java.util.Objects; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; @@ -23,7 +24,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; /** @@ -48,10 +48,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ConceptId conceptId = ConceptId.Parser.INSTANCE.parse(dataset.getName(), "test_tree"); - final Concept concept; - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); - final QueryTest test2 = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson2); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test2 = JsonIntegrationTest.readJson(dataset, testJson2); // Manually import data, so we can do our own work. 
{ @@ -84,8 +83,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -94,12 +93,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { } final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -114,13 +113,9 @@ public void execute(String name, TestConquery testConquery) throws Exception { // To perform the update, the old concept will be deleted first and the new concept will be added. 
That means the deletion of concept is also covered here { log.info("Executing update"); - LoadingUtil.updateConcepts(conquery, test2.getRawConcepts(), Response.Status.Family.SUCCESSFUL); conquery.waitUntilWorkDone(); - log.info("Update executed"); - - } @@ -133,8 +128,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -144,12 +139,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -182,8 +177,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isNotEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -193,12 +188,12 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - 
assertThat(workerStorage.getCentralRegistry().getOptional(conceptId)) - .isNotEmpty(); + assertThat(workerStorage.getConcept(conceptId)) + .isNotNull(); assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId)) + .filteredOn(cBlock -> cBlock.getConnector().getConcept().equals(conceptId)) .isNotEmpty(); } } @@ -214,8 +209,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Delete the Concept. { log.info("Issuing deletion of import {}", conceptId); - concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId)); - conquery.getDatasetsProcessor().deleteConcept(concept); + Concept concept = Objects.requireNonNull(conquery.getNamespace().getStorage().getConcept(conceptId)); + conquery.getAdminDatasetsProcessor().deleteConcept(conceptId); conquery.waitUntilWorkDone(); } @@ -229,8 +224,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNull(); assertThat( conquery.getShardNodes().stream() @@ -242,8 +237,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null) // CBlocks of Concept are deleted on Workers .noneMatch(workerStorage -> workerStorage.getAllCBlocks() - .stream() - .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId))); + .anyMatch(cBlock -> cBlock.getConnector().getConcept().equals(conceptId))); log.info("Executing query after deletion (EXPECTING AN EXCEPTION IN THE LOGS!)"); @@ -271,8 +265,8 @@ public void execute(String name, TestConquery testConquery) throws 
Exception { .filteredOn(con -> con.getId().equals(conceptId)) .isEmpty(); - assertThat(conquery.getNamespace().getStorage().getCentralRegistry().getOptional(conceptId)) - .isEmpty(); + assertThat(conquery.getNamespace().getStorage().getConcept(conceptId)) + .isNull(); assertThat( conquery.getShardNodes().stream() @@ -284,8 +278,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { .noneMatch(workerStorage -> workerStorage.getConcept(conceptId) != null) // CBlocks of Concept are deleted on Workers .noneMatch(workerStorage -> workerStorage.getAllCBlocks() - .stream() - .anyMatch(cBlock -> cBlock.getConnector().getConcept().getId().equals(conceptId))); + .anyMatch(cBlock -> cBlock.getConnector().getConcept().equals(conceptId))); log.info("Executing query after restart (EXPECTING AN EXCEPTION IN THE LOGS!)"); diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java index e5ba64967a..7c2062e340 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java @@ -4,6 +4,8 @@ import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; +import jakarta.ws.rs.WebApplicationException; + import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.integration.common.IntegrationUtils; @@ -12,7 +14,6 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import 
com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.exceptions.ValidatorHelper; @@ -22,7 +23,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.WebApplicationException; import lombok.extern.slf4j.Slf4j; /** @@ -36,11 +36,10 @@ public class DatasetDeletionTest implements ProgrammaticIntegrationTest { public void execute(String name, TestConquery testConquery) throws Exception { final StandaloneSupport conquery = testConquery.getSupport(name); - final MetaStorage storage = conquery.getMetaStorage(); final Dataset dataset = conquery.getDataset(); Namespace namespace = conquery.getNamespace(); final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. final RequiredData content = test.getContent(); @@ -62,15 +61,15 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); log.info("Checking state before deletion"); // Assert state before deletion. { // Must contain the import. - assertThat(namespace.getStorage().getCentralRegistry().getOptional(dataset.getId())) - .isNotEmpty(); + assertThat(namespace.getStorage().getDataset()) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -96,25 +95,25 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Delete Dataset. 
{ - log.info("Issuing deletion of import {}", dataset); + log.info("Issuing deletion of dataset {}", dataset); // Delete the import. // But, we do not allow deletion of tables with associated connectors, so this should throw! - assertThatThrownBy(() -> conquery.getDatasetsProcessor().deleteDataset(dataset)) + assertThatThrownBy(() -> conquery.getAdminDatasetsProcessor().deleteDataset(dataset)) .isInstanceOf(WebApplicationException.class); //TODO use api conquery.getNamespace().getStorage().getTables() - .forEach(tableId -> conquery.getDatasetsProcessor().deleteTable(tableId, true)); + .forEach(tableId -> conquery.getAdminDatasetsProcessor().deleteTable(tableId, true)); conquery.waitUntilWorkDone(); // Finally delete dataset - conquery.getDatasetsProcessor().deleteDataset(dataset); + conquery.getAdminDatasetsProcessor().deleteDataset(dataset); conquery.waitUntilWorkDone(); - assertThat(storage.getCentralRegistry().getOptional(dataset.getId())).isEmpty(); + assertThat(conquery.getDatasetRegistry().get(dataset.getId())).isNull(); } // State after deletion. @@ -122,7 +121,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after deletion"); // We have deleted an import now there should be two less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(0); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(0); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -140,30 +139,26 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. 
assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", value.getInfo().getId()) - .filteredOn(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId())) + .filteredOn(bucket -> bucket.getTable().getDataset().equals(dataset.getId())) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getTable().getDataset().getId().equals(dataset.getId())) + .filteredOn(cBlock -> cBlock.getBucket().resolve().getTable().getDataset().equals(dataset.getId())) .isEmpty(); } } - // It's not exactly possible to issue a query for a non-existant dataset, so we assert that parsing the fails. - assertThatThrownBy(() -> { - IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - }).isNotNull(); - + // Try to execute the query after deletion IntegrationUtils.assertQueryResult(conquery, query, 0, ExecutionState.FAILED, conquery.getTestUser(), 404); } // Reload the dataset and assert the state. 
// We have to do some weird trix with StandaloneSupport to open it with another Dataset - final StandaloneSupport conqueryReimport = testConquery.getSupport(namespace.getDataset().getName()); + final StandaloneSupport conqueryReimport = testConquery.getSupport(dataset.getName()); { // only import the deleted import/table LoadingUtil.importTables(conqueryReimport, content.getTables(), content.isAutoConcept()); @@ -178,11 +173,11 @@ public void execute(String name, TestConquery testConquery) throws Exception { LoadingUtil.importConcepts(conqueryReimport, test.getRawConcepts()); conqueryReimport.waitUntilWorkDone(); - assertThat(conqueryReimport.getDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())) + assertThat(conqueryReimport.getAdminDatasetsProcessor().getDatasetRegistry().get(conqueryReimport.getDataset().getId())) .describedAs("Dataset after re-import.") .isNotNull(); - assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(conqueryReimport.getNamespace().getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conqueryReimport.getShardNodes()) { assertThat(node.getWorkers().getWorkers().values()) @@ -209,7 +204,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); - assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().size()).isEqualTo(2); + assertThat(conqueryRestart.getNamespace().getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conqueryRestart.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -219,7 +214,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getTable().getDataset().getId().equals(dataset.getId()))) + 
assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getTable().getDataset().equals(dataset.getId()))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java index 5536f967cf..fa1235d7e7 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/ImportDeletionTest.java @@ -9,6 +9,8 @@ import java.util.List; import java.util.Map; import java.util.zip.GZIPInputStream; +import jakarta.ws.rs.core.MediaType; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.ConqueryConstants; import com.bakdata.conquery.apiv1.query.Query; @@ -39,8 +41,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.MediaType; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.FileUtils; @@ -65,7 +65,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ImportId importId = ImportId.Parser.INSTANCE.parse(dataset.getName(), "test_table2", "test_table2"); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. 
final RequiredData content = test.getContent(); @@ -88,7 +88,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); // State before deletion. @@ -100,8 +100,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { .filteredOn(imp -> imp.getId().equals(importId)) .isNotEmpty(); - assertThat(namespace.getStorage().getCentralRegistry().getOptional(importId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getImport(importId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -113,7 +113,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(block -> block.getBucket().getId().getDataset().equals(dataset.getId())) + .filteredOn(block -> block.getBucket().getDataset().equals(dataset.getId())) .isNotEmpty(); assertThat(workerStorage.getAllBuckets()) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) @@ -155,7 +155,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after deletion"); // We have deleted an import now there should be one less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports - 1); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -173,19 +173,19 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. 
assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) - .filteredOn(bucket -> bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", worker.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getId().getImp().equals(importId)) + .filteredOn(cBlock -> cBlock.getBucket().getImp().equals(importId)) .isEmpty(); - + // Import should not exists anymore assertThat(workerStorage.getImport(importId)) - .describedAs("Import for Worker %s", worker.getInfo().getId()) - .isNull(); + .describedAs("Import for Worker %s", worker.getInfo().getId()) + .isNull(); } } @@ -239,10 +239,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { conquery.preprocessTmp(conquery.getTmpDir(), List.of(descriptionFile)); //import preprocessedFiles - conquery.getDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile))); - conquery.waitUntilWorkDone(); - - + conquery.getAdminDatasetsProcessor().addImport(conquery.getNamespace(), new GZIPInputStream(new FileInputStream(preprocessedFile))); conquery.waitUntilWorkDone(); } @@ -250,7 +247,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after re-import"); - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conquery.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -262,7 +259,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) - .filteredOn(bucket -> 
bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) .isNotEmpty(); } @@ -285,7 +282,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); { - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2); + assertThat(conquery2.getNamespace().getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conquery2.getShardNodes()) { for (Worker worker : node.getWorkers().getWorkers().values()) { @@ -298,7 +295,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", worker.getInfo().getId()) .filteredOn(bucket -> bucket.getId().getDataset().equals(dataset.getId())) - .filteredOn(bucket -> bucket.getImp().getId().equals(importId)) + .filteredOn(bucket -> bucket.getImp().equals(importId)) .isNotEmpty(); } } diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java index 661bc05c31..fc00254905 100644 --- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java +++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/TableDeletionTest.java @@ -6,6 +6,7 @@ import java.net.URI; import java.util.Map; import java.util.stream.Collectors; +import jakarta.ws.rs.core.Response; import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.commands.ShardNode; @@ -15,7 +16,6 @@ import com.bakdata.conquery.integration.json.JsonIntegrationTest; import com.bakdata.conquery.integration.json.QueryTest; import com.bakdata.conquery.integration.tests.ProgrammaticIntegrationTest; -import com.bakdata.conquery.io.storage.MetaStorage; import 
com.bakdata.conquery.io.storage.ModificationShieldedWorkerStorage; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.exceptions.ValidatorHelper; @@ -29,7 +29,6 @@ import com.bakdata.conquery.util.support.StandaloneSupport; import com.bakdata.conquery.util.support.TestConquery; import com.github.powerlibraries.io.In; -import jakarta.ws.rs.core.Response; import lombok.extern.slf4j.Slf4j; /** @@ -43,8 +42,6 @@ public void execute(String name, TestConquery testConquery) throws Exception { final StandaloneSupport conquery = testConquery.getSupport(name); - final MetaStorage storage = conquery.getMetaStorage(); - final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll(); final Dataset dataset = conquery.getDataset(); @@ -52,7 +49,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2"); - final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson); + final QueryTest test = JsonIntegrationTest.readJson(dataset, testJson); // Manually import data, so we can do our own work. final RequiredData content = test.getContent(); @@ -74,15 +71,15 @@ public void execute(String name, TestConquery testConquery) throws Exception { final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery()); - final int nImports = namespace.getStorage().getAllImports().size(); + final long nImports = namespace.getStorage().getAllImports().count(); // State before deletion. { log.info("Checking state before deletion"); // Must contain the import. 
- assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)) - .isNotEmpty(); + assertThat(namespace.getStorage().getTable(tableId)) + .isNotNull(); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -115,10 +112,10 @@ public void execute(String name, TestConquery testConquery) throws Exception { final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove") - .buildFromMap(Map.of( - ResourceConstants.DATASET, conquery.getDataset().getName(), - ResourceConstants.TABLE, tableId.toString() - )); + .buildFromMap(Map.of( + ResourceConstants.DATASET, conquery.getDataset().getName(), + ResourceConstants.TABLE, tableId.toString() + )); final Response failed = conquery.getClient() .target(deleteTable) @@ -127,7 +124,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR); - conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next()); + conquery.getAdminDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next().getId()); Thread.sleep(100); conquery.waitUntilWorkDone(); @@ -147,7 +144,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { { log.info("Checking state after deletion"); // We have deleted an import now there should be two less! - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports - 1); // The deleted import should not be found. assertThat(namespace.getStorage().getAllImports()) @@ -165,13 +162,13 @@ public void execute(String name, TestConquery testConquery) throws Exception { // No bucket should be found referencing the import. 
assertThat(workerStorage.getAllBuckets()) .describedAs("Buckets for Worker %s", value.getInfo().getId()) - .filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)) + .filteredOn(bucket -> bucket.getImp().getTable().equals(tableId)) .isEmpty(); // No CBlock associated with import may exist assertThat(workerStorage.getAllCBlocks()) .describedAs("CBlocks for Worker %s", value.getInfo().getId()) - .filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)) + .filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().equals(tableId)) .isEmpty(); } } @@ -211,7 +208,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { continue; } - assertThat(value.getStorage().getCentralRegistry().resolve(tableId)) + assertThat(value.getStorage().getTable(tableId)) .describedAs("Table in worker storage.") .isNotNull(); } @@ -221,7 +218,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { // Test state after reimport. 
{ log.info("Checking state after re-import"); - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports); + assertThat(namespace.getStorage().getAllImports().count()).isEqualTo(nImports); for (ShardNode node : conquery.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -231,7 +228,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))) + assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getImp().getTable().equals(tableId))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } @@ -255,7 +252,8 @@ public void execute(String name, TestConquery testConquery) throws Exception { log.info("Checking state after re-start"); { - assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2); + Namespace namespace2 = conquery2.getNamespace(); + assertThat(namespace2.getStorage().getAllImports().count()).isEqualTo(2); for (ShardNode node : conquery2.getShardNodes()) { for (Worker value : node.getWorkers().getWorkers().values()) { @@ -265,7 +263,7 @@ public void execute(String name, TestConquery testConquery) throws Exception { final ModificationShieldedWorkerStorage workerStorage = value.getStorage(); - assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))) + assertThat(workerStorage.getAllBuckets().filter(bucket -> bucket.getImp().getTable().equals(tableId))) .describedAs("Buckets for Worker %s", value.getInfo().getId()) .isNotEmpty(); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java index 64c67acf55..68c899544f 100644 --- 
a/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/AbstractSerializationTest.java @@ -1,20 +1,23 @@ package com.bakdata.conquery.io; +import jakarta.validation.Validator; + import static org.mockito.Mockito.mock; import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.mode.cluster.ClusterNamespaceHandler; import com.bakdata.conquery.mode.cluster.ClusterState; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; -import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.index.IndexService; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.DistributedNamespace; import com.bakdata.conquery.models.worker.ShardWorkers; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import io.dropwizard.jersey.validation.Validators; import jakarta.validation.Validator; @@ -27,46 +30,52 @@ public abstract class AbstractSerializationTest { private final Validator validator = Validators.newValidator(); private final ConqueryConfig config = new ConqueryConfig(); private DatasetRegistry datasetRegistry; - private MetaStorage metaStorage; private NamespaceStorage namespaceStorage; - private IndexService indexService; - + private MetaStorage metaStorage; + private WorkerStorageImpl workerStorage; private ObjectMapper managerInternalMapper; private ObjectMapper namespaceInternalMapper; private ObjectMapper shardInternalMapper; private ObjectMapper apiMapper; + @BeforeEach public void before() { final InternalMapperFactory internalMapperFactory = new 
InternalMapperFactory(config, validator); + final IndexService indexService = new IndexService(config.getCsv().createCsvParserSettings(), "emptyDefaultLabel"); NonPersistentStoreFactory storageFactory = new NonPersistentStoreFactory(); metaStorage = new MetaStorage(storageFactory); namespaceStorage = new NamespaceStorage(storageFactory, ""); - indexService = new IndexService(config.getCsv().createCsvParserSettings(), "emptyDefaultLabel"); + workerStorage = new WorkerStorageImpl(new NonPersistentStoreFactory(), null, "serializationTestWorker"); final ClusterNamespaceHandler clusterNamespaceHandler = new ClusterNamespaceHandler(new ClusterState(), config, internalMapperFactory); datasetRegistry = new DatasetRegistry<>(0, config, internalMapperFactory, clusterNamespaceHandler, indexService); - // Prepare manager node internal mapper + MetricRegistry metricRegistry = new MetricRegistry(); + managerInternalMapper = internalMapperFactory.createManagerPersistenceMapper(datasetRegistry, metaStorage); + metaStorage.openStores(managerInternalMapper, metricRegistry); - metaStorage.openStores(managerInternalMapper); - metaStorage.loadData(); - // Prepare namespace persistence mapper - namespaceInternalMapper = internalMapperFactory.createNamespacePersistenceMapper(datasetRegistry); - namespaceStorage.injectInto(namespaceInternalMapper); - namespaceStorage.openStores(namespaceInternalMapper); - namespaceStorage.loadData(); - namespaceStorage.updateDataset(new Dataset("serialization_test")); + namespaceInternalMapper = internalMapperFactory.createNamespacePersistenceMapper(namespaceStorage); + namespaceStorage.openStores(namespaceInternalMapper, metricRegistry); - // Prepare shard node internal mapper - final ShardWorkers workers = mock(ShardWorkers.class); - shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(workers); + // Prepare worker persistence mapper + workerStorage.openStores(shardInternalMapper, metricRegistry); + ShardWorkers workers = new 
ShardWorkers( + config.getQueries().getExecutionPool(), + internalMapperFactory, + config.getCluster().getEntityBucketSize(), + config.getQueries().getSecondaryIdSubPlanRetention() + ); + shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(workerStorage); - // Prepare api mapper with a Namespace injected (usually done by PathParamInjector) + // Prepare api response mapper apiMapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); internalMapperFactory.customizeApiObjectMapper(apiMapper, datasetRegistry, metaStorage); + // This overrides the injected datasetRegistry namespaceStorage.injectInto(apiMapper); } + + } diff --git a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java deleted file mode 100644 index cc6cbebfde..0000000000 --- a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/IdRefrenceTest.java +++ /dev/null @@ -1,79 +0,0 @@ -package com.bakdata.conquery.io.jackson.serializer; - -import static org.assertj.core.api.Assertions.assertThat; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; - -import com.bakdata.conquery.io.jackson.Jackson; -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; -import com.bakdata.conquery.util.NonPersistentStoreFactory; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.databind.ObjectMapper; -import lombok.Getter; -import lombok.RequiredArgsConstructor; -import org.junit.jupiter.api.Test; - -public class IdRefrenceTest { - - @Test - public void testListReferences() throws IOException { - final ObjectMapper 
mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - - CentralRegistry registry = new CentralRegistry(); - Dataset dataset = new Dataset(); - dataset.setName("dataset"); - Table table = new Table(); - table.setDataset(dataset); - table.setName("table"); - registry.register(dataset); - registry.register(table); - - final MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory()); - - metaStorage.openStores(null); - - - User user = new User("usermail", "userlabel", metaStorage); - metaStorage.addUser(user); - - String json = mapper.writeValueAsString( - new ListHolder( - Collections.singletonList(table), - Collections.singletonList(user) - ) - ); - - assertThat(json) - .contains("\"user.usermail\"") - .contains("\"dataset.table\""); - - new SingletonNamespaceCollection(registry) - .injectInto(mapper); - metaStorage.injectInto(mapper); - ListHolder holder = mapper - .readerFor(ListHolder.class) - .readValue(json); - - assertThat(holder.getUsers().get(0)).isSameAs(user); - assertThat(holder.getTables().get(0)).isSameAs(table); - } - - /** - * @implNote this needs to be a class, because jackson ignores NsIdRefCollection on records - */ - @Getter - @RequiredArgsConstructor(onConstructor_ = @JsonCreator) - public static class ListHolder { - @NsIdRefCollection - private final List
tables; - @MetaIdRefCollection - private final List users; - } -} diff --git a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java index 11db3ada69..a47a9e82b3 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/io/jackson/serializer/SerializationTestUtil.java @@ -19,9 +19,8 @@ import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.exceptions.JSONException; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.IdentifiableImpl; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; +import com.bakdata.conquery.models.identifiable.NamespacedStorageProvider; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,7 +29,6 @@ import io.dropwizard.jersey.validation.Validators; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import lombok.Setter; import lombok.experimental.Accessors; import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.RecursiveComparisonAssert; @@ -53,13 +51,12 @@ public class SerializationTestUtil { User.ShiroUserAdapter.class, Validator.class, WeakReference.class, - CompletableFuture.class + CompletableFuture.class, + NamespacedStorageProvider.class }; private final JavaType type; private final Validator validator = Validators.newValidator(); - @Setter - private CentralRegistry registry; private List objectMappers = Collections.emptyList(); @NonNull private Injectable[] injectables = {}; @@ -100,6 +97,10 @@ public SerializationTestUtil customizingAssertion(UnaryOperator activeView = 
objectMapper.getSerializationConfig().getActiveView(); throw new IllegalStateException("Serdes failed with object mapper using view '" + activeView + "'", e); } } } - public void test(T value) throws JSONException, IOException { - test(value, value); - } - private void test(T value, T expected, ObjectMapper mapper) throws IOException { - if (registry != null) { - mapper = new SingletonNamespaceCollection(registry).injectInto(mapper); - } for (Injectable injectable : injectables) { mapper = injectable.injectInto(mapper); } @@ -157,7 +151,8 @@ private void test(T value, T expected, ObjectMapper mapper) throws IOException { .as("Unequal after copy.") .usingRecursiveComparison() .usingOverriddenEquals() - .ignoringFieldsOfTypes(TYPES_TO_IGNORE); + .ignoringFieldsOfTypes(TYPES_TO_IGNORE) + .ignoringFields("metaStorage", "namespacedStorageProvider"); // Apply assertion customizations ass = assertCustomizer.apply(ass); diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java b/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java index 66ab1a544e..431b259f32 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/ResultTestUtil.java @@ -8,12 +8,16 @@ import java.util.stream.Stream; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.select.Select; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.Bucket; +import com.bakdata.conquery.models.identifiable.ids.specific.ColumnId; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import 
com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.QueryExecutionContext; import com.bakdata.conquery.models.query.entity.Entity; @@ -34,8 +38,7 @@ @UtilityClass public class ResultTestUtil { - public static final User OWNER = new User("user", "User", null); - public static final Dataset DATASET = new Dataset("dataset"); + public static final DatasetId DATASET = new DatasetId("test_dataset"); private static final TreeConcept CONCEPT; static { @@ -54,7 +57,7 @@ public static List getIdFields() { @NotNull public static ManagedQuery getTestQuery() { - return new ManagedQuery(null, OWNER, DATASET, null, null) { + return new ManagedQuery(null, new UserId("test_user"), DATASET, null, null) { @Override public List getResultInfos() { return getResultTypes().stream() @@ -136,7 +139,7 @@ public TypedSelectDummy(ResultType resultType) { @Nullable @Override - public List getRequiredColumns() { + public List getRequiredColumns() { return Collections.emptyList(); } diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java index fa50e4ac1c..9fb1870c83 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java @@ -1,7 +1,9 @@ package com.bakdata.conquery.io.result.excel; -import static com.bakdata.conquery.io.result.ResultTestUtil.*; +import static com.bakdata.conquery.io.result.ResultTestUtil.getResultTypes; +import static com.bakdata.conquery.io.result.ResultTestUtil.getTestEntityResults; import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -17,10 +19,14 @@ import java.util.stream.Collectors; 
import java.util.stream.Stream; +import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.result.ResultTestUtil; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.config.ExcelConfig; +import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.i18n.I18n; import com.bakdata.conquery.models.identifiable.mapping.EntityPrintId; import com.bakdata.conquery.models.query.ManagedQuery; @@ -33,6 +39,7 @@ import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import lombok.extern.slf4j.Slf4j; import org.apache.poi.ss.usermodel.Cell; import org.apache.poi.ss.usermodel.DataFormatter; @@ -45,7 +52,7 @@ @Slf4j public class ExcelResultRenderTest { - public static final ConqueryConfig CONFIG = new ConqueryConfig() {{ + public static final ConqueryConfig CONFIG = new ConqueryConfig(){{ // Suppress java.lang.NoClassDefFoundError: com/bakdata/conquery/io/jackson/serializer/CurrencyUnitDeserializer setStorage(new NonPersistentStoreFactory()); }}; @@ -68,19 +75,10 @@ void writeAndRead() throws IOException { // The Shard nodes send Object[] but since Jackson is used for deserialization, nested collections are always a list because they are not further specialized final List results = getTestEntityResults(); - final ManagedQuery mquery = new ManagedQuery(null, OWNER, DATASET, null, null) { - public List getResultInfos() { - return getResultTypes().stream() - .map(ResultTestUtil.TypedSelectDummy::new) - .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())) - .collect(Collectors.toList()); - } + MetaStorage metaStorage = new 
MetaStorage(new NonPersistentStoreFactory()); + metaStorage.openStores(null, new MetricRegistry()); - @Override - public Stream streamResults(OptionalLong maybeLimit) { - return results.stream(); - } - }; + ManagedQuery mquery = getManagedQuery(metaStorage, results); // First we write to the buffer, than we read from it and parse it as TSV final ByteArrayOutputStream output = new ByteArrayOutputStream(); @@ -112,6 +110,26 @@ public Stream streamResults(OptionalLong maybeLimit) { } + private static @NotNull ManagedQuery getManagedQuery(MetaStorage metaStorage, List results) { + User user = new User("test", "test", metaStorage); + user.updateStorage(); + + return new ManagedQuery(mock(Query.class), user.getId(), new Dataset(ExcelResultRenderTest.class.getSimpleName()).getId(), metaStorage, null) { + @Override + public Stream streamResults(OptionalLong maybeLimit) { + return results.stream(); + } + + @Override + public List getResultInfos() { + return getResultTypes().stream() + .map(ResultTestUtil.TypedSelectDummy::new) + .map(select -> new SelectResultInfo(select, new CQConcept(), Collections.emptySet())) + .collect(Collectors.toList()); + } + }; + } + @NotNull private List readComputed(InputStream inputStream, PrintSettings settings) throws IOException { final XSSFWorkbook workbook = new XSSFWorkbook(inputStream); diff --git a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java index c1ad2f258b..baa5b24a3c 100644 --- a/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java +++ b/backend/src/test/java/com/bakdata/conquery/io/storage/xodus/stores/SerializingStoreDumpTest.java @@ -18,7 +18,7 @@ import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore.IterationStatistic; import com.bakdata.conquery.models.auth.entities.User; import 
com.bakdata.conquery.models.config.XodusStoreFactory; -import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; @@ -44,7 +44,7 @@ public class SerializingStoreDumpTest { private ObjectMapper objectMapper; // Test data - private final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), new Dataset("dataset"), STORAGE, null); + private final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), new UserId("test"), new DatasetId("dataset"), STORAGE, null); private final ConceptQuery cQuery = new ConceptQuery( new CQReusedQuery(managedQuery.getId())); private final User user = new User("username", "userlabel", STORAGE); diff --git a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java index cd0bd04dc0..c614f2e4d5 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/SerializationTests.java @@ -1,11 +1,9 @@ package com.bakdata.conquery.models; -import static com.bakdata.conquery.models.types.SerialisationObjectsUtil.*; +import static com.bakdata.conquery.util.SerialisationObjectsUtil.*; import static org.assertj.core.api.Assertions.assertThat; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; import java.time.LocalDate; import java.time.ZonedDateTime; import java.util.Arrays; @@ -34,7 +32,13 @@ import com.bakdata.conquery.io.AbstractSerializationTest; import com.bakdata.conquery.io.cps.CPSType; import com.bakdata.conquery.io.external.form.FormBackendVersion; +import com.bakdata.conquery.io.jackson.Injectable; +import 
com.bakdata.conquery.io.jackson.MutableInjectableValues; import com.bakdata.conquery.io.jackson.serializer.SerializationTestUtil; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; @@ -70,19 +74,15 @@ import com.bakdata.conquery.models.forms.util.Alignment; import com.bakdata.conquery.models.forms.util.Resolution; import com.bakdata.conquery.models.i18n.I18n; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.IdMapSerialisationTest; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; -import com.bakdata.conquery.models.index.InternToExternMapper; -import com.bakdata.conquery.models.index.MapInternToExternMapper; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.entity.Entity; import com.bakdata.conquery.models.query.results.EntityResult; import com.bakdata.conquery.models.query.results.MultilineEntityResult; -import com.bakdata.conquery.util.SerialisationObjectsUtil; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; @@ -98,6 +98,7 @@ import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.RecursiveComparisonAssert; +import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.Tag; import 
org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -107,10 +108,36 @@ @Slf4j public class SerializationTests extends AbstractSerializationTest { + public static Stream> rangeData() { + final int SEED = 7; + Random random = new Random(SEED); + return Stream + .generate(() -> { + int first = random.nextInt(); + int second = random.nextInt(); + + if (first < second) { + return Range.of(first, second); + } + return Range.of(second, first); + }) + .filter(Range::isOrdered) + .flatMap(range -> Stream.of( + range, + Range.exactly(range.getMin()), + Range.atMost(range.getMin()), + Range.atLeast(range.getMin()) + )) + .filter(Range::isOrdered) + .limit(100); + } + @Test public void dataset() throws IOException, JSONException { Dataset dataset = new Dataset(); dataset.setName("dataset"); + dataset.setLabel("Dataset"); + dataset.setNamespacedStorageProvider(getDatasetRegistry()); SerializationTestUtil .forType(Dataset.class) @@ -144,18 +171,18 @@ public void role() throws IOException, JSONException { @Test public void user() throws IOException, JSONException { User user = new User("user", "user", getMetaStorage()); + user.setMetaStorage(getMetaStorage()); user.addPermission(DatasetPermission.onInstance(Ability.READ, new DatasetId("test"))); user.addPermission(ExecutionPermission.onInstance(Ability.READ, new ManagedExecutionId(new DatasetId("dataset"), UUID.randomUUID()))); Role role = new Role("company", "company", getMetaStorage()); user.addRole(role); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - registry.register(role); + getMetaStorage().addRole(role); SerializationTestUtil .forType(User.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) + .injectables(getMetaStorage()) .test(user); } @@ -171,9 +198,9 @@ public void group() throws IOException, JSONException { User user = new User("userName", "userLabel", getMetaStorage()); group.addMember(user); - CentralRegistry registry = 
getMetaStorage().getCentralRegistry(); - registry.register(role); - registry.register(user); + final MetaStorage metaStorage = getMetaStorage(); + metaStorage.addRole(role); + metaStorage.addUser(user); SerializationTestUtil .forType(Group.class) @@ -181,7 +208,6 @@ public void group() throws IOException, JSONException { .test(group); } - @Test @Tag("OBJECT_2_INT_MAP") // Bucket uses Object2IntMap public void bucketCompoundDateRange() throws JSONException, IOException { @@ -208,11 +234,11 @@ public void bucketCompoundDateRange() throws JSONException, IOException { compoundCol.setTable(table); table.setColumns(new Column[]{startCol, endCol, compoundCol}); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setName("tableName"); - Import imp = new Import(table); + Import imp = new Import(table.getId()); imp.setName("importTest"); @@ -223,39 +249,69 @@ public void bucketCompoundDateRange() throws JSONException, IOException { ColumnStore startStore = new IntegerDateStore(new ShortArrayStore(new short[]{1, 2, 3, 4}, Short.MIN_VALUE)); ColumnStore endStore = new IntegerDateStore(new ShortArrayStore(new short[]{5, 6, 7, 8}, Short.MIN_VALUE)); - Bucket bucket = new Bucket(0, new ColumnStore[]{startStore, endStore, compoundStore}, Object2IntMaps.singleton("0", 0), Object2IntMaps.singleton("0", 4),4, imp); + Bucket bucket = new Bucket(0, Object2IntMaps.singleton("0", 0), Object2IntMaps.singleton("0", 4), 4,imp.getId(), new ColumnStore[]{startStore, endStore, compoundStore}); compoundStore.setParent(bucket); + final WorkerStorageImpl workerStorage = getWorkerStorage(); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - registry.register(dataset); - registry.register(startCol); - registry.register(endCol); - registry.register(compoundCol); - registry.register(table); - registry.register(imp); - registry.register(bucket); + workerStorage.updateDataset(dataset); + workerStorage.addTable(table); + workerStorage.addImport(imp); + 
workerStorage.addBucket(bucket); final Validator validator = Validators.newValidator(); SerializationTestUtil .forType(Bucket.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper()) - .registry(registry) - .injectables(values -> values.add(Validator.class, validator)) + .objectMappers(getShardInternalMapper()) + .injectables(new Injectable() { + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(Validator.class, validator); + } + }) .test(bucket); } - @Test public void table() throws JSONException, IOException { - Dataset dataset = new Dataset(); - dataset.setName("datasetName"); + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + Dataset dataset = createDataset(namespaceStorage); + + Table table = getTable(dataset); + table.setStorage(namespaceStorage); + + table.init(); + SerializationTestUtil + .forType(Table.class) + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(table); + } + + { + // Shard + final WorkerStorageImpl workerStorage = getWorkerStorage(); + Dataset dataset = createDataset(workerStorage); + + Table table = getTable(dataset); + table.setStorage(workerStorage); + + workerStorage.addTable(table); + + SerializationTestUtil + .forType(Table.class) + .objectMappers(getShardInternalMapper()) + .test(table); + } + } + + private static @NotNull Table getTable(Dataset dataset) { Table table = new Table(); Column column = new Column(); @@ -266,54 +322,55 @@ public void table() throws JSONException, IOException { table.setColumns(new Column[]{column}); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setLabel("tableLabel"); table.setName("tableName"); - - - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - registry.register(dataset); - registry.register(table); - registry.register(column); - - SerializationTestUtil - .forType(Table.class) - .objectMappers(getManagerInternalMapper(), 
getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(table); + return table; } @Test public void treeConcept() throws IOException, JSONException { + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final Dataset dataset = createDataset(namespaceStorage); + TreeConcept concept = createConcept(dataset, namespaceStorage); - CentralRegistry registry = getMetaStorage().getCentralRegistry(); - Dataset dataset = createDataset(registry); + SerializationTestUtil + .forType(Concept.class) + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(concept); + } - TreeConcept concept = createConcept(registry, dataset); - concept.init(); + { + // Shard + final WorkerStorageImpl workerStorage = getWorkerStorage(); + final Dataset dataset = createDataset(workerStorage); + TreeConcept concept = createConcept(dataset, workerStorage); - SerializationTestUtil - .forType(Concept.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(concept); - } + SerializationTestUtil + .forType(Concept.class) + .objectMappers(getShardInternalMapper()) + .test(concept); + } + } @Test public void persistentIdMap() throws JSONException, IOException { + EntityIdMap persistentMap = IdMapSerialisationTest.createTestPersistentMap(getNamespaceStorage()); + SerializationTestUtil.forType(EntityIdMap.class) - .objectMappers(getManagerInternalMapper()) - .test(IdMapSerialisationTest.createTestPersistentMap()); + .objectMappers(getNamespaceInternalMapper(), getApiMapper()) + .test(persistentMap); } @Test public void formConfig() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final Dataset dataset = createDataset(registry); + final Dataset dataset = createDataset(namespaceStorage); ExportForm form = new ExportForm(); AbsoluteMode mode = new 
AbsoluteMode(); @@ -328,77 +385,76 @@ public void formConfig() throws JSONException, IOException { SerializationTestUtil .forType(FormConfig.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) .test(formConfig); } @Test public void managedQuery() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final MetaStorage metaStorage = getMetaStorage(); final Dataset dataset = new Dataset("test-dataset"); final User user = new User("test-user", "test-user", getMetaStorage()); - registry.register(dataset); - registry.register(user); + namespaceStorage.updateDataset(dataset); - getMetaStorage().updateUser(user); + metaStorage.updateUser(user); - ManagedQuery execution = new ManagedQuery(null, user, dataset, getMetaStorage(), getDatasetRegistry()); + ManagedQuery execution = new ManagedQuery(null, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); execution.setTags(new String[]{"test-tag"}); + // Trigger UUID creation + execution.getId(); + SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) + .injectables(metaStorage) .test(execution); } @Test public void testExportForm() throws JSONException, IOException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - final Dataset dataset = createDataset(registry); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final Dataset dataset = createDataset(namespaceStorage); - registry.register(dataset); - - final ExportForm exportForm = createExportForm(registry, dataset); + final ExportForm exportForm = createExportForm(dataset, namespaceStorage); SerializationTestUtil.forType(QueryDescription.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) .checkHashCode() .test(exportForm); } @Test public 
void managedForm() throws JSONException, IOException { + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); - - final Dataset dataset = createDataset(registry); + final Dataset dataset = createDataset(namespaceStorage); - final User user = createUser(registry, getMetaStorage()); + final User user = createUser(getMetaStorage()); - final ExportForm exportForm = createExportForm(registry, dataset); + final ExportForm exportForm = createExportForm(dataset, namespaceStorage); - ManagedInternalForm execution = new ManagedInternalForm<>(exportForm, user, dataset, getMetaStorage(), getDatasetRegistry()); + ManagedInternalForm execution = new ManagedInternalForm<>(exportForm, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); execution.setTags(new String[]{"test-tag"}); + // Trigger UUID creation + execution.getId(); + SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper(), getApiMapper()) - .registry(registry) .test(execution); } - @Test public void testExternalExecution() throws IOException, JSONException { - final CentralRegistry centralRegistry = getMetaStorage().getCentralRegistry(); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + final String subType = "test-type"; JsonNodeFactory factory = new JsonNodeFactory(false); @@ -407,53 +463,71 @@ public void testExternalExecution() throws IOException, JSONException { )); ExternalForm form = new ExternalForm(node, subType); - final Dataset dataset = SerialisationObjectsUtil.createDataset(centralRegistry); - final User user = SerialisationObjectsUtil.createUser(centralRegistry, getMetaStorage()); + final Dataset dataset = createDataset(namespaceStorage); + final User user = createUser(getMetaStorage()); + + final ExternalExecution execution = new ExternalExecution(form, user.getId(), dataset.getId(), getMetaStorage(), getDatasetRegistry()); - final 
ExternalExecution execution = new ExternalExecution(form, user, dataset, getMetaStorage(), getDatasetRegistry()); + // Trigger UUID creation + execution.getId(); SerializationTestUtil.forType(ManagedExecution.class) .objectMappers(getManagerInternalMapper()) - .registry(centralRegistry) .test(execution); } @Test public void cqConcept() throws JSONException, IOException { + { + // Manager + final NamespaceStorage namespaceStorage = getNamespaceStorage(); - final Dataset dataset = new Dataset(); - dataset.setName("dataset"); + final CQConcept cqConcept = createCqConcept(namespaceStorage); + + SerializationTestUtil + .forType(CQConcept.class) + .objectMappers(getManagerInternalMapper(), getApiMapper()) + .test(cqConcept); + } + + { + // Shard + final WorkerStorageImpl workerStorage = getWorkerStorage(); + final CQConcept cqConcept = createCqConcept(workerStorage); + + SerializationTestUtil + .forType(CQConcept.class) + .objectMappers(getShardInternalMapper()) + .test(cqConcept); + } + } + + private static @NotNull CQConcept createCqConcept(NamespacedStorageImpl namespaceStorage) { + Dataset dataset = createDataset(namespaceStorage); final TreeConcept concept = new TreeConcept(); concept.setName("concept"); - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(concept); + connector.setName("connector"); concept.setConnectors(List.of(connector)); final CQConcept cqConcept = new CQConcept(); - cqConcept.setElements(List.of(concept)); + cqConcept.setElements(List.of(concept.getId())); cqConcept.setLabel("Label"); final CQTable cqTable = new CQTable(); - cqTable.setConnector(connector); + cqTable.setConnector(connector.getId()); cqTable.setFilters(List.of()); cqTable.setConcept(cqConcept); cqConcept.setTables(List.of(cqTable)); - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); - registry.register(dataset); - registry.register(concept); - 
registry.register(connector); - - SerializationTestUtil - .forType(CQConcept.class) - .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(registry) - .test(cqConcept); + namespaceStorage.updateConcept(concept); + return cqConcept; } @Test @@ -479,7 +553,6 @@ public void executionCreationResolveError() throws JSONException, IOException { .test(error); } - @Test public void executionQueryJobError() throws JSONException, IOException { log.info("Beware, this test will print an ERROR message."); @@ -493,7 +566,7 @@ public void executionQueryJobError() throws JSONException, IOException { @Test public void meInformation() throws IOException, JSONException { - User user = new User("name", "label", getMetaStorage()); + User user = new User("name", "labe", getMetaStorage()); MeProcessor.FrontendMeInformation info = MeProcessor.FrontendMeInformation.builder() .userName(user.getLabel()) @@ -514,7 +587,7 @@ public void testFormQuery() throws IOException, JSONException { final TreeConcept testConcept = new TreeConcept(); Dataset dataset = new Dataset(); dataset.setName("testDataset"); - testConcept.setDataset(dataset); + testConcept.setDataset(dataset.getId()); testConcept.setName("concept"); final ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setConcept(testConcept); @@ -522,10 +595,12 @@ public void testFormQuery() throws IOException, JSONException { testConcept.setConnectors(List.of(connector)); - concept.setElements(Collections.singletonList(testConcept)); + concept.setElements(Collections.singletonList(testConcept.getId())); CQTable[] tables = {new CQTable()}; - connector.setTable(new Table()); - tables[0].setConnector(connector); + Table table = new Table(); + table.setDataset(dataset.getId()); + connector.setTable(table.getId()); + tables[0].setConnector(connector.getId()); tables[0].setConcept(concept); concept.setTables(Arrays.asList(tables)); ConceptQuery subQuery = new ConceptQuery(concept); @@ 
-545,28 +620,30 @@ public void testFormQuery() throws IOException, JSONException { ) ); - CentralRegistry centralRegistry = getMetaStorage().getCentralRegistry(); - centralRegistry.register(dataset); - centralRegistry.register(testConcept); - centralRegistry.register(connector); + final NamespaceStorage namespaceStorage = getNamespaceStorage(); + namespaceStorage.updateDataset(dataset); + namespaceStorage.updateConcept(testConcept); + + WorkerStorageImpl workerStorage = getWorkerStorage(); + workerStorage.updateDataset(dataset); + workerStorage.updateConcept(testConcept); SerializationTestUtil .forType(AbsoluteFormQuery.class) .objectMappers(getManagerInternalMapper(), getShardInternalMapper(), getApiMapper()) - .registry(centralRegistry) .test(query); } - @Test - public void serialize() throws IOException, JSONException { - final CentralRegistry registry = getMetaStorage().getCentralRegistry(); + public void cBlock() throws IOException, JSONException { + final WorkerStorageImpl workerStorage = getWorkerStorage(); final Dataset dataset = new Dataset(); + dataset.setNamespacedStorageProvider(workerStorage); dataset.setName("dataset"); final TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); + concept.setDataset(dataset.getId()); concept.setName("concept"); final ConceptTreeConnector connector = new ConceptTreeConnector(); @@ -577,26 +654,25 @@ public void serialize() throws IOException, JSONException { final Table table = new Table(); table.setName("table"); - table.setDataset(dataset); + table.setDataset(dataset.getId()); - final Import imp = new Import(table); + final Import imp = new Import(table.getId()); imp.setName("import"); - final Bucket bucket = new Bucket(0, new ColumnStore[0], Object2IntMaps.emptyMap(), Object2IntMaps.emptyMap(),0, imp); + workerStorage.updateDataset(dataset); + workerStorage.addTable(table); + workerStorage.updateConcept(concept); + workerStorage.addImport(imp); + final Bucket bucket = new Bucket(0, 
Object2IntMaps.emptyMap(), Object2IntMaps.emptyMap(), 0, imp.getId(), new ColumnStore[0]); + + workerStorage.addBucket(bucket); final CBlock cBlock = CBlock.createCBlock(connector, bucket, 10); - registry.register(dataset) - .register(table) - .register(concept) - .register(connector) - .register(bucket) - .register(imp); SerializationTestUtil.forType(CBlock.class) .objectMappers(getShardInternalMapper()) - .registry(registry) .test(cBlock); } @@ -639,30 +715,6 @@ public void testNonStrictNumbers() throws JSONException, IOException { ); } - public static Stream> rangeData() { - final int SEED = 7; - Random random = new Random(SEED); - return Stream - .generate(() -> { - int first = random.nextInt(); - int second = random.nextInt(); - - if (first < second) { - return Range.of(first, second); - } - return Range.of(second, first); - }) - .filter(Range::isOrdered) - .flatMap(range -> Stream.of( - range, - Range.exactly(range.getMin()), - Range.atMost(range.getMin()), - Range.atLeast(range.getMin()) - )) - .filter(Range::isOrdered) - .limit(100); - } - @ParameterizedTest @MethodSource("rangeData") public void test(Range range) throws IOException, JSONException { @@ -843,25 +895,4 @@ public void formBackendVersion() throws JSONException, IOException { .test(version); } - - @Test - public void mapInternToExternMapper() throws JSONException, IOException, URISyntaxException { - final MapInternToExternMapper mapper = new MapInternToExternMapper( - "test1", - new URI("classpath:/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv"), - "internal", - "{{external}}" - ); - - mapper.setStorage(getNamespaceStorage()); - mapper.setConfig(getConfig()); - mapper.setMapIndex(getIndexService()); - - - mapper.init(); - - SerializationTestUtil.forType(InternToExternMapper.class) - .objectMappers(getApiMapper(), getNamespaceInternalMapper()) - .test(mapper); - } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java 
b/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java index 01b3301d79..be42149d21 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/IdpDelegatingAccessTokenCreatorTest.java @@ -10,7 +10,6 @@ import static org.mockserver.model.ParameterBody.params; import java.util.Map; - import jakarta.validation.Validator; import com.auth0.jwt.JWT; @@ -18,20 +17,23 @@ import com.bakdata.conquery.models.auth.oidc.passwordflow.IdpDelegatingAccessTokenCreator; import com.bakdata.conquery.models.config.auth.IntrospectionDelegatingRealmFactory; import com.bakdata.conquery.models.exceptions.ValidatorHelper; +import com.bakdata.conquery.util.extensions.MockServerExtension; import io.dropwizard.validation.BaseValidator; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpStatus; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.JsonBody; import org.mockserver.model.MediaType; @Slf4j public class IdpDelegatingAccessTokenCreatorTest { + @RegisterExtension + private static final MockServerExtension OIDC_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(1080), IdpDelegatingAccessTokenCreatorTest::initOIDCServer); - private static final OIDCMockServer OIDC_SERVER = new OIDCMockServer(); private static final IntrospectionDelegatingRealmFactory CONFIG = new IntrospectionDelegatingRealmFactory(); private static final Validator VALIDATOR = BaseValidator.newValidator(); @@ -45,8 +47,6 @@ public class IdpDelegatingAccessTokenCreatorTest { @BeforeAll public static void beforeAll() { - initOIDCServer(); - initRealmConfig(); idpDelegatingAccessTokenCreator = new 
IdpDelegatingAccessTokenCreator(CONFIG); @@ -57,16 +57,14 @@ private static void initRealmConfig() { CONFIG.setRealm(OIDCMockServer.REALM_NAME); CONFIG.setResource("test_cred"); CONFIG.setCredentials(Map.of(CONFIDENTIAL_CREDENTIAL, "test_cred")); - CONFIG.setAuthServerUrl(OIDCMockServer.MOCK_SERVER_URL); + CONFIG.setAuthServerUrl(OIDC_SERVER.baseUrl()); ValidatorHelper.failOnError(log, VALIDATOR.validate(CONFIG)); } - private static void initOIDCServer() { - - OIDC_SERVER.init( (server) -> { - + private static void initOIDCServer(ClientAndServer clientAndServer) { + OIDCMockServer.init(clientAndServer, (server) -> { // Mock username-password-for-token exchange server.when( request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) @@ -81,9 +79,9 @@ private static void initOIDCServer() { .withBody(JsonBody.json( new Object() { @Getter - String token_type = "Bearer"; + final String token_type = "Bearer"; @Getter - String access_token = USER_1_TOKEN; + final String access_token = USER_1_TOKEN; } ))); // Block other exchange requests (this has a lower prio than the above) @@ -112,10 +110,4 @@ public void invaildUsernamePassword() { } - @AfterAll - public static void afterAll() { - OIDC_SERVER.deinit(); - } - - } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java b/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java index 82c65ab045..768b20c8f7 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/IntrospectionDelegatingRealmTest.java @@ -13,13 +13,11 @@ import java.util.Map; import java.util.Set; import java.util.UUID; - import jakarta.validation.Validator; import com.auth0.jwt.JWT; import com.auth0.jwt.algorithms.Algorithm; import com.bakdata.conquery.io.storage.MetaStorage; -import 
com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.auth.oidc.IntrospectionDelegatingRealm; import com.bakdata.conquery.models.auth.oidc.keycloak.KeycloakApi; @@ -28,22 +26,33 @@ import com.bakdata.conquery.models.exceptions.ValidatorHelper; import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; import com.bakdata.conquery.models.identifiable.ids.specific.UserId; -import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.GroupExtension; +import com.bakdata.conquery.util.extensions.MetaStorageExtension; +import com.bakdata.conquery.util.extensions.MockServerExtension; +import com.bakdata.conquery.util.extensions.UserExtension; +import com.codahale.metrics.MetricRegistry; import io.dropwizard.validation.BaseValidator; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpStatus; import org.apache.shiro.authc.AuthenticationInfo; import org.apache.shiro.authc.BearerToken; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.MediaType; @Slf4j public class IntrospectionDelegatingRealmTest { + @RegisterExtension + private static final MockServerExtension OIDC_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(), IntrospectionDelegatingRealmTest::initOIDCServer); - private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); + @RegisterExtension + @Order(0) + private static final MetaStorageExtension STORAGE_EXTENTION = new MetaStorageExtension(new MetricRegistry()); + private static final MetaStorage STORAGE = STORAGE_EXTENTION.getMetaStorage(); private static final 
IntrospectionDelegatingRealmFactory CONFIG = new IntrospectionDelegatingRealmFactory(); private static final Validator VALIDATOR = BaseValidator.newValidator(); @@ -51,7 +60,8 @@ public class IntrospectionDelegatingRealmTest { // User 1 private static final String USER_1_NAME = "test_name1"; - private static final User USER_1 = new User(USER_1_NAME, USER_1_NAME, STORAGE); + @RegisterExtension + private static final UserExtension USER_1_EXTENSION = new UserExtension(STORAGE, USER_1_NAME); private static final String USER_1_PASSWORD = "test_password1"; public static final String BACKEND_AUD = "backend"; public static final String SOME_SECRET = "secret"; @@ -64,8 +74,9 @@ public class IntrospectionDelegatingRealmTest { // User 2 private static final String USER_2_NAME = "test_name2"; - private static final User USER_2 = new User(USER_2_NAME, USER_2_NAME, STORAGE); private static final String USER_2_LABEL = "test_label2"; + @RegisterExtension + private static final UserExtension USER_2_EXTENSION = new UserExtension(STORAGE, USER_2_NAME, USER_2_LABEL); private static final String USER_2_TOKEN = JWT.create() .withSubject(USER_2_NAME) .withAudience(BACKEND_AUD) @@ -75,8 +86,9 @@ public class IntrospectionDelegatingRealmTest { // User 3 existing private static final String USER_3_NAME = "test_name3"; - private static final User USER_3 = new User(USER_3_NAME, USER_3_NAME, STORAGE); private static final String USER_3_LABEL = "test_label3"; + @RegisterExtension + private static final UserExtension USER_3_EXTENSION = new UserExtension(STORAGE, USER_3_NAME, USER_3_LABEL); private static final String USER_3_TOKEN = JWT.create() .withSubject(USER_3_NAME) .withAudience(BACKEND_AUD) @@ -86,23 +98,23 @@ public class IntrospectionDelegatingRealmTest { // Groups private static final String GROUPNAME_1 = "group1"; - private static final Group GROUP_1_EXISTING = new Group(GROUPNAME_1, GROUPNAME_1, STORAGE); - public static final KeycloakGroup - KEYCLOAK_GROUP_1 = - new 
KeycloakGroup(UUID.randomUUID().toString(), "Group1", "g1", Map.of(GROUP_ID_ATTRIBUTE, GROUP_1_EXISTING.getId().toString()), Set.of()); + @RegisterExtension + private static final GroupExtension GROUP_1_EXISTING_EXTENSION = new GroupExtension(STORAGE, GROUPNAME_1); + public static KeycloakGroup KEYCLOAK_GROUP_1; + private static final String GROUPNAME_2 = "group2"; // Group is created during test - public static final KeycloakGroup - KEYCLOAK_GROUP_2 = - new KeycloakGroup(UUID.randomUUID().toString(), "Group2", "g2", Map.of(GROUP_ID_ATTRIBUTE, new GroupId(GROUPNAME_2).toString()), Set.of()); - public static final URI FRONT_CHANNEL_LOGOUT = URI.create("http://localhost:1080/realms/test_realm/protocol/openid-connect/logout"); + public static KeycloakGroup KEYCLOAK_GROUP_2; + public static final URI FRONT_CHANNEL_LOGOUT = URI.create("http://localhost:%d/realms/test_realm/protocol/openid-connect/logout".formatted(OIDC_SERVER.getPort())); - private static OIDCMockServer OIDC_SERVER; private static TestRealm REALM; private static KeycloakApi KEYCLOAK_API; @BeforeAll public static void beforeAll() { + KEYCLOAK_GROUP_1 = new KeycloakGroup(UUID.randomUUID().toString(), "Group1", "g1", Map.of(GROUP_ID_ATTRIBUTE, GROUP_1_EXISTING_EXTENSION.getGroup().getId().toString()), Set.of()); + KEYCLOAK_GROUP_2 = new KeycloakGroup(UUID.randomUUID().toString(), "Group2", "g2", Map.of(GROUP_ID_ATTRIBUTE, new GroupId(GROUPNAME_2).toString()), Set.of()); + KEYCLOAK_API = mock(KeycloakApi.class); doAnswer(invocation -> Set.of(KEYCLOAK_GROUP_1, KEYCLOAK_GROUP_2)).when(KEYCLOAK_API) .getGroupHierarchy(); @@ -116,21 +128,20 @@ public static void beforeAll() { } ).when(KEYCLOAK_API).getUserGroups(any(String.class)); - initOIDCServer(); initRealm(); } - + @BeforeEach public void beforeEach() { // clear storage underlying data structures STORAGE.clear(); - + // Clear Token Cache REALM.getTokenCache().invalidateAll(); - - // add existing group to storage - STORAGE.addGroup(GROUP_1_EXISTING); + + // 
add existing group to storage + STORAGE.addGroup(GROUP_1_EXISTING_EXTENSION.getGroup()); } @@ -139,131 +150,130 @@ private static void initRealm() { CONFIG.setResource("backend"); CONFIG.setGroupIdAttribute(GROUP_ID_ATTRIBUTE); CONFIG.setCredentials(Map.of(CONFIDENTIAL_CREDENTIAL, "test_cred")); - CONFIG.setAuthServerUrl(OIDCMockServer.MOCK_SERVER_URL); + CONFIG.setAuthServerUrl(OIDC_SERVER.baseUrl()); ValidatorHelper.failOnError(log, VALIDATOR.validate(CONFIG)); REALM = new TestRealm(STORAGE, CONFIG); } - private static void initOIDCServer() { - OIDC_SERVER = new OIDCMockServer(); - - OIDC_SERVER.init( (server) -> { - - + private static void initOIDCServer(ClientAndServer mockServer) { // Mock username-password-for-token exchange - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")).withBody( - params( - param("password", USER_1_PASSWORD), - param("grant_type", "password"), - param("username", USER_1_NAME), - param("scope", "openid")))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"token_type\" : \"Bearer\",\"access_token\" : \"" + USER_1_TOKEN + "\"}")); - // Block other exchange requests (this has a lower prio than the above) - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME))) - .respond( - response().withStatusCode(HttpStatus.SC_FORBIDDEN).withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"error\" : \"Wrong username or password\"")); - - // Mock token introspection - // For USER 1 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - 
.withBody(params(param("token_type_hint", "access_token"), param("token", USER_1_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"username\" : \"" + USER_1_NAME + "\", \"active\": true}")); - // For USER 2 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - .withBody(params(param("token_type_hint", "access_token"), param("token", USER_2_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"username\" : \"" + USER_2_NAME + "\",\"name\" : \"" + USER_2_LABEL + "\", \"active\": true}")); - // For USER 3 - server.when( - request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) - .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) - .withBody(params(param("token_type_hint", "access_token"), param("token", USER_3_TOKEN)))) - .respond( - response().withContentType(MediaType.APPLICATION_JSON_UTF_8) - .withBody("{\"username\" : \"" + USER_3_NAME + "\",\"name\" : \"" + USER_3_LABEL + "\", \"active\": true}")); - - }); + OIDCMockServer.init( + mockServer, + (server) -> { + server.when( + request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")).withBody( + params( + param("password", USER_1_PASSWORD), + param("grant_type", "password"), + param("username", USER_1_NAME), + param("scope", "openid") + ))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"token_type\" : \"Bearer\",\"access_token\" : \"" + USER_1_TOKEN + "\"}")); + + // Block other exchange requests (this has a lower prio 
than the above) + server.when( + request().withMethod("POST").withPath(String.format("/realms/%s/protocol/openid-connect/token", OIDCMockServer.REALM_NAME))) + .respond( + response().withStatusCode(HttpStatus.SC_FORBIDDEN).withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"error\" : \"Wrong username or password\"")); + + // Mock token introspection + // For USER 1 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_1_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_1_NAME + "\", \"active\": true}")); + // For USER 2 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_2_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_2_NAME + "\",\"name\" : \"" + USER_2_LABEL + "\", \"active\": true}")); + // For USER 3 + server.when( + request().withMethod("POST") + .withPath(String.format("/realms/%s/protocol/openid-connect/token/introspect", OIDCMockServer.REALM_NAME)) + .withHeaders(header("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8")) + .withBody(params(param("token_type_hint", "access_token"), param("token", USER_3_TOKEN)))) + .respond( + response().withContentType(MediaType.APPLICATION_JSON_UTF_8) + .withBody("{\"username\" : \"" + USER_3_NAME + "\",\"name\" : \"" + USER_3_LABEL + "\", \"active\": true}")); + } + ); } @Test public void 
tokenIntrospectionSimpleUserNew() { AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER1_TOKEN_WRAPPED); - + assertThat(info) .usingRecursiveComparison() + .usingOverriddenEquals() .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) .ignoringFieldsOfTypes(User.ShiroUserAdapter.class) - .isEqualTo(new ConqueryAuthenticationInfo(USER_1, USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(new User(USER_1_NAME, USER_1_NAME, STORAGE)); + .isEqualTo(new ConqueryAuthenticationInfo(USER_1_EXTENSION.getUser(), USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(new User(USER_1_NAME, USER_1_NAME, STORAGE_EXTENTION.getMetaStorage())); } - + @Test public void tokenIntrospectionSimpleUserExisting() { - STORAGE.addUser(USER_1); - + AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER1_TOKEN_WRAPPED); - + assertThat(info) - .usingRecursiveComparison() - .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) - .isEqualTo(new ConqueryAuthenticationInfo(USER_1, USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_1); + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) + .isEqualTo(new ConqueryAuthenticationInfo(USER_1_EXTENSION.getUser(), USER1_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_1_EXTENSION.getUser()); } - + @Test public void tokenIntrospectionGroupedUser() { - STORAGE.addUser(USER_2); AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER_2_TOKEN_WRAPPED); - final ConqueryAuthenticationInfo expected = new ConqueryAuthenticationInfo(USER_2, USER_2_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT); + final ConqueryAuthenticationInfo expected = new ConqueryAuthenticationInfo(USER_2_EXTENSION.getUser(), USER_2_TOKEN_WRAPPED, REALM, true, 
FRONT_CHANNEL_LOGOUT); assertThat(info) - .usingRecursiveComparison() - .isEqualTo(expected); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_2); + .usingRecursiveComparison() + .usingOverriddenEquals() + .isEqualTo(expected); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_2_EXTENSION.getUser()); assertThat(STORAGE.getAllGroups()).hasSize(2); // Pre-existing group and a second group that has been added in the process assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).contains(new UserId(USER_2_NAME)); assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_2)).getMembers()).contains(new UserId(USER_2_NAME)); } - + @Test public void tokenIntrospectionGroupedUserRemoveGroupMapping() { - STORAGE.addUser(USER_3); - GROUP_1_EXISTING.addMember(USER_3); - + GROUP_1_EXISTING_EXTENSION.getGroup().addMember(USER_3_EXTENSION.getUser()); + assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).contains(new UserId(USER_3_NAME)); - + AuthenticationInfo info = REALM.doGetAuthenticationInfo(USER_3_TOKEN_WRAPPED); - + assertThat(info) - .usingRecursiveComparison() - .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) - .isEqualTo(new ConqueryAuthenticationInfo(USER_3, USER_3_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); - assertThat(STORAGE.getAllUsers()).containsOnly(USER_3); + .usingRecursiveComparison() + .usingOverriddenEquals() + .ignoringFields(ConqueryAuthenticationInfo.Fields.credentials) + .isEqualTo(new ConqueryAuthenticationInfo(USER_3_EXTENSION.getUser(), USER_3_TOKEN_WRAPPED, REALM, true, FRONT_CHANNEL_LOGOUT)); + assertThat(STORAGE.getAllUsers()).containsOnly(USER_3_EXTENSION.getUser()); assertThat(STORAGE.getAllGroups()).hasSize(1); // Pre-existing group assertThat(STORAGE.getGroup(new GroupId(GROUPNAME_1)).getMembers()).doesNotContain(new UserId(USER_3_NAME)); } - @AfterAll - public static void afterAll() { - OIDC_SERVER.deinit(); - } - private static class TestRealm extends IntrospectionDelegatingRealm { public 
TestRealm(MetaStorage storage, IntrospectionDelegatingRealmFactory config) { diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java b/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java index d610877011..6df41067e0 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/OIDCMockServer.java @@ -1,13 +1,13 @@ package com.bakdata.conquery.models.auth; import static org.junit.Assert.fail; -import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; import static org.mockserver.model.HttpResponse.response; import java.util.function.Consumer; import lombok.Getter; +import lombok.experimental.UtilityClass; import lombok.extern.slf4j.Slf4j; import org.mockserver.integration.ClientAndServer; import org.mockserver.mock.action.ExpectationResponseCallback; @@ -16,42 +16,36 @@ import org.mockserver.model.JsonBody; @Slf4j +@UtilityClass public class OIDCMockServer { - public static final int MOCK_SERVER_PORT = 1080; - public static final String MOCK_SERVER_URL = "http://localhost:" + MOCK_SERVER_PORT; public static final String REALM_NAME = "test_realm"; - private final ClientAndServer OIDC_SERVER; - - public OIDCMockServer() { - OIDC_SERVER = startClientAndServer(MOCK_SERVER_PORT); - } - - public OIDCMockServer(int port) { - OIDC_SERVER = startClientAndServer(port); + public static void init(ClientAndServer server) { + init(server, (_server) -> {}); } + public static void init(ClientAndServer server, Consumer testMappings) { - public void init(Consumer testMappings) { + String mockServerUrl = "http://localhost:%d".formatted(server.getPort()); // Mock well-known discovery endpoint (this is actually the output of keycloak) - OIDC_SERVER.when(request().withMethod("GET").withPath("/realms/" + REALM_NAME + "/.well-known/uma2-configuration")) + 
server.when(request().withMethod("GET").withPath("/realms/" + REALM_NAME + "/.well-known/uma2-configuration")) .respond( response().withBody( JsonBody.json( new Object() { @Getter - final String issuer = MOCK_SERVER_URL; + final String issuer = mockServerUrl; @Getter - final String authorization_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/auth"; + final String authorization_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/auth"; @Getter - final String token_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/token"; + final String token_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/token"; @Getter - final String introspection_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/token/introspect"; + final String introspection_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/token/introspect"; @Getter - final String end_session_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/logout"; + final String end_session_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/logout"; @Getter - final String jwks_uri = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/protocol/openid-connect/certs"; + final String jwks_uri = mockServerUrl + "/realms/" + REALM_NAME + "/protocol/openid-connect/certs"; @Getter final String[] grant_types_supported = {"authorization_code", "implicit", "refresh_token", "password", "client_credentials"}; @Getter @@ -59,7 +53,7 @@ public void init(Consumer testMappings) { @Getter final String[] response_modes_supported = {"query", "fragment", "form_post"}; @Getter - final String registration_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/clients-registrations/openid-connect"; + final String registration_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/clients-registrations/openid-connect"; @Getter final 
String[] token_endpoint_auth_methods_supported = {"private_key_jwt", "client_secret_basic", "client_secret_post", "tls_client_auth", "client_secret_jwt"}; @Getter @@ -67,20 +61,20 @@ public void init(Consumer testMappings) { @Getter final String[] scopes_supported = {"openid", "address", "email", "microprofile-jwt", "offline_access", "phone", "profile", "roles", "web-origins"}; @Getter - final String resource_registration_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/resource_set"; + final String resource_registration_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/resource_set"; @Getter - final String permission_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/permission"; + final String permission_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/permission"; @Getter - final String policy_endpoint = MOCK_SERVER_URL + "/realms/" + REALM_NAME + "/authz/protection/uma-policy"; + final String policy_endpoint = mockServerUrl + "/realms/" + REALM_NAME + "/authz/protection/uma-policy"; } ) )); // Register test provided mappings - testMappings.accept(OIDC_SERVER); + testMappings.accept(server); // At last (so it has the lowest priority): initialize a trap for debugging, that captures all unmapped requests - OIDC_SERVER.when(request()).respond(new ExpectationResponseCallback() { + server.when(request()).respond(new ExpectationResponseCallback() { @Override public HttpResponse handle(HttpRequest httpRequest) throws Exception { @@ -96,8 +90,4 @@ public HttpResponse handle(HttpRequest httpRequest) throws Exception { } }); } - - public void deinit() { - OIDC_SERVER.stop(); - } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java b/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java index f12774d576..cf95a1e09b 100644 --- 
a/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/auth/oidc/IdpConfigRetrievalTest.java @@ -5,29 +5,29 @@ import static org.mockserver.model.HttpResponse.response; import java.net.URI; - import jakarta.ws.rs.client.Client; import com.bakdata.conquery.models.auth.OIDCMockServer; import com.bakdata.conquery.models.config.auth.JwtPkceVerifyingRealmFactory; +import com.bakdata.conquery.util.extensions.MockServerExtension; import io.dropwizard.client.JerseyClientBuilder; import io.dropwizard.core.setup.Environment; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.mockserver.integration.ClientAndServer; import org.mockserver.model.JsonBody; public class IdpConfigRetrievalTest { + @RegisterExtension + private static final MockServerExtension OIDC_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(), IdpConfigRetrievalTest::init); - private static final OIDCMockServer OIDC_MOCK_SERVER = new OIDCMockServer(); private static final JwtPkceVerifyingRealmFactory REALM_FACTORY = new JwtPkceVerifyingRealmFactory(); private static final Client CLIENT = new JerseyClientBuilder(new Environment("oidc-test")).build("oidc-test-client"); - @BeforeAll - static void init() { - OIDC_MOCK_SERVER.init((server) -> { + private static void init(ClientAndServer mockServer) { + OIDCMockServer.init(mockServer, (server) -> { // MOCK JWK Endpoint (1 signing + 1 encryption key) server.when(request().withMethod("GET").withPath("/realms/" + OIDCMockServer.REALM_NAME + "/protocol/openid-connect/certs")) .respond( @@ -36,15 +36,10 @@ static void init() { ))); }); - REALM_FACTORY.setWellKnownEndpoint(URI.create(OIDCMockServer.MOCK_SERVER_URL + REALM_FACTORY.setWellKnownEndpoint(URI.create(OIDC_SERVER.baseUrl() + "/realms/" + OIDCMockServer.REALM_NAME + 
"/.well-known/uma2-configuration")); } - @AfterAll - static void deinit() { - OIDC_MOCK_SERVER.deinit(); - } - @Test void getConfig() { assertThatCode(() -> REALM_FACTORY.retrieveIdpConfiguration(CLIENT)).doesNotThrowAnyException(); diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java index 7814dad2f6..c179713dce 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/filters/TestGroupFilter.java @@ -5,6 +5,8 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.LongStream; +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotEmpty; import com.bakdata.conquery.apiv1.frontend.FrontendFilterConfiguration; import com.bakdata.conquery.apiv1.frontend.FrontendFilterType; @@ -18,8 +20,6 @@ import com.fasterxml.jackson.annotation.JsonView; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.type.TypeFactory; -import jakarta.validation.constraints.Min; -import jakarta.validation.constraints.NotEmpty; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.experimental.FieldNameConstants; @@ -36,7 +36,7 @@ public void configureFrontend(FrontendFilterConfiguration.Top f, ConqueryConfig @Override public FilterNode createFilterNode(GroupFilterValue compoundFilterValue) { - return new MultiSelectFilterNode(getColumn(), Set.of(compoundFilterValue.getResolvedValues())); + return new MultiSelectFilterNode(getColumn().resolve(), Set.of(compoundFilterValue.getResolvedValues())); } private Map getFEFilter() { diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java 
b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java index d6b263ec37..baa15d8444 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/frontend/FilterSearchItemTest.java @@ -6,6 +6,7 @@ import com.bakdata.conquery.apiv1.frontend.FrontendTable; import com.bakdata.conquery.apiv1.frontend.FrontendValue; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; @@ -15,43 +16,50 @@ import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.MajorTypeId; +import com.bakdata.conquery.util.extensions.NamespaceStorageExtension; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class FilterSearchItemTest { + @RegisterExtension + private static final NamespaceStorageExtension NAMESPACE_STORAGE_EXTENSION = new NamespaceStorageExtension(); + private static final NamespacedStorage NAMESPACED_STORAGE = NAMESPACE_STORAGE_EXTENSION.getStorage(); + @Test public void sortedValidityDates() { Dataset dataset = new Dataset(); dataset.setName("testDataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); Table table = new Table(); - table.setDataset(dataset); + table.setDataset(dataset.getId()); table.setName("testTable"); + NAMESPACED_STORAGE.addTable(table); Column column = new Column(); column.setName("testColumn"); column.setTable(table); Column dateColumn1 = new Column(); - column.setName("dateColumn1"); - column.setType(MajorTypeId.DATE); - column.setTable(table); + 
dateColumn1.setName("dateColumn1"); + dateColumn1.setType(MajorTypeId.DATE); + dateColumn1.setTable(table); Column dateColumn2 = new Column(); - column.setName("dateColumn2"); - column.setType(MajorTypeId.DATE); - column.setTable(table); - + dateColumn2.setName("dateColumn2"); + dateColumn2.setType(MajorTypeId.DATE); + dateColumn2.setTable(table); + TreeConcept concept = new TreeConcept(); + concept.setDataset(dataset.getId()); + concept.setName("testConcept"); ConceptTreeConnector connector = new ConceptTreeConnector(); connector.setName("testConnector"); - TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); - concept.setName("testConcept"); - ValidityDate val0 = ValidityDate.create(dateColumn1); val0.setName("val0"); val0.setConnector(connector); @@ -65,11 +73,12 @@ public void sortedValidityDates() { val2.setConnector(connector); List validityDates = List.of(val0, val1, val2); - connector.setColumn(column); + connector.setColumn(column.getId()); connector.setConcept(concept); connector.setValidityDates(validityDates); + FrontendTable feTable = new FrontEndConceptBuilder(new ConqueryConfig()).createTable(connector); - + assertThat(feTable.getDateColumn().getOptions()).containsExactly( new FrontendValue(val0.getId().toString(), "val0"), new FrontendValue(val1.getId().toString(), "val1"), diff --git a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java index 50783215aa..768688bb00 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/datasets/concepts/tree/GroovyIndexedTest.java @@ -8,26 +8,27 @@ import java.util.Random; import java.util.function.Supplier; import java.util.stream.Stream; +import jakarta.validation.Validator; import com.bakdata.conquery.io.jackson.Injectable; import 
com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.io.jackson.MutableInjectableValues; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; -import com.bakdata.conquery.models.datasets.concepts.ConceptElement; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.exceptions.ConfigurationException; import com.bakdata.conquery.models.exceptions.JSONException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.SingletonNamespaceCollection; import com.bakdata.conquery.util.CalculatedValue; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.node.ObjectNode; import com.github.powerlibraries.io.In; import io.dropwizard.jersey.validation.Validators; -import jakarta.validation.Validator; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.parallel.Execution; @@ -48,10 +49,10 @@ public static Stream getTestKeys() { ); return Stream.of( - "A13B", "I43A", "H41B", "B05Z", "L02C", "L12Z", "H08A", "I56B", "I03A", "E79C", "B80Z", "I47A", "N13A", "G08B", "F43B", "P04A", "T36Z", "T36Z", "N11A", "D13A", "R01D", "F06A", "F24A", "O03Z", "P01Z", "R63D", "A13A", "O05A", "G29B", "I18A", "J08A", "E74Z", "D06C", "H36Z", "H05Z", "P65B", "I09A", "A66Z", "F12E", "Q60E", "I46B", "I97Z", "I78Z", "T01B", "J24C", "A62Z", "Q01Z", "N25Z", "A01B", "G02A" - , "ZULU" // This may not fail, but return null on both sides - ) - .map(v -> Arguments.of(v, rowMap.get())); + "A13B", "I43A", "H41B", "B05Z", "L02C", "L12Z", "H08A", "I56B", 
"I03A", "E79C", "B80Z", "I47A", "N13A", "G08B", "F43B", "P04A", "T36Z", "T36Z", "N11A", "D13A", "R01D", "F06A", "F24A", "O03Z", "P01Z", "R63D", "A13A", "O05A", "G29B", "I18A", "J08A", "E74Z", "D06C", "H36Z", "H05Z", "P65B", "I09A", "A66Z", "F12E", "Q60E", "I46B", "I97Z", "I78Z", "T01B", "J24C", "A62Z", "Q01Z", "N25Z", "A01B", "G02A" + , "ZULU" // This may not fail, but return null on both sides + ) + .map(v -> Arguments.of(v, rowMap.get())); } private static TreeConcept indexedConcept; @@ -60,21 +61,23 @@ public static Stream getTestKeys() { @BeforeAll public static void init() throws IOException, JSONException, ConfigurationException { - ObjectNode node = Jackson.MAPPER.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream()); + final ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + ObjectNode node = mapper.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream()); // load concept tree from json - CentralRegistry registry = new CentralRegistry(); - + final NamespaceStorage storage = new NamespaceStorage(new NonPersistentStoreFactory(), "GroovyIndexedTest"); + storage.openStores(mapper, new MetricRegistry()); Table table = new Table(); table.setName("the_table"); Dataset dataset = new Dataset(); dataset.setName("the_dataset"); + dataset.injectInto(mapper); - registry.register(dataset); + storage.updateDataset(dataset); - table.setDataset(dataset); + table.setDataset(dataset.getId()); Column column = new Column(); column.setName("the_column"); @@ -83,25 +86,27 @@ public static void init() throws IOException, JSONException, ConfigurationExcept table.setColumns(new Column[]{column}); column.setTable(table); - registry.register(table); - registry.register(column); - + storage.addTable(table); // Prepare Serdes injections - ObjectMapper mapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); - ((Injectable) values -> values.add(Validator.class, 
Validators.newValidator())).injectInto(mapper); - new SingletonNamespaceCollection(registry).injectInto(mapper); - dataset.injectInto(mapper); - final ObjectReader conceptReader = mapper.readerFor(Concept.class); - - // load tree twice to avoid references + final Validator validator = Validators.newValidator(); + final ObjectReader conceptReader = new Injectable(){ + @Override + public MutableInjectableValues inject(MutableInjectableValues values) { + return values.add(Validator.class, validator); + } + }.injectInto(mapper).readerFor(Concept.class); + + // load tree twice to to avoid references indexedConcept = conceptReader.readValue(node); - indexedConcept.setDataset(dataset); + indexedConcept.setDataset(dataset.getId()); + indexedConcept.initElements(); oldConcept = conceptReader.readValue(node); - oldConcept.setDataset(dataset); + oldConcept.setDataset(dataset.getId()); + oldConcept.initElements(); } @@ -110,11 +115,11 @@ public static void init() throws IOException, JSONException, ConfigurationExcept public void basic(String key, CalculatedValue> rowMap) throws JSONException { log.trace("Searching for {}", key); - ConceptElement idxResult = indexedConcept.findMostSpecificChild(key, rowMap); - ConceptElement oldResult = oldConcept.findMostSpecificChild(key, rowMap); + ConceptTreeChild idxResult = indexedConcept.findMostSpecificChild(key, rowMap); + ConceptTreeChild oldResult = oldConcept.findMostSpecificChild(key, rowMap); assertThat(oldResult.getId()).describedAs("%s hierarchical name", key).isEqualTo(idxResult.getId()); } -} +} \ No newline at end of file diff --git a/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java b/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java index 22de3ff2dd..d411065ba6 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java +++ 
b/backend/src/test/java/com/bakdata/conquery/models/events/stores/types/ColumnStoreSerializationTests.java @@ -11,6 +11,8 @@ import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.serializer.SerializationTestUtil; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.WorkerStorageImpl; import com.bakdata.conquery.mode.cluster.InternalMapperFactory; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; @@ -33,8 +35,8 @@ import com.bakdata.conquery.models.events.stores.specific.RebasingIntegerStore; import com.bakdata.conquery.models.events.stores.specific.ScaledDecimalStore; import com.bakdata.conquery.models.exceptions.JSONException; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.worker.ShardWorkers; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import io.dropwizard.jersey.validation.Validators; @@ -51,20 +53,21 @@ public class ColumnStoreSerializationTests { */ private static final Set> EXCLUDING = Set.of(CompoundDateRangeStore.class); - private static final CentralRegistry CENTRAL_REGISTRY = new CentralRegistry(); + private static final NamespaceStorage STORAGE = new NamespaceStorage(new NonPersistentStoreFactory(), "ColumnStoreSerializationTests"); private static ObjectMapper shardInternalMapper; private static ConqueryConfig config; @BeforeAll public static void setupRegistry() { - CENTRAL_REGISTRY.register(Dataset.PLACEHOLDER); + STORAGE.openStores(null, new MetricRegistry()); + STORAGE.updateDataset(Dataset.PLACEHOLDER); // Prepare shard node internal mapper config = new ConqueryConfig(); InternalMapperFactory internalMapperFactory = new InternalMapperFactory(config, Validators.newValidator()); - shardInternalMapper = 
internalMapperFactory.createWorkerPersistenceMapper(mock(ShardWorkers.class)); + shardInternalMapper = internalMapperFactory.createWorkerPersistenceMapper(mock(WorkerStorageImpl.class)); } @Test @@ -115,7 +118,6 @@ public void testSerialization(ColumnStore type) throws IOException, JSONExceptio SerializationTestUtil .forType(ColumnStore.class) .objectMappers(shardInternalMapper) - .registry(CENTRAL_REGISTRY) .test(type); } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java index 34919dd389..ae7ca64fbe 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/execution/DefaultLabelTest.java @@ -2,8 +2,6 @@ import static com.bakdata.conquery.models.execution.ManagedExecution.AUTO_LABEL_SUFFIX; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; import java.time.LocalDateTime; import java.util.List; @@ -12,54 +10,57 @@ import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; import com.bakdata.conquery.apiv1.query.ConceptQuery; -import com.bakdata.conquery.apiv1.query.Query; import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.forms.managed.ManagedForm; 
import com.bakdata.conquery.models.i18n.I18n; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.models.query.PrintSettings; import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; +import org.mockito.Mockito; public class DefaultLabelTest { + public static final ConqueryConfig CONFIG = new ConqueryConfig(); private final static MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); - - private static final Namespace NAMESPACE = mock(LocalNamespace.class); + private final static NamespaceStorage NS_ID_RESOLVER = new NonPersistentStoreFactory().createNamespaceStorage(); + private static final Namespace NAMESPACE = Mockito.mock(LocalNamespace.class); private static final Dataset DATASET = new Dataset("dataset"); - private static final User user = new User("user","user", STORAGE); - - private static final TreeConcept CONCEPT = new TreeConcept() { - { - setDataset(DATASET); - setName("defaultconcept"); - setLabel("Default Concept"); - } - }; - public static final ConqueryConfig CONFIG = new ConqueryConfig(); + private static final User user = new User("user", "user", STORAGE); + private static final TreeConcept CONCEPT = new TreeConcept(); @BeforeAll public static void beforeAll() { + DATASET.setNamespacedStorageProvider(NS_ID_RESOLVER); + NS_ID_RESOLVER.updateDataset(DATASET); + + // no mapper required - STORAGE.openStores(null); + STORAGE.openStores(null, new MetricRegistry()); - I18n.init(); + CONCEPT.setDataset(DATASET.getId()); + CONCEPT.setName("defaultconcept"); + CONCEPT.setLabel("Default Concept"); + + 
NS_ID_RESOLVER.updateConcept(CONCEPT); - doAnswer((invocation -> CONCEPT)).when(NAMESPACE) - .resolve(CONCEPT.getId()); + I18n.init(); } @ParameterizedTest @@ -70,9 +71,9 @@ public static void beforeAll() { void autoLabelConceptQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - CQConcept concept = makeCQConcept("Concept"); + CQConcept concept = makeCQConceptWithLabel("Concept"); ConceptQuery cq = new ConceptQuery(concept); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -80,19 +81,19 @@ void autoLabelConceptQuery(Locale locale, String autoLabel) { assertThat(mQuery.getLabelWithoutAutoLabelSuffix()).isEqualTo(autoLabel); } - @NotNull - private PrintSettings getPrintSettings(Locale locale) { - return new PrintSettings(true, locale, NAMESPACE, CONFIG, null, null); - } - - private static CQConcept makeCQConcept(String label) { + private static CQConcept makeCQConceptWithLabel(String label) { CQConcept concept = new CQConcept(); concept.setLabel(label); - concept.setElements(List.of(CONCEPT)); + concept.setElements(List.of(CONCEPT.getId())); return concept; } + @NotNull + private PrintSettings getPrintSettings(Locale locale) { + return new PrintSettings(true, locale, NAMESPACE, CONFIG, null, null); + } + @ParameterizedTest @CsvSource({ "de,Default-Concept", @@ -103,9 +104,9 @@ void autoLabelConceptQueryFallback(Locale locale, String autoLabel) { CQConcept concept = new CQConcept(); concept.setLabel(null); - concept.setElements(List.of(CONCEPT)); + concept.setElements(List.of(CONCEPT.getId())); ConceptQuery cq = new ConceptQuery(concept); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); UUID uuid = UUID.randomUUID(); mQuery.setQueryId(uuid); @@ 
-124,12 +125,12 @@ void autoLabelConceptQueryFallback(Locale locale, String autoLabel) { void autoLabelReusedQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQReusedQuery reused = new CQReusedQuery(managedQuery.getId()); ConceptQuery cq = new ConceptQuery(reused); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -148,7 +149,7 @@ void autoLabelUploadQuery(Locale locale, String autoLabel) { CQExternal external = new CQExternal(List.of(), new String[0][0], false); ConceptQuery cq = new ConceptQuery(external); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -164,13 +165,13 @@ void autoLabelUploadQuery(Locale locale, String autoLabel) { void autoLabelComplexQuery(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQAnd and = new CQAnd(); - CQConcept concept1 = makeCQConcept("Concept1"); - CQConcept concept2 = makeCQConcept("Concept2"); - CQConcept concept3 = makeCQConcept("Concept3veryveryveryveryveryveryveryverylooooooooooooooooooooonglabel"); + CQConcept concept1 = makeCQConceptWithLabel("Concept1"); + CQConcept concept2 = 
makeCQConceptWithLabel("Concept2"); + CQConcept concept3 = makeCQConceptWithLabel("Concept3veryveryveryveryveryveryveryverylooooooooooooooooooooonglabel"); and.setChildren(List.of( new CQExternal(List.of(), new String[0][0], false), @@ -180,7 +181,7 @@ void autoLabelComplexQuery(Locale locale, String autoLabel) { concept3 )); ConceptQuery cq = new ConceptQuery(and); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -197,15 +198,15 @@ void autoLabelComplexQuery(Locale locale, String autoLabel) { void autoLabelComplexQueryNullLabels(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); - final ManagedQuery managedQuery = new ManagedQuery(mock(Query.class), mock(User.class), DATASET, STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(null, new UserId("test"), DATASET.getId(), STORAGE, null); managedQuery.setQueryId(UUID.randomUUID()); CQAnd and = new CQAnd(); CQConcept concept1 = new CQConcept(); concept1.setLabel(null); - concept1.setElements(List.of(CONCEPT)); - CQConcept concept2 = makeCQConcept("Concept2"); - CQConcept concept3 = makeCQConcept("Concept3"); + concept1.setElements(List.of(CONCEPT.getId())); + CQConcept concept2 = makeCQConceptWithLabel("Concept2"); + CQConcept concept3 = makeCQConceptWithLabel("Concept3"); and.setChildren(List.of( new CQExternal(List.of(), new String[0][0], false), new CQReusedQuery(managedQuery.getId()), @@ -214,7 +215,7 @@ void autoLabelComplexQueryNullLabels(Locale locale, String autoLabel) { concept3 )); ConceptQuery cq = new ConceptQuery(and); - ManagedQuery mQuery = cq.toManagedExecution(user, DATASET, STORAGE, null); + ManagedQuery mQuery = cq.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mQuery.setLabel(mQuery.makeAutoLabel(getPrintSettings(locale))); @@ -231,7 +232,7 @@ void 
autoLabelExportForm(Locale locale, String autoLabel) { I18n.LOCALE.set(locale); ExportForm form = new ExportForm(); - ManagedForm mForm = form.toManagedExecution(user, DATASET, STORAGE, null); + ManagedForm mForm = form.toManagedExecution(user.getId(), DATASET.getId(), STORAGE, null); mForm.setCreationTime(LocalDateTime.of(2020, 10, 30, 12, 37)); mForm.setLabel(mForm.makeAutoLabel(getPrintSettings(locale))); diff --git a/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java b/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java index ed9e8d76b9..7ba606498d 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/identifiable/IdMapSerialisationTest.java @@ -1,13 +1,14 @@ package com.bakdata.conquery.models.identifiable; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.identifiable.mapping.EntityPrintId; import com.bakdata.conquery.models.identifiable.mapping.ExternalId; public class IdMapSerialisationTest { - public static EntityIdMap createTestPersistentMap() { - EntityIdMap entityIdMap = new EntityIdMap(); + public static EntityIdMap createTestPersistentMap(NamespaceStorage namespaceStorage) { + EntityIdMap entityIdMap = new EntityIdMap(namespaceStorage); entityIdMap.addInputMapping("test1", new ExternalId("id", "a")); diff --git a/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java b/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java index 3f5bb9ca82..e070ecc5cd 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java +++ b/backend/src/test/java/com/bakdata/conquery/models/identifiable/ids/IdTests.java @@ -2,30 +2,76 @@ import static org.assertj.core.api.Assertions.assertThat; import 
static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; import java.io.IOException; import java.lang.reflect.Modifier; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - import com.bakdata.conquery.io.cps.CPSTypeIdResolver; import com.bakdata.conquery.io.jackson.Jackson; +import com.bakdata.conquery.mode.cluster.InternalMapperFactory; +import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.identifiable.Identifiable; import com.bakdata.conquery.models.identifiable.ids.IdUtil.Parser; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; import com.bakdata.conquery.models.identifiable.ids.specific.ConceptTreeChildId; import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; -import com.fasterxml.jackson.core.JsonParseException; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.JsonMappingException; +import com.bakdata.conquery.models.worker.DatasetRegistry; +import com.bakdata.conquery.util.NonPersistentStoreFactory; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectReader; +import io.dropwizard.jersey.validation.Validators; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; public class IdTests { + public static Stream reflectionTest() { + return CPSTypeIdResolver + .SCAN_RESULT + .getClassesImplementing(Identifiable.class.getName()).loadClasses() + .stream() + .filter(cl -> !cl.isInterface()) + .filter(cl -> !Modifier.isAbstract(cl.getModifiers())) + //filter test classes + .filter(cl -> !cl.toString().toLowerCase().contains("test")) + .map(cl -> { + + 
Class idClazz = null; + // Try to get the specific Id + try { + idClazz = cl.getMethod("getId").getReturnType(); + + } + catch (NoSuchMethodException e) { + return fail(cl.getName() + " does not implement the method 'getId()'"); + } + + if (Modifier.isAbstract(idClazz.getModifiers())) { + try { + idClazz = cl.getMethod("createId").getReturnType(); + + } + catch (NoSuchMethodException e) { + return fail(cl.getName() + " does not implement the method 'createId()' unable to retrieve specific id class"); + } + } + + String packageString = "com.bakdata.conquery.models.identifiable.ids.specific."; + if (!idClazz.getName().startsWith(packageString)) { + return fail("The id class " + idClazz + " is not located in the package " + packageString + ". Please clean that up."); + } + + return Arguments.of( + cl, + idClazz + ); + }); + } + @Test public void testEquals() { ConceptTreeChildId idA = new ConceptTreeChildId( @@ -38,7 +84,7 @@ public void testEquals() { ), "4" ); - + ConceptTreeChildId idB = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -49,7 +95,7 @@ public void testEquals() { ), "4" ); - + assertThat(idA).isEqualTo(idB); assertThat(idA).hasSameHashCodeAs(idB); assertThat(idA.toString()).isEqualTo(idB.toString()); @@ -67,16 +113,16 @@ public void testStringSerialization() { ), "4" ); - + ConceptTreeChildId copy = ConceptTreeChildId.Parser.INSTANCE.parse(id.toString()); - + assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } @Test - public void testJacksonSerialization() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { + public void testJacksonSerialization() throws IOException { ConceptTreeChildId id = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -87,22 +133,29 @@ public void testJacksonSerialization() throws JsonParseException, JsonMappingExc ), "4" ); - + ObjectMapper mapper = Jackson.MAPPER; ConceptTreeChildId 
copy = mapper.readValue(mapper.writeValueAsBytes(id), ConceptTreeChildId.class); - + assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } @Test - public void testInterning() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { - String raw = "1.concepts.2.3.4"; - - ConceptTreeChildId id1 = ConceptTreeChildId.Parser.INSTANCE.parse(raw); - ConceptTreeChildId id2 = ConceptTreeChildId.Parser.INSTANCE.parse(raw); - + public void testInterning() throws IOException { + + InternalMapperFactory internalMapperFactory = new InternalMapperFactory(new ConqueryConfig(), Validators.newValidator()); + ObjectMapper objectMapper = Jackson.copyMapperAndInjectables(Jackson.MAPPER); + internalMapperFactory.customizeApiObjectMapper(objectMapper, mock(DatasetRegistry.class), new NonPersistentStoreFactory().createMetaStorage()); + + ObjectReader objectReader = objectMapper.readerFor(ConceptTreeChildId.class); + + String raw = "\"1.concepts.2.3.4\""; + + ConceptTreeChildId id1 = objectReader.readValue(raw); + ConceptTreeChildId id2 = objectReader.readValue(raw); + assertThat(id1).isSameAs(id2); assertThat(id1.getParent()).isSameAs(id2.getParent()); assertThat(id1.findConcept()).isSameAs(id2.findConcept()); @@ -110,7 +163,7 @@ public void testInterning() throws JsonParseException, JsonMappingException, Jso } @Test - public void testJacksonBinarySerialization() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException { + public void testJacksonBinarySerialization() throws IOException { ConceptTreeChildId id = new ConceptTreeChildId( new ConceptTreeChildId( new ConceptId( @@ -121,57 +174,14 @@ public void testJacksonBinarySerialization() throws JsonParseException, JsonMapp ), "4" ); - + ObjectMapper mapper = Jackson.BINARY_MAPPER; ConceptTreeChildId copy = mapper.readValue(mapper.writeValueAsBytes(id), ConceptTreeChildId.class); - + 
assertThat(copy).isEqualTo(id); assertThat(copy).hasSameHashCodeAs(id); assertThat(copy.toString()).isEqualTo(id.toString()); } - - public static Stream reflectionTest() { - return CPSTypeIdResolver - .SCAN_RESULT - .getClassesImplementing(Identifiable.class.getName()).loadClasses() - .stream() - .filter(cl -> !cl.isInterface()) - .filter(cl -> !Modifier.isAbstract(cl.getModifiers())) - //filter test classes - .filter(cl -> !cl.toString().toLowerCase().contains("test")) - .map(cl -> { - - Class idClazz = null; - // Try to get the specific Id - try { - idClazz = cl.getMethod("getId").getReturnType(); - - } - catch (NoSuchMethodException e) { - return fail(cl.getName() + " does not implement the method 'getId()'"); - } - - if (Modifier.isAbstract(idClazz.getModifiers())) { - try { - idClazz = cl.getMethod("createId").getReturnType(); - - } - catch (NoSuchMethodException e) { - return fail(cl.getName() + " does not implement the method 'createId()' unable to retrieve specific id class"); - } - } - - String packageString = "com.bakdata.conquery.models.identifiable.ids.specific."; - if (!idClazz.getName().startsWith(packageString)) { - return fail("The id class " + idClazz + " is not located in the package " + packageString + ". 
Please clean that up."); - } - - return Arguments.of( - cl, - idClazz - ); - }); - } @ParameterizedTest @MethodSource diff --git a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java index bc54baed36..45d17e41bf 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java +++ b/backend/src/test/java/com/bakdata/conquery/models/query/DefaultColumnNameTest.java @@ -1,8 +1,6 @@ package com.bakdata.conquery.models.query; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import java.util.ArrayList; @@ -11,12 +9,15 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.Stream; +import jakarta.validation.Validator; import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; +import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.concepts.ConceptElement; @@ -28,15 +29,16 @@ import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.exceptions.ValidatorHelper; -import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId; +import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorSelectId; import com.bakdata.conquery.models.query.queryplan.aggregators.Aggregator; import 
com.bakdata.conquery.models.query.resultinfo.SelectResultInfo; import com.bakdata.conquery.models.query.resultinfo.UniqueNamer; import com.bakdata.conquery.models.types.ResultType; import com.bakdata.conquery.models.worker.LocalNamespace; import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.util.NonPersistentStoreFactory; import io.dropwizard.jersey.validation.Validators; -import jakarta.validation.Validator; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.params.ParameterizedTest; @@ -49,17 +51,18 @@ public class DefaultColumnNameTest { private static final PrintSettings SETTINGS = new PrintSettings(false, Locale.ENGLISH, NAMESPACE, new ConqueryConfig(), null, null); private static final Validator VALIDATOR = Validators.newValidator(); + private static final BiFunction CONCEPT_SELECT_SELECTOR = (concept, cq) -> { final UniversalSelect select = concept.getSelects().get(0); - cq.setSelects(List.of(select)); + cq.setSelects(List.of(select.getId())); return select; }; private static final BiFunction CONNECTOR_SELECT_SELECTOR = (concept, cq) -> { final Select select = concept.getConnectors().get(0).getSelects().get(0); - cq.getTables().get(0).setSelects(List.of(select)); + cq.getTables().get(0).setSelects(List.of((ConnectorSelectId) select.getId())); return select; }; @@ -157,15 +160,6 @@ private static Stream provideCombinations() { @ParameterizedTest @MethodSource("provideCombinations") void checkCombinations(TestConcept concept, boolean hasCQConceptLabel, String expectedColumnName) { - - doAnswer(invocation -> { - final ConceptId id = invocation.getArgument(0); - if (!concept.getId().equals(id)) { - throw new IllegalStateException("Expected the id " + concept.getId() + " but got " + id); - } - return concept; - }).when(NAMESPACE).resolve(any()); - final CQConcept cqConcept = concept.createCQConcept(hasCQConceptLabel); final UniqueNamer uniqNamer = new UniqueNamer(SETTINGS); @@ -176,7 +170,7 @@ void 
checkCombinations(TestConcept concept, boolean hasCQConceptLabel, String ex private static class TestCQConcept extends CQConcept { - private static CQConcept create(boolean withLabel, TestConcept concept) { + private static CQConcept create(boolean withLabel, TestConcept concept) { CQConcept cqConcept = new CQConcept(); if (withLabel) { cqConcept.setLabel("TestCQLabel"); @@ -190,14 +184,15 @@ private static CQConcept create(boolean withLabel, TestConcept concept) { if (elements.isEmpty()) { elements = List.of(concept); } + final List> list = (List>) elements.stream().map(ConceptElement::getId).toList(); cqConcept.setElements( - elements + list ); List tables = concept.getConnectors().stream() .map(con -> { CQTable table = new CQTable(); - table.setConnector(con); + table.setConnector(con.getId()); table.setConcept(cqConcept); return table; }) @@ -213,36 +208,41 @@ private static CQConcept create(boolean withLabel, TestConcept concept) { private static class TestConcept extends TreeConcept { - private static final Dataset DATASET = new Dataset() { - { - setName("test"); - } - }; + /** + * We use a different dataset for each concept/test. Otherwise, the concepts override each other in the + * NamespacedStorageProvider map during test parameter creation. 
+ */ + private static final AtomicInteger DATASET_COUNTER = new AtomicInteger(0); + private final BiFunction selectExtractor; private TestConcept(BiFunction selectExtractor) { + final NamespaceStorage NS_ID_RESOLVER = new NonPersistentStoreFactory().createNamespaceStorage(); this.selectExtractor = selectExtractor; setName("TestConceptName"); setLabel("TestConceptLabel"); - setDataset(DATASET); - setSelects(List.of(new TestUniversalSelect(this))); - } + Dataset DATASET = new Dataset() { + { + setName("test_" + DATASET_COUNTER.getAndIncrement()); + setNamespacedStorageProvider(NS_ID_RESOLVER); + NS_ID_RESOLVER.updateDataset(this); + } + }; + setDataset(DATASET.getId()); - public Select extractSelect(CQConcept cq) { - return selectExtractor.apply(this, cq); - } + NS_ID_RESOLVER.updateConcept(this); - public CQConcept createCQConcept(boolean hasCQConceptLabel) { - return TestCQConcept.create(hasCQConceptLabel, this); + setSelects(List.of(new TestUniversalSelect(this))); } - @SneakyThrows public static TestConcept create(int countConnectors, BiFunction selectExtractor, int countIds, String overwriteLabel) { TestConcept concept = new TestConcept(selectExtractor); if (overwriteLabel != null) { concept.setLabel(overwriteLabel); } + + List connectors = new ArrayList<>(); concept.setConnectors(connectors); for (; countConnectors > 0; countConnectors--) { @@ -263,15 +263,24 @@ public static TestConcept create(int countConnectors, BiFunction holder) { } @Override - public ResultType getResultType() { - return ResultType.Primitive.STRING; + public Aggregator createAggregator() { + return null; } @Override - public Aggregator createAggregator() { - return null; + public ResultType getResultType() { + return ResultType.Primitive.STRING; } } diff --git a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java deleted file mode 100644 index d2e9409287..0000000000 
--- a/backend/src/test/java/com/bakdata/conquery/models/types/SerialisationObjectsUtil.java +++ /dev/null @@ -1,129 +0,0 @@ -package com.bakdata.conquery.models.types; - -import java.time.LocalDate; -import java.util.ArrayList; -import java.util.List; -import java.util.UUID; - -import com.bakdata.conquery.apiv1.forms.export_form.AbsoluteMode; -import com.bakdata.conquery.apiv1.forms.export_form.ExportForm; -import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; -import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; -import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; -import com.bakdata.conquery.models.common.Range; -import com.bakdata.conquery.models.datasets.Column; -import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.datasets.concepts.ValidityDate; -import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; -import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; -import com.bakdata.conquery.models.events.MajorTypeId; -import com.bakdata.conquery.models.forms.util.ResolutionShortNames; -import com.bakdata.conquery.models.identifiable.CentralRegistry; -import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; -import com.fasterxml.jackson.databind.node.TextNode; -import lombok.experimental.UtilityClass; -import org.jetbrains.annotations.NotNull; - -/** - * Helper class for nested objects needed in {@link com.bakdata.conquery.models.SerializationTests} - */ -@UtilityClass -public class SerialisationObjectsUtil { - - - @NotNull - public static Dataset createDataset(CentralRegistry registry) { - final Dataset dataset = new Dataset("test-dataset"); - registry.register(dataset); - return dataset; - } - - @NotNull - public static TreeConcept createConcept(CentralRegistry registry, Dataset dataset) { - TreeConcept concept = new TreeConcept(); - 
concept.setDataset(dataset); - concept.setLabel("conceptLabel"); - concept.setName("conceptName"); - - Table table = new Table(); - - Column column = new Column(); - column.setLabel("colLabel"); - column.setName("colName"); - column.setType(MajorTypeId.STRING); - column.setTable(table); - - Column dateColumn = new Column(); - dateColumn.setLabel("colLabel2"); - dateColumn.setName("colName2"); - dateColumn.setType(MajorTypeId.DATE); - dateColumn.setTable(table); - - - table.setColumns(new Column[]{column, dateColumn}); - table.setDataset(dataset); - table.setLabel("tableLabel"); - table.setName("tableName"); - - column.setTable(table); - - ConceptTreeConnector connector = new ConceptTreeConnector(); - connector.setConcept(concept); - connector.setLabel("connLabel"); - connector.setName("connName"); - connector.setColumn(column); - - concept.setConnectors(List.of(connector)); - - ValidityDate valDate = ValidityDate.create(dateColumn); - valDate.setConnector(connector); - valDate.setLabel("valLabel"); - valDate.setName("valName"); - connector.setValidityDates(List.of(valDate)); - - registry.register(concept); - registry.register(column); - registry.register(dateColumn); - registry.register(table); - registry.register(connector); - registry.register(valDate); - return concept; - } - - @NotNull - public static ExportForm createExportForm(CentralRegistry registry, Dataset dataset) { - final TreeConcept concept = createConcept(registry, dataset); - final ExportForm exportForm = new ExportForm(); - final AbsoluteMode mode = new AbsoluteMode(); - mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); - mode.setForm(exportForm); - - final CQConcept cqConcept = new CQConcept(); - - final CQTable table = new CQTable(); - table.setConcept(cqConcept); - table.setConnector(concept.getConnectors().get(0)); - - // Use ArrayList instead of ImmutalbeList here because they use different hash code implementations - cqConcept.setTables(new 
ArrayList<>(List.of(table))); - cqConcept.setElements(new ArrayList<>(List.of(concept))); - - exportForm.setTimeMode(mode); - exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); - exportForm.setValues(new TextNode("Some Node")); - exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); - exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); - return exportForm; - } - - @NotNull - public static User createUser(CentralRegistry registry, MetaStorage storage) { - final User user = new User("test-user", "test-user", storage); - registry.register(user); - - user.updateStorage(); - return user; - } -} diff --git a/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java b/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java index 42f9c71e3c..231e9bfd61 100644 --- a/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java +++ b/backend/src/test/java/com/bakdata/conquery/service/FilterSearchTest.java @@ -5,24 +5,30 @@ import java.util.List; import java.util.Map; +import com.bakdata.conquery.io.storage.NamespacedStorage; import com.bakdata.conquery.models.config.IndexConfig; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.datasets.Table; -import com.bakdata.conquery.models.datasets.concepts.Searchable; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SelectFilter; import com.bakdata.conquery.models.datasets.concepts.filters.specific.SingleSelectFilter; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.index.IndexCreationException; import com.bakdata.conquery.models.query.FilterSearch; +import com.bakdata.conquery.util.extensions.NamespaceStorageExtension; import com.google.common.collect.ImmutableBiMap; import 
org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; public class FilterSearchTest { + @RegisterExtension + private static final NamespaceStorageExtension NAMESPACE_STORAGE_EXTENSION = new NamespaceStorageExtension(); + private static final NamespacedStorage NAMESPACED_STORAGE = NAMESPACE_STORAGE_EXTENSION.getStorage(); + @Test - public void totals() throws IndexCreationException { + public void totals() { final IndexConfig indexConfig = new IndexConfig(); FilterSearch search = new FilterSearch(indexConfig); @@ -33,10 +39,13 @@ public void totals() throws IndexCreationException { Column column = new Column(); Table table = new Table(); Dataset dataset = new Dataset("test_dataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); table.setName("test_table"); - table.setDataset(dataset); - concept.setDataset(dataset); + table.setDataset(dataset.getId()); + table.setColumns(new Column[]{column}); + concept.setDataset(dataset.getId()); concept.setName("test_concept"); concept.setConnectors(List.of(connector)); connector.setName("test_connector"); @@ -44,7 +53,8 @@ public void totals() throws IndexCreationException { connector.setConcept(concept); column.setTable(table); column.setName("test_column"); - filter.setColumn(column); + NAMESPACED_STORAGE.addTable(table); + filter.setColumn(column.getId()); filter.setConnector(connector); @@ -55,9 +65,14 @@ public void totals() throws IndexCreationException { )); // Register - for (Searchable searchable : filter.getSearchReferences()) { - search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); - } + filter.getSearchReferences().forEach(searchable -> { + try { + search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); + } + catch (IndexCreationException e) { + throw new RuntimeException(e); + } + }); search.registerValues(column, List.of( "a", @@ -71,7 +86,7 @@ public void totals() 
throws IndexCreationException { } @Test - public void totalsEmptyFiler() throws IndexCreationException { + public void totalsEmptyFiler() { final IndexConfig indexConfig = new IndexConfig(); FilterSearch search = new FilterSearch(indexConfig); @@ -82,10 +97,13 @@ public void totalsEmptyFiler() throws IndexCreationException { Column column = new Column(); Table table = new Table(); Dataset dataset = new Dataset("test_dataset"); + dataset.setNamespacedStorageProvider(NAMESPACED_STORAGE); + NAMESPACED_STORAGE.updateDataset(dataset); table.setName("test_table"); - table.setDataset(dataset); - concept.setDataset(dataset); + table.setDataset(dataset.getId()); + table.setColumns(new Column[]{column}); + concept.setDataset(dataset.getId()); concept.setName("test_concept"); concept.setConnectors(List.of(connector)); connector.setName("test_connector"); @@ -94,14 +112,20 @@ public void totalsEmptyFiler() throws IndexCreationException { column.setTable(table); column.setName("test_column"); column.setSearchDisabled(true); - filter.setColumn(column); + NAMESPACED_STORAGE.addTable(table); + + filter.setColumn(column.getId()); filter.setConnector(connector); // Register - for (Searchable searchable : filter.getSearchReferences()) { - search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); - } - + filter.getSearchReferences().forEach(searchable -> { + try { + search.addSearches(Map.of(searchable, searchable.createTrieSearch(indexConfig))); + } + catch (IndexCreationException e) { + throw new RuntimeException(e); + } + }); search.shrinkSearch(column); assertThat(search.getTotal(filter)).isEqualTo(0); diff --git a/backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java b/backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java similarity index 77% rename from backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java rename to backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java 
index 828c76cb7c..50c301a5c3 100644 --- a/backend/src/test/java/com/bakdata/conquery/models/index/IndexServiceTest.java +++ b/backend/src/test/java/com/bakdata/conquery/service/IndexServiceTest.java @@ -1,7 +1,7 @@ -package com.bakdata.conquery.models.index; +package com.bakdata.conquery.service; import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.awaitility.Awaitility.await; import static org.mockserver.model.HttpRequest.request; import java.io.IOException; @@ -10,22 +10,26 @@ import java.net.URI; import java.net.URISyntaxException; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; -import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.index.IndexService; +import com.bakdata.conquery.models.index.MapIndex; +import com.bakdata.conquery.models.index.MapInternToExternMapper; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.bakdata.conquery.util.extensions.MockServerExtension; import com.github.powerlibraries.io.In; import com.univocity.parsers.csv.CsvParserSettings; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.junit.jupiter.api.extension.RegisterExtension; import org.mockserver.integration.ClientAndServer; import org.mockserver.model.HttpResponse; import org.mockserver.model.MediaType; @@ -33,39 +37,41 @@ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) @Slf4j public class IndexServiceTest { + @RegisterExtension + private static final MockServerExtension 
REF_SERVER = new MockServerExtension(ClientAndServer.startClientAndServer(), IndexServiceTest::initRefServer); private static final NamespaceStorage NAMESPACE_STORAGE = new NamespaceStorage(new NonPersistentStoreFactory(), IndexServiceTest.class.getName()); private static final Dataset DATASET = new Dataset("dataset"); private static final ConqueryConfig CONFIG = new ConqueryConfig(); - private static final ClientAndServer REF_SERVER = ClientAndServer.startClientAndServer(); private final IndexService indexService = new IndexService(new CsvParserSettings(), "emptyDefaultLabel"); + @SneakyThrows(IOException.class) + private static void initRefServer(ClientAndServer mockServer) { + log.info("Test loading of mapping"); + + try (InputStream inputStream = In.resource("/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv").asStream()) { + mockServer.when(request().withPath("/mapping.csv")) + .respond(HttpResponse.response().withContentType(new MediaType("text", "csv")).withBody(inputStream.readAllBytes())); + } + + } + @BeforeAll @SneakyThrows public static void beforeAll() { - NAMESPACE_STORAGE.openStores(Jackson.MAPPER); - - NAMESPACE_STORAGE.updateDataset(DATASET); CONFIG.getIndex().setBaseUrl(new URI(String.format("http://localhost:%d/", REF_SERVER.getPort()))); - } + NAMESPACE_STORAGE.openStores(null, null); + + DATASET.setNamespacedStorageProvider(NAMESPACE_STORAGE); + NAMESPACE_STORAGE.updateDataset(DATASET); - @AfterAll - @SneakyThrows - public static void afterAll() { - REF_SERVER.stop(); } @Test @Order(0) - void testLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException, IOException, ExecutionException, InterruptedException { - log.info("Test loading of mapping"); - - try (InputStream inputStream = In.resource("/tests/aggregator/FIRST_MAPPED_AGGREGATOR/mapping.csv").asStream()) { - REF_SERVER.when(request().withPath("/mapping.csv")) - .respond(HttpResponse.response().withContentType(new MediaType("text", 
"csv")).withBody(inputStream.readAllBytes())); - } + void testLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException, IOException { final MapInternToExternMapper mapper = new MapInternToExternMapper( "test1", @@ -74,6 +80,7 @@ void testLoading() throws NoSuchFieldException, IllegalAccessException, URISynta "{{external}}" ); + final MapInternToExternMapper mapperUrlAbsolute = new MapInternToExternMapper( "testUrlAbsolute", new URI(String.format("http://localhost:%d/mapping.csv", REF_SERVER.getPort())), @@ -97,17 +104,15 @@ void testLoading() throws NoSuchFieldException, IllegalAccessException, URISynta mapperUrlAbsolute.init(); mapperUrlRelative.init(); - // Wait for future - mapper.getInt2ext().get(); - mapperUrlAbsolute.getInt2ext().get(); - mapperUrlRelative.getInt2ext().get(); - + await().timeout(5, TimeUnit.SECONDS).until(mapper::initialized); assertThat(mapper.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapper.external("int2")).as("Internal Value").isEqualTo("int2"); + await().timeout(5, TimeUnit.SECONDS).until(mapperUrlAbsolute::initialized); assertThat(mapperUrlAbsolute.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapperUrlAbsolute.external("int2")).as("Internal Value").isEqualTo("int2"); + await().timeout(5, TimeUnit.SECONDS).until(mapperUrlRelative::initialized); assertThat(mapperUrlRelative.external("int1")).as("Internal Value").isEqualTo("hello"); assertThat(mapperUrlRelative.external("int2")).as("Internal Value").isEqualTo("int2"); @@ -124,7 +129,7 @@ private static void injectComponents(MapInternToExternMapper mapInternToExternMa final Field configField = MapInternToExternMapper.class.getDeclaredField(MapInternToExternMapper.Fields.config); configField.setAccessible(true); - configField.set(mapInternToExternMapper, IndexServiceTest.CONFIG); + configField.set(mapInternToExternMapper, CONFIG); } @@ -143,10 +148,8 @@ void testEvictOnMapper() 
injectComponents(mapInternToExternMapper, indexService); mapInternToExternMapper.init(); - // Wait for future - mapInternToExternMapper.getInt2ext().get(); - // Before eviction the result should be the same + await().timeout(5, TimeUnit.SECONDS).until(mapInternToExternMapper::initialized); assertThat(mapInternToExternMapper.external("int1")).as("Internal Value").isEqualTo("hello"); @@ -165,25 +168,4 @@ void testEvictOnMapper() assertThat(mappingBeforeEvict).as("Mapping before and after eviction") .isNotSameAs(mappingAfterEvict); } - - @Test - void testFailedLoading() throws NoSuchFieldException, IllegalAccessException, URISyntaxException { - final MapInternToExternMapper mapInternToExternMapper = new MapInternToExternMapper( - "test1", - new URI("classpath:/tests/aggregator/FIRST_MAPPED_AGGREGATOR/not_existing_mapping.csv"), - "internal", - "{{external}}" - ); - - injectComponents(mapInternToExternMapper, indexService); - mapInternToExternMapper.init(); - - // Wait for future - assertThatThrownBy(() -> mapInternToExternMapper.getInt2ext().get()).as("Not existent CSV").hasCauseInstanceOf(IllegalStateException.class); - - - // Before eviction the result should be the same - assertThat(mapInternToExternMapper.external("int1")).as("Internal Value").isEqualTo("int1"); - } - } diff --git a/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java b/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java index 5d1cd5674a..91b5e718b4 100644 --- a/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java +++ b/backend/src/test/java/com/bakdata/conquery/tasks/PermissionCleanupTaskTest.java @@ -3,7 +3,6 @@ import static com.bakdata.conquery.tasks.PermissionCleanupTask.deletePermissionsOfOwnedInstances; import static com.bakdata.conquery.tasks.PermissionCleanupTask.deleteQueryPermissionsWithMissingRef; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; import 
java.time.Instant; import java.time.LocalDateTime; @@ -21,6 +20,7 @@ import com.bakdata.conquery.models.auth.permissions.WildcardPermission; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; import org.junit.jupiter.api.AfterEach; @@ -42,7 +42,7 @@ private ManagedQuery createManagedQuery() { ConceptQuery query = new ConceptQuery(root); - final ManagedQuery managedQuery = new ManagedQuery(query, mock(User.class), new Dataset("test"), STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(query, new UserId("test_user"), new Dataset("test").getId(), STORAGE, null); managedQuery.setCreationTime(LocalDateTime.now().minusDays(1)); @@ -121,7 +121,7 @@ void doDeletePermissionsOfOwnedReference() { STORAGE.updateUser(user); user.addPermission(ExecutionPermission.onInstance(AbilitySets.QUERY_CREATOR, managedQueryOwned.getId())); - managedQueryOwned.setOwner(user); + managedQueryOwned.setOwner(user.getId()); STORAGE.updateExecution(managedQueryOwned); // Created not owned execution @@ -130,7 +130,7 @@ void doDeletePermissionsOfOwnedReference() { user.addPermission(ExecutionPermission.onInstance(Ability.READ, managedQueryNotOwned.getId())); // Set owner - managedQueryNotOwned.setOwner(user2); + managedQueryNotOwned.setOwner(user2.getId()); STORAGE.updateExecution(managedQueryNotOwned); deletePermissionsOfOwnedInstances(STORAGE, ExecutionPermission.DOMAIN.toLowerCase(), ManagedExecutionId.Parser.INSTANCE, STORAGE::getExecution); diff --git a/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java b/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java index 05e23edad3..28691377ad 100644 --- 
a/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java +++ b/backend/src/test/java/com/bakdata/conquery/tasks/QueryCleanupTaskTest.java @@ -1,7 +1,6 @@ package com.bakdata.conquery.tasks; import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; import java.time.Duration; import java.time.LocalDateTime; @@ -14,8 +13,8 @@ import com.bakdata.conquery.apiv1.query.concept.specific.CQAnd; import com.bakdata.conquery.apiv1.query.concept.specific.CQReusedQuery; import com.bakdata.conquery.io.storage.MetaStorage; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ManagedQuery; import com.bakdata.conquery.util.NonPersistentStoreFactory; import org.junit.jupiter.api.AfterEach; @@ -25,6 +24,7 @@ @TestInstance(Lifecycle.PER_CLASS) class QueryCleanupTaskTest { + private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); private final Duration queryExpiration = Duration.ofDays(30); @@ -36,16 +36,16 @@ private ManagedQuery createManagedQuery() { ConceptQuery query = new ConceptQuery(root); - final ManagedQuery managedQuery = new ManagedQuery(query, mock(User.class), new Dataset("test"), STORAGE, null); + final ManagedQuery managedQuery = new ManagedQuery(query, new UserId("test"), new Dataset("test").getId(), STORAGE, null); managedQuery.setCreationTime(LocalDateTime.now().minus(queryExpiration).minusDays(1)); STORAGE.addExecution(managedQuery); + managedQuery.setMetaStorage(STORAGE); return managedQuery; } - private static final MetaStorage STORAGE = new NonPersistentStoreFactory().createMetaStorage(); @AfterEach @@ -77,7 +77,8 @@ void singleNamed() throws Exception { managedQuery.setLabel("test"); - new QueryCleanupTask(STORAGE, queryExpiration).execute(Map.of(), null); + QueryCleanupTask queryCleanupTask = new 
QueryCleanupTask(STORAGE, queryExpiration); + queryCleanupTask.execute(Map.of(), null); assertThat(STORAGE.getAllExecutions()).containsExactlyInAnyOrder(managedQuery); } diff --git a/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java b/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java new file mode 100644 index 0000000000..039daac7f5 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/FailingMetaStorage.java @@ -0,0 +1,180 @@ +package com.bakdata.conquery.util; + +import java.util.stream.Stream; + +import com.bakdata.conquery.io.storage.ManagedStore; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.Group; +import com.bakdata.conquery.models.auth.entities.Role; +import com.bakdata.conquery.models.auth.entities.User; +import com.bakdata.conquery.models.execution.ManagedExecution; +import com.bakdata.conquery.models.forms.configs.FormConfig; +import com.bakdata.conquery.models.identifiable.ids.Id; +import com.bakdata.conquery.models.identifiable.ids.MetaId; +import com.bakdata.conquery.models.identifiable.ids.specific.FormConfigId; +import com.bakdata.conquery.models.identifiable.ids.specific.GroupId; +import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; +import com.bakdata.conquery.models.identifiable.ids.specific.RoleId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; +import com.codahale.metrics.MetricRegistry; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; + +/** + * A meta storage that can be injected in to deserialization in environments where no MetaStorage exists, e.g. in tests on the client side. + * During debugging this can help to identify where an object was deserialized. 
+ */ +public class FailingMetaStorage extends MetaStorage { + + public final static FailingMetaStorage INSTANCE = new FailingMetaStorage(); + public static final String ERROR_MSG = "Cannot be used in this environment. The real metastore exists only on the manager node."; + + private FailingMetaStorage() { + super(null); + } + + @Override + public void openStores(ObjectMapper mapper, MetricRegistry metricRegistry) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public ImmutableList getStores() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void addExecution(ManagedExecution query) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public ManagedExecution getExecution(ManagedExecutionId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllExecutions() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateExecution(ManagedExecution query) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeExecution(ManagedExecutionId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addGroup(Group group) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Group getGroup(GroupId groupId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllGroups() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public void removeGroup(GroupId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateGroup(Group group) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addUser(User user) { + throw new 
UnsupportedOperationException(ERROR_MSG); + } + + @Override + public User getUser(UserId userId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllUsers() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeUser(UserId userId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateUser(User user) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addRole(Role role) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Role getRole(RoleId roleId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllRoles() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeRole(RoleId roleId) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateRole(Role role) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FormConfig getFormConfig(FormConfigId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public Stream getAllFormConfigs() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void removeFormConfig(FormConfigId id) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void updateFormConfig(FormConfig formConfig) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public synchronized void addFormConfig(FormConfig formConfig) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public & MetaId, VALUE> VALUE get(ID id) { + throw new UnsupportedOperationException(ERROR_MSG); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java 
b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java index 216d858144..ac8c06a21e 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java +++ b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStore.java @@ -1,9 +1,9 @@ package com.bakdata.conquery.util; import java.io.IOException; -import java.util.Collection; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import java.util.stream.Stream; import com.bakdata.conquery.io.storage.Store; import com.bakdata.conquery.io.storage.xodus.stores.SerializingStore; @@ -46,23 +46,28 @@ public void remove(KEY key) { } @Override - public void loadData() { + public int count() { + return map.size(); + } + @Override + public Stream getAll() { + return map.values().stream(); } @Override - public int count() { - return map.size(); + public Stream getAllKeys() { + return map.keySet().stream(); } @Override - public Collection getAll() { - return map.values(); + public void loadData() { + } @Override - public Collection getAllKeys() { - return map.keySet(); + public void close() throws IOException { + // Nothing to close } @Override @@ -74,9 +79,4 @@ public void removeStore() { public void clear() { map.clear(); } - - @Override - public void close() throws IOException { - // Nothing to close - } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java index ae13a560f6..9b079ae62d 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java +++ b/backend/src/test/java/com/bakdata/conquery/util/NonPersistentStoreFactory.java @@ -6,27 +6,35 @@ import java.util.concurrent.ConcurrentHashMap; import com.bakdata.conquery.io.cps.CPSType; -import com.bakdata.conquery.io.storage.*; -import com.bakdata.conquery.io.storage.xodus.stores.CachedStore; +import 
com.bakdata.conquery.io.storage.IdentifiableStore; +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespaceStorage; +import com.bakdata.conquery.io.storage.Store; +import com.bakdata.conquery.io.storage.StoreMappings; +import com.bakdata.conquery.io.storage.WorkerStorage; import com.bakdata.conquery.io.storage.xodus.stores.SingletonStore; import com.bakdata.conquery.models.auth.entities.Group; import com.bakdata.conquery.models.auth.entities.Role; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.StoreFactory; -import com.bakdata.conquery.models.datasets.*; +import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.Import; +import com.bakdata.conquery.models.datasets.PreviewConfig; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; +import com.bakdata.conquery.models.datasets.Table; import com.bakdata.conquery.models.datasets.concepts.Concept; import com.bakdata.conquery.models.datasets.concepts.StructureNode; import com.bakdata.conquery.models.events.Bucket; import com.bakdata.conquery.models.events.CBlock; import com.bakdata.conquery.models.execution.ManagedExecution; import com.bakdata.conquery.models.forms.configs.FormConfig; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.Id; import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap; import com.bakdata.conquery.models.index.InternToExternMapper; import com.bakdata.conquery.models.index.search.SearchIndex; import com.bakdata.conquery.models.worker.WorkerInformation; import com.bakdata.conquery.models.worker.WorkerToBucketsMap; +import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.databind.ObjectMapper; @CPSType(id = "NON_PERSISTENT", base = StoreFactory.class) @@ -63,7 +71,7 @@ public Collection discoverNamespaceStorages() { } @Override - public Collection 
discoverWorkerStorages() { + public Collection discoverWorkerStorages() { return Collections.emptyList(); } @@ -73,99 +81,99 @@ public SingletonStore createDatasetStore(String pathName, ObjectMapper } @Override - public IdentifiableStore createSecondaryIdDescriptionStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(secondaryIdDescriptionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createSecondaryIdDescriptionStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(secondaryIdDescriptionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createInternToExternMappingStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(internToExternStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore
createTableStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(tableStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createSearchIndexStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.identifiable(searchIndexStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore> createConceptStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(conceptStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createPreviewStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.singleton(previewStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); - + public IdentifiableStore createImportStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(importStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public CachedStore createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.cached(entity2Bucket.computeIfAbsent(pathName, ignored -> new NonPersistentStore<>())); + public IdentifiableStore createCBlockStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(cBlockStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore
createTableStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(tableStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createBucketStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(bucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore> createConceptStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(conceptStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(workerStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createImportStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(importStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createIdMappingStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(idMappingStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createCBlockStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(cBlockStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(workerToBucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createBucketStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return 
StoreMappings.identifiable(bucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createStructureStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(structureStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createWorkerInformationStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(workerStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createExecutionsStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(executionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createIdMappingStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(idMappingStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createFormConfigStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(formConfigStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createWorkerToBucketsStore(String pathName, ObjectMapper objectMapper) { - return StoreMappings.singleton(workerToBucketStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createUserStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(userStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public SingletonStore createStructureStore(String pathName, CentralRegistry centralRegistry, ObjectMapper objectMapper) { - return StoreMappings.singleton(structureStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + public IdentifiableStore createRoleStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(roleStore.computeIfAbsent(pathName, n -> new 
NonPersistentStore<>())); } @Override - public IdentifiableStore createExecutionsStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(executionStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createGroupStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(groupStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createFormConfigStore(CentralRegistry centralRegistry, String pathName, ObjectMapper objectMapper) { - return StoreMappings.identifiable(formConfigStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createInternToExternMappingStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(internToExternStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createUserStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(userStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public IdentifiableStore createSearchIndexStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.identifiable(searchIndexStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); } @Override - public IdentifiableStore createRoleStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(roleStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public SingletonStore createPreviewStore(String pathName, ObjectMapper objectMapper) { + return StoreMappings.singleton(previewStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>())); + } @Override - public IdentifiableStore 
createGroupStore(CentralRegistry centralRegistry, String pathName, MetaStorage storage, ObjectMapper objectMapper) { - return StoreMappings.identifiable(groupStore.computeIfAbsent(pathName, n -> new NonPersistentStore<>()), centralRegistry); + public Store createEntity2BucketStore(String pathName, ObjectMapper objectMapper) { + return entity2Bucket.computeIfAbsent(pathName, ignored -> new NonPersistentStore<>()); } /** @@ -173,7 +181,16 @@ public IdentifiableStore createGroupStore(CentralRegistry centralRegistry */ public MetaStorage createMetaStorage() { final MetaStorage metaStorage = new MetaStorage(this); - metaStorage.openStores(null); + metaStorage.openStores(null, new MetricRegistry()); return metaStorage; } + + /** + * @implNote intended for Unit-tests + */ + public NamespaceStorage createNamespaceStorage() { + final NamespaceStorage storage = new NamespaceStorage(this, "_"); + storage.openStores(null, new MetricRegistry()); + return storage; + } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java index de4137ac1f..2555aecde9 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java +++ b/backend/src/test/java/com/bakdata/conquery/util/SerialisationObjectsUtil.java @@ -10,17 +10,18 @@ import com.bakdata.conquery.apiv1.query.concept.filter.CQTable; import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept; import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.io.storage.NamespacedStorageImpl; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.common.Range; import com.bakdata.conquery.models.datasets.Column; import com.bakdata.conquery.models.datasets.Dataset; +import com.bakdata.conquery.models.datasets.SecondaryIdDescription; import com.bakdata.conquery.models.datasets.Table; import 
com.bakdata.conquery.models.datasets.concepts.ValidityDate; import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeConnector; import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept; import com.bakdata.conquery.models.events.MajorTypeId; import com.bakdata.conquery.models.forms.util.ResolutionShortNames; -import com.bakdata.conquery.models.identifiable.CentralRegistry; import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId; import com.fasterxml.jackson.databind.node.TextNode; import lombok.experimental.UtilityClass; @@ -34,19 +35,63 @@ public class SerialisationObjectsUtil { @NotNull - public static Dataset createDataset(CentralRegistry registry) { - final Dataset dataset = new Dataset("test-dataset"); - registry.register(dataset); + public static Dataset createDataset(NamespacedStorageImpl storage) { + return createDataset("test-dataset", storage); + } + + @NotNull + public static Dataset createDataset(String name, NamespacedStorageImpl storage) { + Dataset dataset = new Dataset(name); + dataset.setNamespacedStorageProvider(storage); + storage.updateDataset(dataset); return dataset; } @NotNull - public static TreeConcept createConcept(CentralRegistry registry, Dataset dataset) { + public static ExportForm createExportForm(Dataset dataset, NamespacedStorageImpl storage) { + final TreeConcept concept = createConcept(dataset, storage); + final ExportForm exportForm = new ExportForm(); + final AbsoluteMode mode = new AbsoluteMode(); + mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); + mode.setForm(exportForm); + + final CQConcept cqConcept = new CQConcept(); + + final CQTable table = new CQTable(); + table.setConcept(cqConcept); + table.setConnector(concept.getConnectors().get(0).getId()); + + // Use ArrayList instead of ImmutableList here because they use different hash code implementations + cqConcept.setTables(new ArrayList<>(List.of(table))); + cqConcept.setElements(new 
ArrayList<>(List.of(concept.getId()))); + + exportForm.setTimeMode(mode); + exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); + exportForm.setValues(new TextNode("Some Node")); + exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); + exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); + + storage.updateConcept(concept); + + return exportForm; + } + + /** + * Does not add the produced concept to a store, only dependencies. + * Otherwise, it might clash during serdes because init was not executed + */ + @NotNull + public static TreeConcept createConcept(Dataset dataset, NamespacedStorageImpl storage) { TreeConcept concept = new TreeConcept(); - concept.setDataset(dataset); + + concept.setDataset(dataset.getId()); concept.setLabel("conceptLabel"); concept.setName("conceptName"); + final SecondaryIdDescription secondaryIdDescription = new SecondaryIdDescription(); + secondaryIdDescription.setDataset(dataset.getId()); + secondaryIdDescription.setName("sid"); + Table table = new Table(); Column column = new Column(); @@ -63,9 +108,9 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase table.setColumns(new Column[]{column, dateColumn}); - table.setDataset(dataset); table.setLabel("tableLabel"); table.setName("tableName"); + table.setDataset(dataset.getId()); column.setTable(table); @@ -73,56 +118,33 @@ public static TreeConcept createConcept(CentralRegistry registry, Dataset datase connector.setConcept(concept); connector.setLabel("connLabel"); connector.setName("connName"); - connector.setColumn(column); concept.setConnectors(List.of(connector)); + storage.updateDataset(dataset); + storage.addSecondaryId(secondaryIdDescription); + storage.addTable(table); + + // Set/Create ids after setting id resolver + connector.setColumn(column.getId()); + column.setSecondaryId(secondaryIdDescription.getId()); + ValidityDate valDate = ValidityDate.create(dateColumn); 
valDate.setConnector(connector); valDate.setLabel("valLabel"); valDate.setName("valName"); connector.setValidityDates(List.of(valDate)); - registry.register(concept); - registry.register(column); - registry.register(dateColumn); - registry.register(table); - registry.register(connector); - registry.register(valDate); - return concept; - } - - @NotNull - public static ExportForm createExportForm(CentralRegistry registry, Dataset dataset) { - final TreeConcept concept = createConcept(registry, dataset); - final ExportForm exportForm = new ExportForm(); - final AbsoluteMode mode = new AbsoluteMode(); - mode.setDateRange(new Range<>(LocalDate.of(2200, 6, 1), LocalDate.of(2200, 6, 2))); - mode.setForm(exportForm); - - final CQConcept cqConcept = new CQConcept(); - - final CQTable table = new CQTable(); - table.setConcept(cqConcept); - table.setConnector(concept.getConnectors().get(0)); + // Initialize Concept + concept = new TreeConcept.Initializer().convert(concept); - // Use ArrayList instead of ImmutalbeList here because they use different hash code implementations - cqConcept.setTables(new ArrayList<>(List.of(table))); - cqConcept.setElements(new ArrayList<>(List.of(concept))); - - exportForm.setTimeMode(mode); - exportForm.setFeatures(new ArrayList<>(List.of(cqConcept))); - exportForm.setValues(new TextNode("Some Node")); - exportForm.setQueryGroupId(new ManagedExecutionId(dataset.getId(), UUID.randomUUID())); - exportForm.setResolution(new ArrayList<>(List.of(ResolutionShortNames.COMPLETE))); - return exportForm; + return concept; } @NotNull - public static User createUser(CentralRegistry registry, MetaStorage storage) { - final User user = new User("test-user", "test-user", storage); - registry.register(user); - + public static User createUser(MetaStorage metaStorage) { + final User user = new User("test-user", "test-user", metaStorage); + user.setMetaStorage(metaStorage); user.updateStorage(); return user; } diff --git 
a/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java new file mode 100644 index 0000000000..8c0c2a890e --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/GroupExtension.java @@ -0,0 +1,22 @@ +package com.bakdata.conquery.util.extensions; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.Group; +import lombok.Getter; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +@Getter +public class GroupExtension implements BeforeAllCallback { + + private final Group group; + + public GroupExtension(MetaStorage metaStorage, String name) { + group = new Group(name, name, metaStorage); + + } + @Override + public void beforeAll(ExtensionContext context) throws Exception { + group.updateStorage(); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java new file mode 100644 index 0000000000..10fc15d3b5 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/MetaStorageExtension.java @@ -0,0 +1,23 @@ +package com.bakdata.conquery.util.extensions; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.util.NonPersistentStoreFactory; +import com.codahale.metrics.MetricRegistry; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + + +@RequiredArgsConstructor +@Getter +public class MetaStorageExtension implements BeforeAllCallback { + private final MetricRegistry metricRegistry; + + private final MetaStorage metaStorage = new MetaStorage(new NonPersistentStoreFactory()); + + @Override + public void 
beforeAll(ExtensionContext extensionContext) throws Exception { + metaStorage.openStores(null, metricRegistry); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java new file mode 100644 index 0000000000..8315c044d4 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/MockServerExtension.java @@ -0,0 +1,32 @@ +package com.bakdata.conquery.util.extensions; + +import java.util.function.Consumer; + +import lombok.RequiredArgsConstructor; +import lombok.experimental.Delegate; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.mockserver.integration.ClientAndServer; + +@RequiredArgsConstructor +public class MockServerExtension implements BeforeAllCallback, AfterAllCallback { + + @Delegate + private final ClientAndServer server; + private final Consumer setup; + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + setup.accept(server); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + server.stop(); + } + + public String baseUrl(){ + return "http://localhost:%d".formatted(server.getPort()); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java index b01d28f2ca..bf6f5dd8c0 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/NamespaceStorageExtension.java @@ -3,6 +3,7 @@ import com.bakdata.conquery.io.jackson.Jackson; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.util.NonPersistentStoreFactory; +import 
com.codahale.metrics.MetricRegistry; import lombok.Getter; import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; @@ -15,7 +16,7 @@ public class NamespaceStorageExtension implements BeforeAllCallback, BeforeEachC @Override public void beforeAll(ExtensionContext context) throws Exception { - storage.openStores(Jackson.MAPPER); + storage.openStores(Jackson.MAPPER, new MetricRegistry()); } @Override diff --git a/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java b/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java new file mode 100644 index 0000000000..86da936696 --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/extensions/UserExtension.java @@ -0,0 +1,28 @@ +package com.bakdata.conquery.util.extensions; + +import com.bakdata.conquery.io.storage.MetaStorage; +import com.bakdata.conquery.models.auth.entities.User; +import lombok.Getter; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; + +public class UserExtension implements BeforeAllCallback { + private final MetaStorage metaStorage; + + @Getter + private final User user; + + public UserExtension(MetaStorage metaStorage, String id, String label) { + this.metaStorage = metaStorage; + user = new User(id, label, metaStorage); + } + + public UserExtension(MetaStorage metaStorage, String id) { + this(metaStorage, id, id); + } + + @Override + public void beforeAll(ExtensionContext extensionContext) throws Exception { + metaStorage.addUser(user); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java b/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java index 3d5dfa73a1..929716ec59 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/ConfigOverride.java @@ -2,6 +2,7 
@@ import java.io.File; import java.net.ServerSocket; +import java.nio.file.Path; import java.util.Collection; import com.bakdata.conquery.models.config.ConqueryConfig; @@ -53,4 +54,11 @@ public static void configureRandomPorts(ConqueryConfig config) { config.getCluster().setPort(s.getLocalPort()); } } + + + public static void configureWorkdir(XodusStoreFactory storageConfig, Path workdir) { + + // Create new storage path to prevent xodus lock conflicts + storageConfig.setDirectory(workdir); + } } diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java b/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java new file mode 100644 index 0000000000..aef66c7c2a --- /dev/null +++ b/backend/src/test/java/com/bakdata/conquery/util/support/ConqueryAuthenticationFilter.java @@ -0,0 +1,25 @@ +package com.bakdata.conquery.util.support; + +import java.util.function.Supplier; +import jakarta.ws.rs.client.ClientRequestContext; +import jakarta.ws.rs.client.ClientRequestFilter; +import jakarta.ws.rs.core.HttpHeaders; + +/** + * Simple filter for http client in test to provide authentication information. + * Skips, if the request had an {@link HttpHeaders#AUTHORIZATION} already set. + * @param tokenSupplier Supplier that provides a (fresh) token for each request. 
+ */ +record ConqueryAuthenticationFilter(Supplier tokenSupplier) implements ClientRequestFilter { + + @Override + public void filter(ClientRequestContext requestContext) { + // If none set to provided token + if (requestContext.getHeaders().containsKey(HttpHeaders.AUTHORIZATION)) { + return; + } + + String token = tokenSupplier.get(); + requestContext.getHeaders().add(HttpHeaders.AUTHORIZATION, "Bearer " + token); + } +} diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java index 8f3410c085..84fc8ca08b 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/StandaloneSupport.java @@ -1,46 +1,32 @@ package com.bakdata.conquery.util.support; import java.io.File; -import java.io.IOException; import java.util.List; import java.util.Map; -import jakarta.validation.Validator; import jakarta.ws.rs.client.Client; -import jakarta.ws.rs.client.ClientRequestContext; -import jakarta.ws.rs.client.ClientRequestFilter; import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.commands.PreprocessorCommand; -import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.integration.json.TestDataImporter; -import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.io.storage.NamespaceStorage; import com.bakdata.conquery.models.auth.AuthorizationController; -import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; -import com.bakdata.conquery.models.identifiable.Identifiable; -import com.bakdata.conquery.models.identifiable.ids.Id; -import com.bakdata.conquery.models.identifiable.ids.NamespacedId; -import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; -import 
com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor; -import com.bakdata.conquery.resources.admin.rest.AdminProcessor; import com.google.common.util.concurrent.MoreExecutors; import io.dropwizard.core.setup.Environment; -import lombok.Data; import lombok.Getter; import lombok.RequiredArgsConstructor; +import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor public class StandaloneSupport implements TestSupport { - public enum Mode {WORKER, SQL} - @Getter private final Mode mode; + @Delegate private final TestConquery testConquery; @Getter private final Namespace namespace; @@ -51,18 +37,8 @@ public enum Mode {WORKER, SQL} @Getter private final ConqueryConfig config; @Getter - private final AdminProcessor metaProcessor; - @Getter - private final AdminDatasetProcessor datasetsProcessor; - @Getter - private final User testUser; - @Getter private final TestDataImporter testImporter; - public AuthorizationController getAuthorizationController() { - return testConquery.getStandaloneCommand().getManagerNode().getAuthController(); - } - public void waitUntilWorkDone() { testConquery.waitUntilWorkDone(); } @@ -90,58 +66,23 @@ public void run(Environment environment, net.sourceforge.argparse4j.inf.Namespac .run(env, namespace, config); } - - public Validator getValidator() { - return testConquery.getStandaloneCommand().getManagerNode().getValidator(); - } - - public MetaStorage getMetaStorage() { - return testConquery.getStandaloneCommand().getManagerNode().getMetaStorage(); - } - public NamespaceStorage getNamespaceStorage() { - return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry().get(dataset.getId()).getStorage(); + return getStandaloneCommand().getManagerNode().getDatasetRegistry().get(dataset.getId()).getStorage(); } - public DatasetRegistry getDatasetRegistry() { - return testConquery.getStandaloneCommand().getManagerNode().getDatasetRegistry(); - } - - public List getShardNodes() { - 
return testConquery.getStandaloneCommand().getShardNodes(); - } - - /** - * Retrieves the port of the admin API. - * - * @return The port. - */ - public int getAdminPort() { - return testConquery.getDropwizard().getAdminPort(); + public AuthorizationController getAuthorizationController() { + return testConquery.getStandaloneCommand().getManagerNode().getAuthController(); } public Client getClient() { - return testConquery.getClient() - .register(new ConqueryAuthenticationFilter(getAuthorizationController().getConqueryTokenRealm().createTokenForUser(getTestUser().getId()))); - } - - public & NamespacedId, VALUE extends Identifiable> VALUE resolve(ID id) { - return getDatasetRegistry().resolve(id); + return testConquery.getClient(); } - @Data - private static class ConqueryAuthenticationFilter implements ClientRequestFilter { - private final String token; - - @Override - public void filter(ClientRequestContext requestContext) throws IOException { - // If none set to provided token - if(requestContext.getHeaders().containsKey("Authorization")){ - return; - } - - requestContext.getHeaders().add("Authorization", "Bearer " + getToken()); - } + public UriBuilder defaultApiURIBuilder() { + return UriBuilder.fromPath("api") + .host("localhost") + .scheme("http") + .port(getLocalPort()); } /** @@ -153,17 +94,23 @@ public int getLocalPort() { return testConquery.getDropwizard().getLocalPort(); } - public UriBuilder defaultApiURIBuilder() { - return UriBuilder.fromPath("api") - .host("localhost") - .scheme("http") - .port(getLocalPort()); - } - public UriBuilder defaultAdminURIBuilder() { return UriBuilder.fromPath("admin") .host("localhost") .scheme("http") .port(getAdminPort()); } + + /** + * Retrieves the port of the admin API. + * + * @return The port. 
+ */ + public int getAdminPort() { + return testConquery.getDropwizard().getAdminPort(); + } + + public enum Mode {WORKER, SQL} + + } diff --git a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java index c451a4d7e0..90b6f61be1 100644 --- a/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java +++ b/backend/src/test/java/com/bakdata/conquery/util/support/TestConquery.java @@ -1,36 +1,44 @@ package com.bakdata.conquery.util.support; +import static org.assertj.core.api.Assertions.assertThat; import static org.awaitility.Awaitility.await; import static org.junit.jupiter.api.Assertions.fail; import java.io.File; import java.time.Duration; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import jakarta.validation.Validator; import jakarta.ws.rs.client.Client; +import jakarta.ws.rs.core.UriBuilder; import com.bakdata.conquery.Conquery; import com.bakdata.conquery.commands.DistributedStandaloneCommand; import com.bakdata.conquery.commands.ShardNode; import com.bakdata.conquery.commands.StandaloneCommand; import com.bakdata.conquery.integration.IntegrationTests; +import com.bakdata.conquery.integration.common.LoadingUtil; import com.bakdata.conquery.integration.json.TestDataImporter; import com.bakdata.conquery.integration.sql.SqlStandaloneCommand; import com.bakdata.conquery.io.storage.MetaStorage; import com.bakdata.conquery.mode.cluster.ClusterManager; import com.bakdata.conquery.mode.cluster.ClusterState; +import com.bakdata.conquery.models.auth.AuthorizationController; import com.bakdata.conquery.models.auth.entities.User; import com.bakdata.conquery.models.config.ConqueryConfig; import com.bakdata.conquery.models.datasets.Dataset; import com.bakdata.conquery.models.execution.ExecutionState; import 
com.bakdata.conquery.models.identifiable.ids.specific.DatasetId; +import com.bakdata.conquery.models.identifiable.ids.specific.UserId; import com.bakdata.conquery.models.query.ExecutionManager; import com.bakdata.conquery.models.worker.DatasetRegistry; import com.bakdata.conquery.models.worker.Namespace; +import com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor; +import com.bakdata.conquery.resources.admin.rest.AdminProcessor; import com.bakdata.conquery.util.io.Cloner; import com.google.common.util.concurrent.Uninterruptibles; import io.dropwizard.client.JerseyClientBuilder; @@ -63,6 +71,7 @@ public class TestConquery { private Client client; // Initial user which is set before each test from the config. + @Getter private User testUser; public synchronized StandaloneSupport openDataset(DatasetId datasetId) { @@ -75,6 +84,62 @@ public synchronized StandaloneSupport openDataset(DatasetId datasetId) { } } + private synchronized StandaloneSupport createSupport(DatasetId datasetId, String name) { + if (config.getSqlConnectorConfig().isEnabled()) { + return buildSupport(datasetId, name, StandaloneSupport.Mode.SQL); + } + return buildDistributedSupport(datasetId, name); + } + + private StandaloneSupport buildSupport(DatasetId datasetId, String name, StandaloneSupport.Mode mode) { + + DatasetRegistry datasets = standaloneCommand.getManager().getDatasetRegistry(); + Namespace ns = datasets.get(datasetId); + + // make tmp subdir and change cfg accordingly + File localTmpDir = new File(tmpDir, "tmp_" + name); + + if (!localTmpDir.exists()) { + if (!localTmpDir.mkdir()) { + throw new IllegalStateException("Could not create directory for Support"); + } + } + else { + log.info("Reusing existing folder {} for Support", localTmpDir.getPath()); + } + + ConqueryConfig + localCfg = + Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManagerNode().getEnvironment().getValidator()), IntegrationTests.MAPPER); + + StandaloneSupport support = new 
StandaloneSupport( + mode, + this, + ns, + ns.getStorage().getDataset(), + localTmpDir, + localCfg, + // Getting the User from AuthorizationConfig + testDataImporter + ); + + support.waitUntilWorkDone(); + openSupports.add(support); + return support; + } + + private synchronized StandaloneSupport buildDistributedSupport(DatasetId datasetId, String name) { + + ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); + ClusterState clusterState = manager.getConnectionManager().getClusterState(); + assertThat(clusterState.getShardNodes()).hasSize(2); + + await().atMost(10, TimeUnit.SECONDS) + .until(() -> clusterState.getWorkerHandlers().get(datasetId).getWorkers().size() == clusterState.getShardNodes().size()); + + return buildSupport(datasetId, name, StandaloneSupport.Mode.WORKER); + } + public synchronized StandaloneSupport getSupport(String name) { try { log.info("Setting up dataset"); @@ -83,14 +148,73 @@ public synchronized StandaloneSupport getSupport(String name) { name += "[" + count + "]"; } Dataset dataset = new Dataset(name); - standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor().addDataset(dataset); - return createSupport(dataset.getId(), name); + waitUntilWorkDone(); + LoadingUtil.importDataset(getClient(), defaultAdminURIBuilder(), dataset); + + // Little detour here, but this way we get the correctly initialized dataset id + DatasetId datasetId = getDatasetRegistry().get(new DatasetId(dataset.getName())).getDataset().getId(); + waitUntilWorkDone(); + + return createSupport(datasetId, name); } catch (Exception e) { return fail("Failed to create a support for " + name, e); } } + public void waitUntilWorkDone() { + log.info("Waiting for jobs to finish"); + //sample multiple times from the job queues to make sure we are done with everything and don't miss late arrivals + long started = System.nanoTime(); + for (int i = 0; i < 5; i++) { + do { + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.MILLISECONDS); + + if 
(!isBusy()) { + break; + } + + + if (Duration.ofNanos(System.nanoTime() - started).toSeconds() > 10) { + started = System.nanoTime(); + log.warn("waiting for done work for a long time", new Exception()); + } + + } while (true); + } + log.trace("all jobs finished"); + } + + public UriBuilder defaultAdminURIBuilder() { + return UriBuilder.fromPath("admin") + .host("localhost") + .scheme("http") + .port(dropwizard.getAdminPort()); + } + + public DatasetRegistry getDatasetRegistry() { + return getStandaloneCommand().getManagerNode().getDatasetRegistry(); + } + + private boolean isBusy() { + boolean busy; + busy = standaloneCommand.getManagerNode().getJobManager().isSlowWorkerBusy(); + busy |= standaloneCommand.getManager().getDatasetRegistry().getDatasets().stream() + .map(Namespace::getExecutionManager) + .flatMap(e -> e.getExecutionStates().asMap().values().stream()) + .map(ExecutionManager.State::getState) + .anyMatch(ExecutionState.RUNNING::equals); + + for (Namespace namespace : standaloneCommand.getManagerNode().getDatasetRegistry().getDatasets()) { + busy |= namespace.getJobManager().isSlowWorkerBusy(); + } + + for (ShardNode shard : standaloneCommand.getShardNodes()) { + busy |= shard.isBusy(); + } + return busy; + } + @SneakyThrows public synchronized void shutdown() { //stop dropwizard directly so ConquerySupport does not delete the tmp directory @@ -98,7 +222,6 @@ public synchronized void shutdown() { openSupports.clear(); } - public void beforeAll() throws Exception { log.info("Working in temporary directory {}", tmpDir); @@ -117,19 +240,28 @@ public void beforeAll() throws Exception { // start server dropwizard.before(); - - if (!config.getSqlConnectorConfig().isEnabled()) { - // Wait for shards to be connected - ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); - ClusterState clusterState = manager.getConnectionManager().getClusterState(); - await().atMost(10, TimeUnit.SECONDS).until(() -> clusterState.getShardNodes().size() == 2); 
- } - // create HTTP client for api tests client = new JerseyClientBuilder(this.getDropwizard().getEnvironment()) .withProperty(ClientProperties.CONNECT_TIMEOUT, 10000) .withProperty(ClientProperties.READ_TIMEOUT, 10000) .build("test client"); + + + + // The test user is recreated after each test, in the storage, but its id stays the same. + // Here we register the client filter once for that test user id. + UserId testUserId = config.getAuthorizationRealms().getInitialUsers().get(0).createId(); + client.register(new ConqueryAuthenticationFilter(() -> getAuthorizationController().getConqueryTokenRealm().createTokenForUser(testUserId))); + + testUser = getMetaStorage().getUser(testUserId); + } + + public AuthorizationController getAuthorizationController() { + return getStandaloneCommand().getManagerNode().getAuthController(); + } + + public MetaStorage getMetaStorage() { + return getStandaloneCommand().getManagerNode().getMetaStorage(); } public void afterAll() { @@ -162,109 +294,36 @@ public void removeSupport(StandaloneSupport support) { } } - public void waitUntilWorkDone() { - log.info("Waiting for jobs to finish"); - //sample multiple times from the job queues to make sure we are done with everything and don't miss late arrivals - long started = System.nanoTime(); - for (int i = 0; i < 5; i++) { - do { - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.MILLISECONDS); - - if (!isBusy()) { - break; - } - + public void beforeEach() { - if (Duration.ofNanos(System.nanoTime() - started).toSeconds() > 10) { - started = System.nanoTime(); - log.warn("waiting for done work for a long time", new Exception()); - } + // Because Shiro works with a static Security manager + getStandaloneCommand().getManagerNode().getAuthController().registerStaticSecurityManager(); - } while (true); - } - log.trace("all jobs finished"); - } - - public void beforeEach() { + // MetaStorage is cleared after each test, so we need to add the test user again final MetaStorage storage = 
standaloneCommand.getManagerNode().getMetaStorage(); testUser = standaloneCommand.getManagerNode().getConfig().getAuthorizationRealms().getInitialUsers().get(0).createOrOverwriteUser(storage); - storage.updateUser(testUser); } - private synchronized StandaloneSupport createSupport(DatasetId datasetId, String name) { - if (config.getSqlConnectorConfig().isEnabled()) { - return buildSupport(datasetId, name, StandaloneSupport.Mode.SQL); - } - return buildDistributedSupport(datasetId, name); + public Validator getValidator() { + return getStandaloneCommand().getManagerNode().getValidator(); } - private synchronized StandaloneSupport buildDistributedSupport(DatasetId datasetId, String name) { - - ClusterManager manager = (ClusterManager) standaloneCommand.getManager(); - ClusterState clusterState = manager.getConnectionManager().getClusterState(); - - await().atMost(10, TimeUnit.SECONDS) - .until(() -> clusterState.getWorkerHandlers().get(datasetId).getWorkers().size() == clusterState.getShardNodes().size()); - - return buildSupport(datasetId, name, StandaloneSupport.Mode.WORKER); + public List getShardNodes() { + return getStandaloneCommand().getShardNodes(); } - private StandaloneSupport buildSupport(DatasetId datasetId, String name, StandaloneSupport.Mode mode) { - - DatasetRegistry datasets = standaloneCommand.getManager().getDatasetRegistry(); - Namespace ns = datasets.get(datasetId); - - // make tmp subdir and change cfg accordingly - File localTmpDir = new File(tmpDir, "tmp_" + name); - - if (!localTmpDir.exists()) { - if (!localTmpDir.mkdir()) { - throw new IllegalStateException("Could not create directory for Support"); - } - } - else { - log.info("Reusing existing folder {} for Support", localTmpDir.getPath()); - } - - ConqueryConfig - localCfg = - Cloner.clone(config, Map.of(Validator.class, standaloneCommand.getManagerNode().getEnvironment().getValidator()), IntegrationTests.MAPPER); - - StandaloneSupport support = new StandaloneSupport( - mode, - this, - 
ns, - ns.getStorage().getDataset(), - localTmpDir, - localCfg, - standaloneCommand.getManagerNode().getAdmin().getAdminProcessor(), - standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor(), - // Getting the User from AuthorizationConfig - testUser, - testDataImporter - ); - - support.waitUntilWorkDone(); - openSupports.add(support); - return support; + public AdminProcessor getAdminProcessor() { + return standaloneCommand.getManagerNode().getAdmin().getAdminProcessor(); } - private boolean isBusy() { - boolean busy; - busy = standaloneCommand.getManagerNode().getJobManager().isSlowWorkerBusy(); - busy |= standaloneCommand.getManager().getDatasetRegistry().getDatasets().stream() - .map(Namespace::getExecutionManager) - .flatMap(e -> e.getExecutionStates().asMap().values().stream()) - .map(ExecutionManager.State::getState) - .anyMatch(ExecutionState.RUNNING::equals); - - for (Namespace namespace : standaloneCommand.getManagerNode().getDatasetRegistry().getDatasets()) { - busy |= namespace.getJobManager().isSlowWorkerBusy(); - } + public AdminDatasetProcessor getAdminDatasetsProcessor() { + return standaloneCommand.getManagerNode().getAdmin().getAdminDatasetProcessor(); + } - for (ShardNode shard : standaloneCommand.getShardNodes()) { - busy |= shard.isBusy(); - } - return busy; + public UriBuilder defaultApiURIBuilder() { + return UriBuilder.fromPath("api") + .host("localhost") + .scheme("http") + .port(dropwizard.getLocalPort()); } } diff --git a/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json b/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json deleted file mode 100644 index 5c9888f4ef..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/GROUP.test.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "type": "FILTER_TEST", - "label": "GROUP Test", - "expectedCsv": "tests/filter/GROUP/expected.csv", - "content": { - "tables": { - "csv": "tests/filter/GROUP/content.csv", - "primaryColumn": { - "name": "pid", - "type": 
"STRING" - }, - "columns": [ - { - "name": "datum", - "type": "DATE" - }, - { - "name": "value", - "type": "STRING" - } - ] - } - }, - "connector": { - "validityDates": { - "label": "Datum", - "column": "table.datum" - }, - "filters": { - "label": "test filter", - "type": "TEST_GROUP_FILTER", - "column": "table.value" - } - }, - "filterValue": { - "type": "GROUP", - "value": { - "strings": [ - "a", - "ab" - ], - "repetitions": 2 - } - }, - "expectedFrontendConfig": { - "label": "test filter", - "type": "GROUP", - "filters": { - "strings": { - "type": "MULTI_SELECT", - "label": "Elements", - "options": [] - }, - "repetitions": { - "type": "INTEGER", - "label": "Maximum Repetitions", - "options": [] - } - }, - "options": [] - } -} diff --git a/backend/src/test/resources/tests/filter/GROUP/content.csv b/backend/src/test/resources/tests/filter/GROUP/content.csv deleted file mode 100644 index a3527700c6..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/content.csv +++ /dev/null @@ -1,15 +0,0 @@ -pid,datum,value -1,2015-03-17,a - -2,2015-03-17,ab - -3,2015-03-17,abab - -4,2015-03-17,aaa -4,2015-03-18,ababab - -5,2015-03-18,abab -5,2015-03-18,aaa - -6,2015-03-18, - diff --git a/backend/src/test/resources/tests/filter/GROUP/expected.csv b/backend/src/test/resources/tests/filter/GROUP/expected.csv deleted file mode 100644 index 0ec1de07b3..0000000000 --- a/backend/src/test/resources/tests/filter/GROUP/expected.csv +++ /dev/null @@ -1,5 +0,0 @@ -result,dates -1,{2015-03-17/2015-03-17} -2,{2015-03-17/2015-03-17} -3,{2015-03-17/2015-03-17} -5,{2015-03-18/2015-03-18}