diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
index 52a007fd55..09717c1d49 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/QueryProcessor.java
@@ -107,35 +107,41 @@ public class QueryProcessor {
 	private Validator validator;
 
-	public Stream getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
+	public Stream getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
 		final Stream allQueries = storage.getAllExecutions();
 
 		return getQueriesFiltered(dataset.getId(), RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders);
 	}
 
-	public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream allQueries, boolean allProviders) {
+	public Stream getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream allQueries, boolean allProviders) {
 		return allQueries
-				// The following only checks the dataset, under which the query was submitted, but a query can target more that
-				// one dataset.
-				.filter(q -> q.getDataset().equals(datasetId))
-				// to exclude subtypes from somewhere else
-				.filter(QueryProcessor::canFrontendRender)
-				.filter(Predicate.not(ManagedExecution::isSystem))
-				.filter(q -> {
-					ExecutionState state = q.getState();
-					return state == ExecutionState.NEW || state == ExecutionState.DONE;
-				}
-				)
-				.filter(q -> subject.isPermitted(q, Ability.READ))
-				.map(mq -> {
-					final OverviewExecutionStatus status = mq.buildStatusOverview(subject);
-
-					if (mq.isReadyToDownload()) {
-						status.setResultUrls(getResultAssets(config.getResultProviders(), mq, uriBuilder, allProviders));
-					}
-					return status;
-				});
+				// The following only checks the dataset, under which the query was submitted, but a query can target more that
+				// one dataset.
+				.filter(q -> q.getDataset().equals(datasetId))
+				// to exclude subtypes from somewhere else
+				.filter(QueryProcessor::canFrontendRender)
+				.filter(Predicate.not(ManagedExecution::isSystem))
+				.filter(q -> {
+					ExecutionState state = q.getState();
+					return state == ExecutionState.NEW || state == ExecutionState.DONE;
+				})
+				.filter(q -> subject.isPermitted(q, Ability.READ))
+				.map(mq -> {
+					try {
+						final OverviewExecutionStatus status = mq.buildStatusOverview(subject);
+
+						if (mq.isReadyToDownload()) {
+							status.setResultUrls(getResultAssets(config.getResultProviders(), mq, uriBuilder, allProviders));
+						}
+						return status;
+					}
+					catch (Exception e) {
+						log.error("FAILED building status for {}", mq, e);
+					}
+					return null;
+				})
+				.filter(Objects::nonNull);
	}
 
	/**
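Note: the rewritten getQueriesFiltered above wraps the per-execution status building in try/catch and then drops failed entries via filter(Objects::nonNull), so one broken execution no longer aborts the whole listing. A minimal, self-contained sketch of that log-and-skip stream pattern (class and method names here are illustrative, not taken from the patch):

    import java.util.List;
    import java.util.Objects;
    import java.util.stream.Stream;

    class LogAndSkipExample {

        // Hypothetical stand-in for OverviewExecutionStatus.
        record Status(String id) {}

        static Status buildStatus(String id) {
            if (id.isBlank()) {
                // Simulates an execution whose status cannot be built (e.g. stale references).
                throw new IllegalStateException("broken execution");
            }
            return new Status(id);
        }

        static Stream<Status> buildStatuses(Stream<String> executionIds) {
            return executionIds
                    .map(id -> {
                        try {
                            return buildStatus(id);
                        }
                        catch (Exception e) {
                            // Log and skip instead of failing the entire stream.
                            System.err.printf("FAILED building status for %s: %s%n", id, e);
                            return null;
                        }
                    })
                    .filter(Objects::nonNull);
        }

        public static void main(String[] args) {
            List<Status> ok = buildStatuses(Stream.of("a", "", "b")).toList();
            System.out.println(ok); // prints [Status[id=a], Status[id=b]]
        }
    }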
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
index 115fb6b28c..9fd20f9c6c 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/TableExportQuery.java
@@ -200,7 +200,7 @@ private static Map calculateColumnPositions(
 		for (Column column : table.getConnector().resolve().getResolvedTable().getColumns()) {
 
 			// ValidityDates are handled separately in column=0
-			if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column))) {
+			if (validityDates.stream().anyMatch(vd -> vd.containsColumn(column.getId()))) {
 				continue;
 			}
diff --git a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java
index 3a9578d0fa..bb19633920 100644
--- a/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java
+++ b/backend/src/main/java/com/bakdata/conquery/apiv1/query/concept/specific/CQConcept.java
@@ -148,10 +148,11 @@ public String defaultLabel(Locale locale) {
 		builder.append(" ");
 
 		for (ConceptElementId id : elements) {
-			ConceptElement conceptElement = id.resolve();
-			if (conceptElement.equals(getConcept())) {
+			if (id.equals(getConceptId())) {
 				continue;
 			}
+
+			ConceptElement conceptElement = id.resolve();
 			builder.append(conceptElement.getLabel()).append("+");
 		}
 
@@ -274,9 +275,7 @@ public RequiredEntities collectRequiredEntities(QueryExecutionContext context) {
 		final Set connectors = getTables().stream().map(CQTable::getConnector).collect(Collectors.toSet());
 
 		return new RequiredEntities(context.getBucketManager()
-										   .getEntitiesWithConcepts(getElements().stream()
-																				 .<ConceptElement<?>>map(ConceptElementId::resolve)
-																				 .toList(),
+										   .getEntitiesWithConcepts(getElements(),
 																	connectors, context.getDateRestriction()));
 	}
diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java
index 3169c321be..370d5eec4e 100644
--- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java
+++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ExcelRenderer.java
@@ -12,13 +12,12 @@ import c10n.C10N;
 import com.bakdata.conquery.internationalization.ExcelSheetNameC10n;
-import com.bakdata.conquery.models.common.CDate;
+import com.bakdata.conquery.io.storage.MetaStorage;
 import com.bakdata.conquery.models.auth.entities.User;
 import com.bakdata.conquery.models.config.ExcelConfig;
 import com.bakdata.conquery.models.execution.ManagedExecution;
 import com.bakdata.conquery.models.i18n.I18n;
 import com.bakdata.conquery.models.identifiable.mapping.PrintIdMapper;
-import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.SingleTableResult;
 import com.bakdata.conquery.models.query.resultinfo.ResultInfo;
@@ -54,6 +53,7 @@ public class ExcelRenderer {
 	private final ExcelConfig config;
 	private final PrintSettings settings;
 	private final ImmutableMap styles;
+
 	public ExcelRenderer(ExcelConfig config, PrintSettings settings) {
 		workbook = new SXSSFWorkbook();
 		this.config = config;
@@ -61,11 +61,12 @@ public ExcelRenderer(ExcelConfig config, PrintSettings settings) {
 		this.settings = settings;
 	}
 
-	public void renderToStream(List idHeaders, E exec, OutputStream outputStream, OptionalLong limit, PrintSettings printSettings)
+	public void renderToStream(
+			List idHeaders, E exec, OutputStream outputStream, OptionalLong limit, PrintSettings printSettings, MetaStorage storage)
 			throws IOException {
 		final List resultInfosExec = exec.getResultInfos();
 
-		setMetaData(exec);
+		setMetaData(exec, storage);
 
 		final SXSSFSheet sheet = workbook.createSheet(C10N.get(ExcelSheetNameC10n.class, I18n.LOCALE.get()).result());
 		try {
@@ -91,12 +92,21 @@ public void renderToStream(List
 	/**
 	 * Include meta data in the xlsx such as the title, owner/author, tag and the name of this instance.
 	 */
-	private void setMetaData(E exec) {
+	private void setMetaData(E exec, MetaStorage metaStorage) {
 		final POIXMLProperties.CoreProperties coreProperties = workbook.getXSSFWorkbook().getProperties().getCoreProperties();
 		coreProperties.setTitle(exec.getLabelWithoutAutoLabelSuffix());
-		final UserId owner = exec.getOwner();
-		coreProperties.setCreator(owner != null ? owner.resolve().getLabel() : config.getApplicationName());
+		String creator = config.getApplicationName();
+
+		if (exec.getOwner() != null) {
+			final User user = metaStorage.get(exec.getOwner());
+
+			if (user != null) {
+				creator = user.getLabel();
+			}
+		}
+
+		coreProperties.setCreator(creator);
 		coreProperties.setKeywords(String.join(" ", exec.getTags()));
 
 		final POIXMLProperties.ExtendedProperties extendedProperties = workbook.getXSSFWorkbook().getProperties().getExtendedProperties();
 		extendedProperties.setApplication(config.getApplicationName());
@@ -180,7 +190,8 @@ private int writeBody(
 		// Row 0 is the Header the data starts at 1
 		final AtomicInteger currentRow = new AtomicInteger(1);
-		final TypeWriter[] writers = infos.stream().map(info -> writer(info.getType(), info.createPrinter(printerFactory, settings), settings)).toArray(TypeWriter[]::new);
+		final TypeWriter[] writers =
+				infos.stream().map(info -> writer(info.getType(), info.createPrinter(printerFactory, settings), settings)).toArray(TypeWriter[]::new);
 		final PrintIdMapper idMapper = settings.getIdMapper();
 
 		final int writtenLines = resultLines.mapToInt(l -> writeRowsForEntity(infos, l, currentRow, sheet, writers, idMapper)).sum();
@@ -215,10 +226,32 @@ private void postProcessTable(SXSSFSheet sheet, XSSFTable table, int writtenLine
 		sheet.createFreezePane(size, 1);
 	}
 
+	private static TypeWriter writer(ResultType type, Printer printer, PrintSettings settings) {
+		if (type instanceof ResultType.ListT) {
+			//Excel cannot handle LIST types so we just toString them.
+			return (value, cell, styles) -> writeStringCell(cell, value, printer);
+		}
+
+		return switch (((ResultType.Primitive) type)) {
+			case BOOLEAN -> (value, cell, styles) -> writeBooleanCell(value, cell, printer);
+			case INTEGER -> (value, cell, styles) -> writeIntegerCell(value, cell, printer, styles);
+			case MONEY -> (value, cell, styles) -> writeMoneyCell(value, cell, printer, settings, styles);
+			case NUMERIC -> (value, cell, styles) -> writeNumericCell(value, cell, printer, styles);
+			case DATE -> (value, cell, styles) -> writeDateCell(value, cell, printer, styles);
+			default -> (value, cell, styles) -> writeStringCell(cell, value, printer);
+		};
+	}
+
 	/**
 	 * Writes the result lines for each entity.
 	 */
-	private int writeRowsForEntity(List infos, EntityResult internalRow, final AtomicInteger currentRow, SXSSFSheet sheet, TypeWriter[] writers, PrintIdMapper idMapper) {
+	private int writeRowsForEntity(
+			List infos,
+			EntityResult internalRow,
+			final AtomicInteger currentRow,
+			SXSSFSheet sheet,
+			TypeWriter[] writers,
+			PrintIdMapper idMapper) {
 
 		final String[] ids = idMapper.map(internalRow).getExternalId();
 
@@ -286,22 +319,6 @@ private void setColumnWidthsAndUntrack(SXSSFSheet sheet) {
 		}
 	}
 
-	private static TypeWriter writer(ResultType type, Printer printer, PrintSettings settings) {
-		if (type instanceof ResultType.ListT) {
-			//Excel cannot handle LIST types so we just toString them.
-			return (value, cell, styles) -> writeStringCell(cell, value, printer);
-		}
-
-		return switch (((ResultType.Primitive) type)) {
-			case BOOLEAN -> (value, cell, styles) -> writeBooleanCell(value, cell, printer);
-			case INTEGER -> (value, cell, styles) -> writeIntegerCell(value, cell, printer, styles);
-			case MONEY -> (value, cell, styles) -> writeMoneyCell(value, cell, printer, settings, styles);
-			case NUMERIC -> (value, cell, styles) -> writeNumericCell(value, cell, printer, styles);
-			case DATE -> (value, cell, styles) -> writeDateCell(value, cell, printer, styles);
-			default -> (value, cell, styles) -> writeStringCell(cell, value, printer);
-		};
-	}
-
 	// Type specific cell writers
 	private static void writeStringCell(Cell cell, Object value, Printer printer) {
 		cell.setCellValue((String) printer.apply(value));
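Note: the writer(...) factory above returns TypeWriter lambdas of the shape (value, cell, styles) -> ..., but the TypeWriter interface itself is not part of this diff. It is presumably a small functional interface along these lines (the signature is assumed from the call sites, not copied from the repository):

    import java.util.Map;
    import org.apache.poi.ss.usermodel.Cell;
    import org.apache.poi.ss.usermodel.CellStyle;

    // Assumed shape: one cell-writing strategy per ResultType, chosen once per column
    // in writer(...) instead of being re-dispatched for every single cell.
    @FunctionalInterface
    interface TypeWriter {
        void write(Object value, Cell cell, Map<String, CellStyle> styles);
    }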
diff --git a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java
index 18a0205c2b..7e2c81194f 100644
--- a/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java
+++ b/backend/src/main/java/com/bakdata/conquery/io/result/excel/ResultExcelProcessor.java
@@ -10,6 +10,7 @@
 import jakarta.ws.rs.core.StreamingOutput;
 
 import com.bakdata.conquery.io.result.ResultUtil;
+import com.bakdata.conquery.io.storage.MetaStorage;
 import com.bakdata.conquery.models.auth.entities.Subject;
 import com.bakdata.conquery.models.config.ConqueryConfig;
 import com.bakdata.conquery.models.config.ExcelConfig;
@@ -33,6 +34,8 @@ public class ResultExcelProcessor {
 
 	// Media type according to https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
 	public static final MediaType MEDIA_TYPE = new MediaType("application", "vnd.openxmlformats-officedocument.spreadsheetml.sheet");
+
+	private final MetaStorage metaStorage;
 	private final DatasetRegistry datasetRegistry;
 	private final ConqueryConfig conqueryConfig;
 
@@ -57,7 +60,7 @@ public Response createResult(Su
 		final ExcelRenderer excelRenderer = new ExcelRenderer(excelConfig, settings);
 
 		final StreamingOutput out = output -> {
-			excelRenderer.renderToStream(conqueryConfig.getIdColumns().getIdResultInfos(), exec, output, limit, settings);
+			excelRenderer.renderToStream(conqueryConfig.getIdColumns().getIdResultInfos(), exec, output, limit, settings, metaStorage);
 			log.trace("FINISHED downloading {}", exec.getId());
 		};
diff --git a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java
index 3d7ca63abb..d7af7d5e0d 100644
--- a/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java
+++ b/backend/src/main/java/com/bakdata/conquery/mode/cluster/ClusterImportHandler.java
@@ -63,7 +63,7 @@ private static void handleImport(Namespace namespace, InputStream inputStream, b
 
 			readAndDistributeImport(((DistributedNamespace) namespace), table, header, parser, datasetRegistry);
 
-			clearDependentConcepts(namespace.getStorage().getAllConcepts(), table);
+			clearDependentConcepts(namespace.getStorage().getAllConcepts(), table.getId());
 		}
 	}
 
@@ -150,10 +150,10 @@ private static void readAndDistributeImport(DistributedNamespace namespace, Tabl
 	}
 
-	private static void clearDependentConcepts(Stream<Concept<?>> allConcepts, Table table) {
+	private static void clearDependentConcepts(Stream<Concept<?>> allConcepts, TableId table) {
 		allConcepts.map(Concept::getConnectors)
 				   .flatMap(List::stream)
-				   .filter(con -> con.getResolvedTableId().equals(table.getId()))
+				   .filter(con -> con.getResolvedTableId().equals(table))
 				   .map(Connector::getConcept)
 				   .forEach(Concept::clearMatchingStats);
 	}
 
@@ -183,7 +183,7 @@ public void deleteImport(Import imp) {
 		final DatasetId id = imp.getTable().getDataset();
 		final DistributedNamespace namespace = datasetRegistry.get(id);
 
-		clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable().resolve());
+		clearDependentConcepts(namespace.getStorage().getAllConcepts(), imp.getTable());
 
 		namespace.getStorage().removeImport(imp.getId());
 		namespace.getWorkerHandler().sendToAll(new RemoveImportJob(imp.getId()));
diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java
index 568a31adfd..a3639bbee3 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/PreviewConfig.java
@@ -3,7 +3,6 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 import jakarta.validation.Valid;
@@ -180,7 +179,6 @@ public Listresolve)
-				   .filter(Objects::nonNull)
 				   .collect(Collectors.toList());
 	}
 
@@ -191,7 +189,6 @@ public List resolveSearchFilters() {
 		return searchFilters.stream()
 							.map(FilterId::resolve)
-							.filter(Objects::nonNull)
 							.map(Filter::getId)
 							.toList();
 	}
diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java
index de44285ec2..171ead0ee0 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/ValidityDate.java
@@ -1,5 +1,6 @@
 package com.bakdata.conquery.models.datasets.concepts;
 
+import java.util.function.BiFunction;
 import javax.annotation.CheckForNull;
 import javax.annotation.Nullable;
 
@@ -38,6 +39,9 @@ public class ValidityDate extends Labeled implements NamespacedI
 	@EqualsAndHashCode.Exclude
 	private Connector connector;
 
+	@JsonIgnore
+	private BiFunction<Integer, Bucket, CDateRange> extractor;
+
 	public static ValidityDate create(Column column) {
 		final ValidityDate validityDate = new ValidityDate();
 		validityDate.setColumn(column.getId());
@@ -53,37 +57,15 @@ public static ValidityDate create(Column startColumn, Column endColumn) {
 
 	@CheckForNull
 	public CDateRange getValidityDate(int event, Bucket bucket) {
-		// I spent a lot of time trying to create two classes implementing single/multi-column valditiy dates separately.
-		// JsonCreator was not happy, and I could not figure out why. This is probably the most performant implementation that's not two classes.
-
-		if (getColumn() != null) {
-			final Column resolvedColumn = getColumn().resolve();
-			if (bucket.has(event, resolvedColumn)) {
-				return bucket.getAsDateRange(event, resolvedColumn);
-			}
-
-			return null;
+		if (extractor == null){
+			//TODO this is just a workaround: We should actually be using Initializing, which sadly gives us issues with LoadingUtil
+			init();
 		}
 
-		final Column startColumn = getStartColumn() != null ? getStartColumn().resolve() : null;
-		final Column endColumn = getEndColumn() != null ? getEndColumn().resolve() : null;
-
-		final boolean hasStart = bucket.has(event, startColumn);
-		final boolean hasEnd = bucket.has(event, endColumn);
-
-		if (!hasStart && !hasEnd) {
-			return null;
-		}
-
-		final int start = hasStart ? bucket.getDate(event, startColumn) : Integer.MIN_VALUE;
-		final int end = hasEnd ? bucket.getDate(event, endColumn) : Integer.MAX_VALUE;
-
-		return CDateRange.of(start, end);
+		return extractor.apply(event, bucket);
 	}
 
-	// TODO use Id as parameter
-	public boolean containsColumn(Column column) {
-		final ColumnId id = column.getId();
+	public boolean containsColumn(ColumnId id) {
 		return id.equals(getColumn()) || id.equals(getStartColumn()) || id.equals(getEndColumn());
 	}
 
@@ -110,4 +92,37 @@ public DatasetId getDataset() {
 	public ValidityDateId createId() {
 		return new ValidityDateId(connector.getId(), getName());
 	}
+
+	public void init() {
+		// Initialize extractor early to avoid resolve and dispatch in very hot code. Hopefully boxing can be elided.
+		if (column != null) {
+			final Column resolvedColumn = column.resolve();
+
+			extractor = (event, bucket) -> {
+				if (bucket.has(event, resolvedColumn)) {
+					return bucket.getAsDateRange(event, resolvedColumn);
+				}
+
+				return null;
+			};
+			return;
+		}
+
+		final Column resolvedStartColumn = startColumn.resolve();
+		final Column resolvedEndColumn = endColumn.resolve();
+
+		extractor = (event, bucket) -> {
+			final boolean hasStart = bucket.has(event, resolvedStartColumn);
+			final boolean hasEnd = bucket.has(event, resolvedEndColumn);
+
+			if (!hasStart && !hasEnd) {
+				return null;
+			}
+
+			final int start = hasStart ? bucket.getDate(event, resolvedStartColumn) : Integer.MIN_VALUE;
+			final int end = hasEnd ? bucket.getDate(event, resolvedEndColumn) : Integer.MAX_VALUE;
+
+			return CDateRange.of(start, end);
+		};
+	}
 }
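Note: init() above resolves the configured columns once and captures them in a BiFunction, so the hot getValidityDate path no longer resolves IDs or re-checks which column variant is configured for every event. A simplified sketch of that precompute-and-capture pattern with made-up types:

    import java.util.function.IntUnaryOperator;

    // Illustrative only: decide and resolve once at construction time, capture the result
    // in a lambda, and leave only the cheap per-event work on the hot path.
    class PrecomputedExtractorExample {

        interface Source {
            int valueAt(int event);
        }

        private final IntUnaryOperator extractor;

        PrecomputedExtractorExample(Source source, boolean negate) {
            // The branch is evaluated here once, not once per event.
            extractor = negate
                        ? event -> -source.valueAt(event)
                        : source::valueAt;
        }

        int extract(int event) {
            return extractor.applyAsInt(event);
        }
    }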
diff --git a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/concept/ConceptColumnSelect.java b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/concept/ConceptColumnSelect.java
index e66d654c6e..cf9a36e003 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/concept/ConceptColumnSelect.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/datasets/concepts/select/concept/ConceptColumnSelect.java
@@ -1,6 +1,5 @@
 package com.bakdata.conquery.models.datasets.concepts.select.concept;
 
-import java.util.Collections;
 import java.util.Set;
 
 import com.bakdata.conquery.apiv1.query.concept.specific.CQConcept;
@@ -76,6 +75,7 @@ public ResultType getResultType() {
 
 	@Override
 	public SelectConverter createConverter() {
+		//TODO bind Select to converter here
 		return new ConceptColumnSelectConverter();
 	}
 }
diff --git a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java
index 1a9023a301..02b1e6a598 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/events/BucketManager.java
@@ -20,6 +20,7 @@
 import com.bakdata.conquery.models.datasets.concepts.tree.TreeConcept;
 import com.bakdata.conquery.models.identifiable.ids.specific.BucketId;
 import com.bakdata.conquery.models.identifiable.ids.specific.CBlockId;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConceptElementId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ConceptId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ImportId;
@@ -237,8 +238,12 @@ private int getBucket(String id) {
 	/**
 	 * Collects all Entites, that have any of the concepts on the connectors in a specific time.
 	 */
-	public Set getEntitiesWithConcepts(Collection<ConceptElement<?>> concepts, Set connectors, CDateSet restriction) {
-		final long requiredBits = ConceptNode.calculateBitMask(concepts);
+	public Set getEntitiesWithConcepts(Collection<ConceptElementId<?>> concepts, Set connectors, CDateSet restriction) {
+		List<ConceptElement<?>> resolvedConcepts = concepts.stream()
+														   .<ConceptElement<?>>map(ConceptElementId::resolve)
+														   .toList();
+
+		final long requiredBits = ConceptNode.calculateBitMask(resolvedConcepts);
 
 		final Set out = new HashSet<>();
 
@@ -249,7 +254,9 @@ public Set getEntitiesWithConcepts(Collection> concept
 		for (Map bucketCBlockMap : connectorToCblocks.get(connector).values()) {
 			for (CBlockId cBlockId : bucketCBlockMap.values()) {
-				for (String entity : cBlockId.getBucket().resolve().entities()) {
+				Bucket bucket = cBlockId.getBucket().resolve();
+
+				for (String entity : bucket.entities()) {
 					CBlock cBlock = cBlockId.resolve();
 
 					if (cBlock.isConceptIncluded(entity, requiredBits) && restriction.intersects(cBlock.getEntityDateRange(entity))) {
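Note: getEntitiesWithConcepts now accepts ConceptElementIds and resolves them once up front, and the bucket is likewise resolved once per CBlock rather than inside the entity loop. A small sketch of that hoist-the-lookup idea (types and names invented for illustration):

    import java.util.List;
    import java.util.Map;

    class HoistLookupExample {

        record Bucket(List<String> entities) {}

        static int countNamedEntities(List<String> bucketIds, Map<String, Bucket> storage) {
            int total = 0;
            for (String bucketId : bucketIds) {
                // Resolved once per bucket, not once per entity.
                Bucket bucket = storage.get(bucketId);
                if (bucket == null) {
                    continue;
                }
                for (String entity : bucket.entities()) {
                    if (!entity.isBlank()) {
                        total++;
                    }
                }
            }
            return total;
        }
    }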
diff --git a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java
index 1923172899..0b6f5fc1aa 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/execution/ManagedExecution.java
@@ -10,9 +10,6 @@
 import java.util.UUID;
 import java.util.stream.Collectors;
 import jakarta.validation.constraints.NotNull;
-import jakarta.ws.rs.core.UriBuilder;
-import jakarta.validation.constraints.NotNull;
-import jakarta.ws.rs.core.UriBuilder;
 
 import com.bakdata.conquery.apiv1.execution.ExecutionStatus;
 import com.bakdata.conquery.apiv1.execution.FullExecutionStatus;
@@ -29,14 +26,15 @@
 import com.bakdata.conquery.models.auth.permissions.ConqueryPermission;
 import com.bakdata.conquery.models.auth.permissions.ExecutionPermission;
 import com.bakdata.conquery.models.config.ConqueryConfig;
-import com.bakdata.conquery.models.datasets.Dataset;
 import com.bakdata.conquery.models.datasets.concepts.Concept;
 import com.bakdata.conquery.models.datasets.concepts.ConceptElement;
 import com.bakdata.conquery.models.error.ConqueryErrorInfo;
 import com.bakdata.conquery.models.i18n.I18n;
 import com.bakdata.conquery.models.identifiable.IdentifiableImpl;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
 import com.bakdata.conquery.models.identifiable.ids.specific.GroupId;
 import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import com.bakdata.conquery.models.query.ExecutionManager;
 import com.bakdata.conquery.models.query.PrintSettings;
 import com.bakdata.conquery.models.query.Visitable;
@@ -52,8 +50,6 @@
 import com.fasterxml.jackson.annotation.JsonTypeInfo;
 import com.fasterxml.jackson.annotation.OptBoolean;
 import com.google.common.base.Preconditions;
-import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
-import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
 import lombok.AccessLevel;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -313,9 +309,12 @@ public void setStatusBase(@NonNull Subject subject, @NonNull ExecutionStatus sta
 		status.setContainsDates(containsDates);
 
 		if (owner != null) {
-			User user = owner.resolve();
-			status.setOwner(user.getId());
-			status.setOwnerName(user.getLabel());
+			User user = metaStorage.get(owner);
+
+			if(user != null) {
+				status.setOwner(user.getId());
+				status.setOwnerName(user.getLabel());
+			}
 		}
 	}
diff --git a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java
index 8e1ef40ecf..d86dfb1491 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/forms/configs/FormConfig.java
@@ -6,7 +6,6 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
 import java.util.function.Consumer;
@@ -94,7 +93,7 @@ public FormConfigId createId() {
 	 * actual form field values.
 	 */
 	public FormConfigOverviewRepresentation overview(MetaStorage storage, Subject subject) {
-		String ownerName = getOwnerName();
+		String ownerName = getOwnerName(storage);
 
 		return FormConfigOverviewRepresentation.builder()
 											   .id(getId())
@@ -110,15 +109,26 @@ public FormConfigOverviewRepresentation overview(MetaStorage storage, Subject su
 	}
 
 	@JsonIgnore
-	private @Nullable String getOwnerName() {
-		return Optional.ofNullable(owner).map(UserId::resolve).map(User.class::cast).map(User::getLabel).orElse(null);
+	@Nullable
+	private String getOwnerName(MetaStorage metaStorage) {
+		if (owner == null){
+			return null;
+		}
+
+		User resolved = metaStorage.get(owner);
+
+		if (resolved == null){
+			return null;
+		}
+
+		return resolved.getLabel();
 	}
 
 	/**
 	 * Return the full representation of the configuration with the configured form fields and meta data.
 	 */
 	public FormConfigFullRepresentation fullRepresentation(MetaStorage storage, Subject requestingUser){
-		String ownerName = getOwnerName();
+		String ownerName = getOwnerName(storage);
 
 		/* Calculate which groups can see this query.
		 * This is usually not done very often and should be reasonable fast, so don't cache this.
diff --git a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java
index 1de21de4b8..28923c7140 100644
--- a/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java
+++ b/backend/src/main/java/com/bakdata/conquery/models/query/queryplan/TableExportQueryPlan.java
@@ -173,7 +173,7 @@ private Object[] collectRow(int totalColumns, CQTable exportDescription, Bucket
 		for (Column column : connector.getResolvedTable().getColumns()) {
 
 			// ValidityDates are handled separately.
-			if (validityDate != null && validityDate.containsColumn(column)){
+			if (validityDate != null && validityDate.containsColumn(column.getId())){
 				continue;
 			}
diff --git a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java
index 76e6aba671..5135140ed8 100644
--- a/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java
+++ b/backend/src/main/java/com/bakdata/conquery/resources/api/FilterResource.java
@@ -43,7 +43,7 @@ public class FilterResource extends HAuthorized {
 	@POST
 	@Path("resolve")
 	public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues filterValues) {
-		subject.isPermitted(filter.getDataset().resolve(), Ability.READ);
+		subject.isPermitted(filter.getDataset(), Ability.READ);
 		subject.isPermitted(filter.getConnector().findConcept(), Ability.READ);
 
 		return processor.resolveFilterValues((SelectFilter) filter, filterValues.values());
@@ -53,7 +53,7 @@ public ConceptsProcessor.ResolvedFilterValues resolveFilterValues(FilterValues f
 	@POST
 	@Path("autocomplete")
 	public ConceptsProcessor.AutoCompleteResult autocompleteTextFilter(@Valid FilterResource.AutocompleteRequest request) {
-		subject.isPermitted(filter.getDataset().resolve(), Ability.READ);
+		subject.isPermitted(filter.getDataset(), Ability.READ);
 		subject.isPermitted(filter.getConnector().findConcept(), Ability.READ);
 
 		if (!(filter instanceof SelectFilter)) {
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java
index 1bc33168cc..81e8287e6d 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/cqelement/concept/CQConceptConverter.java
@@ -12,6 +12,7 @@
 import com.bakdata.conquery.models.datasets.Column;
 import com.bakdata.conquery.models.datasets.Table;
 import com.bakdata.conquery.models.datasets.concepts.ConceptElement;
+import com.bakdata.conquery.models.datasets.concepts.Connector;
 import com.bakdata.conquery.models.datasets.concepts.select.Select;
 import com.bakdata.conquery.models.datasets.concepts.select.concept.ConceptColumnSelect;
 import com.bakdata.conquery.models.datasets.concepts.tree.ConceptTreeChild;
@@ -194,8 +195,10 @@ private static Stream collectConditions(CQTable cqTable, Concept
 	}
 
 	private static Optional convertConnectorCondition(CQTable cqTable, SqlFunctionProvider functionProvider) {
-		return Optional.ofNullable(cqTable.getConnector().resolve().getCondition())
-					   .map(condition -> condition.convertToSqlCondition(CTConditionContext.create(cqTable.getConnector().resolve(), functionProvider)));
+		final Connector connector = cqTable.getConnector().resolve();
+
+		return Optional.ofNullable(connector.getCondition())
+					   .map(condition -> condition.convertToSqlCondition(CTConditionContext.create(connector, functionProvider)));
 	}
 
 	private static Optional getDateRestriction(ConversionContext context, Optional validityDate) {
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java
index 1dcd4286cc..29f800e979 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/HanaSqlFunctionProvider.java
@@ -320,7 +320,7 @@ private ColumnDateRange toColumnDateRange(ValidityDate validityDate) {
 		Column endColumn;
 
 		// if no end column is present, the only existing column is both start and end of the date range
-		if (validityDate.getEndColumn() == null) {
+		if (validityDate.getColumn() != null) {
 			Column column = validityDate.getColumn().resolve();
 			startColumn = column;
 			endColumn = column;
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java
index 8b1d166155..32ea34f4e0 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/dialect/PostgreSqlFunctionProvider.java
@@ -122,10 +122,12 @@ public ColumnDateRange forValidityDate(ValidityDate validityDate, CDateRange dat
 	@Override
 	public ColumnDateRange forArbitraryDateRange(DaterangeSelectOrFilter daterangeSelectOrFilter) {
 		String tableName = daterangeSelectOrFilter.getTable().getName();
-		if (daterangeSelectOrFilter.getEndColumn() != null) {
-			return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve());
+
+		if (daterangeSelectOrFilter.getColumn() != null) {
+			return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn().resolve());
 		}
-		return ofSingleColumn(tableName, daterangeSelectOrFilter.getColumn().resolve());
+
+		return ofStartAndEnd(tableName, daterangeSelectOrFilter.getStartColumn().resolve(), daterangeSelectOrFilter.getEndColumn().resolve());
 	}
 
 	@Override
@@ -311,10 +313,12 @@ private ColumnDateRange toColumnDateRange(CDateRange dateRestriction) {
 
 	private ColumnDateRange toColumnDateRange(ValidityDate validityDate) {
 		String tableName = validityDate.getConnector().getResolvedTableId().getTable();
-		if (validityDate.getEndColumn() != null) {
-			return ofStartAndEnd(tableName, validityDate.getStartColumn().resolve(), validityDate.getEndColumn().resolve());
+
+		if (validityDate.getColumn() != null) {
+			return ofSingleColumn(tableName, validityDate.getColumn().resolve());
 		}
-		return ofSingleColumn(tableName, validityDate.getColumn().resolve());
+
+		return ofStartAndEnd(tableName, validityDate.getStartColumn().resolve(), validityDate.getEndColumn().resolve());
 	}
 
 	private ColumnDateRange ofSingleColumn(String tableName, Column column) {
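Note: both dialect providers now branch on getColumn() != null instead of getEndColumn() == null; the underlying assumption is that a validity date is defined either by a single date column or by a start/end column pair. A tiny sketch that spells out that invariant (field names mirror ValidityDate, but this is not the production class):

    // Illustrative only.
    class DateRangeSourceExample {

        String column;      // set for single-column validity dates
        String startColumn; // set together with endColumn for range validity dates
        String endColumn;

        String describe() {
            // Branching on the single column states the intent directly;
            // "endColumn == null" only implied it.
            if (column != null) {
                return "single column: " + column;
            }
            return "range: " + startColumn + " .. " + endColumn;
        }
    }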
diff --git a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java
index 5974b7536a..684544bb4a 100644
--- a/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java
+++ b/backend/src/main/java/com/bakdata/conquery/sql/conversion/model/aggregator/SumSqlAggregator.java
@@ -98,14 +98,18 @@ private enum SumDistinctCteStep implements CteStep {
 	@Override
 	public ConnectorSqlSelects connectorSelect(SumSelect sumSelect, SelectContext selectContext) {
+		NameGenerator nameGenerator = selectContext.getNameGenerator();
+		String alias = nameGenerator.selectName(sumSelect);
+
 		Column sumColumn = sumSelect.getColumn().resolve();
 		Column subtractColumn = sumSelect.getSubtractColumn() != null ? sumSelect.getSubtractColumn().resolve() : null;
+
 		List distinctByColumns = sumSelect.getDistinctByColumn().stream().map(ColumnId::resolve).toList();
-		NameGenerator nameGenerator = selectContext.getNameGenerator();
-		String alias = nameGenerator.selectName(sumSelect);
+
 		ConnectorSqlTables tables = selectContext.getTables();
 
 		CommonAggregationSelect sumAggregationSelect;
+
 		if (!distinctByColumns.isEmpty()) {
 			SqlIdColumns ids = selectContext.getIds();
 			sumAggregationSelect = createDistinctSumAggregationSelect(sumColumn, distinctByColumns, alias, ids, tables, nameGenerator);
diff --git a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java
index d1af7170eb..30b9910a9d 100644
--- a/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/api/form/config/FormConfigTest.java
@@ -297,6 +297,7 @@ public void getConfigs() {
 
 	@Test
 	public void patchConfig() {
+		// PREPARE
 		user.addPermission(DatasetPermission.onInstance(Ability.READ, datasetId));
 
 		Group group1 = new Group("test1", "test1", storage);
diff --git a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java
index 7c2062e340..2d13f8d2f1 100644
--- a/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/integration/tests/deletion/DatasetDeletionTest.java
@@ -145,7 +145,7 @@ public void execute(String name, TestConquery testConquery) throws Exception {
 			// No CBlock associated with import may exist
 			assertThat(workerStorage.getAllCBlocks())
 					.describedAs("CBlocks for Worker %s", value.getInfo().getId())
-					.filteredOn(cBlock -> cBlock.getBucket().resolve().getTable().getDataset().equals(dataset.getId()))
+					.filteredOn(cBlock -> cBlock.getBucket().getDataset().equals(dataset.getId()))
 					.isEmpty();
 		}
 	}
diff --git a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java
index 9fb1870c83..bd411810a0 100644
--- a/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java
+++ b/backend/src/test/java/com/bakdata/conquery/io/result/excel/ExcelResultRenderTest.java
@@ -85,7 +85,7 @@ void writeAndRead() throws IOException {
 
 		final ExcelRenderer renderer = new ExcelRenderer(new ExcelConfig(), printSettings);
 
-		renderer.renderToStream(ResultTestUtil.getIdFields(), mquery, output, OptionalLong.empty(), printSettings);
+		renderer.renderToStream(ResultTestUtil.getIdFields(), mquery, output, OptionalLong.empty(), printSettings, metaStorage);
 
 		final InputStream inputStream = new ByteArrayInputStream(output.toByteArray());
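Note: taken together, the recurring refactor in this diff is to pass lightweight IDs (ColumnId, TableId, ConceptElementId, UserId) through APIs and resolve them explicitly only where the full object is needed, often with a null-safe fallback. A compact illustration of the before/after API shape (names invented, not from the repository):

    class IdFirstApiExample {

        record ColumnId(String name) {}
        record Column(ColumnId id, String type) {}

        // Before-style API: callers must resolve the full object just to compare identities.
        static boolean containsColumnEager(Column column, ColumnId tracked) {
            return column.id().equals(tracked);
        }

        // After-style API: compares IDs directly; no resolution needed at all.
        static boolean containsColumn(ColumnId column, ColumnId tracked) {
            return column.equals(tracked);
        }
    }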