Merge pull request #3554 from ingef/feature/remove-central-registry
remove CentralRegistry
thoniTUB authored Oct 8, 2024
2 parents 057eb76 + cbbfa29 commit d775e0e
Showing 355 changed files with 6,767 additions and 6,223 deletions.
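The pattern repeated across most of these 355 files: fields and signatures that previously carried registry-resolved objects (wired up through the `@NsIdRef`/`@MetaIdRef` Jackson serializers backed by the removed CentralRegistry) now carry plain id types such as `DatasetId`, `UserId`, and `SecondaryIdDescriptionId`, and resolve against storage only when needed (see `subExecutionId.resolve()` further down). A minimal self-contained sketch of that migration; all names below are invented stand-ins, not the real Conquery classes:

```java
// Sketch of the migration pattern, not actual Conquery code: Dataset, DatasetId,
// STORE and resolve() are illustrative stand-ins.
import java.util.HashMap;
import java.util.Map;

final class Dataset {
    final String name;
    Dataset(String name) { this.name = name; }
}

// After the change, objects are referenced by a lightweight id that is resolved
// against storage only when the object is actually needed, instead of a Jackson
// @NsIdRef serializer materializing the object through a central registry.
record DatasetId(String name) {
    static final Map<String, Dataset> STORE = new HashMap<>(); // stand-in for storage

    Dataset resolve() {
        return STORE.get(name); // lookup is deferred to the call site
    }
}

class IdRefMigrationSketch {
    // Before: `@NsIdRef private Dataset dataset;` (resolved object held directly)
    // After:  only the id is held; callers resolve on demand.
    DatasetId dataset = new DatasetId("demo");

    public static void main(String[] args) {
        DatasetId.STORE.put("demo", new Dataset("demo"));
        System.out.println(new IdRefMigrationSketch().dataset.resolve().name); // demo
    }
}
```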
6 changes: 0 additions & 6 deletions autodoc/src/main/java/com/bakdata/conquery/Constants.java
@@ -31,10 +31,6 @@
import com.bakdata.conquery.apiv1.query.concept.filter.FilterValue;
import com.bakdata.conquery.apiv1.query.concept.filter.ValidityDateContainer;
import com.bakdata.conquery.io.cps.CPSType;
-import com.bakdata.conquery.io.jackson.serializer.MetaIdRef;
-import com.bakdata.conquery.io.jackson.serializer.MetaIdRefCollection;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
import com.bakdata.conquery.model.Base;
import com.bakdata.conquery.model.Group;
import com.bakdata.conquery.models.common.Range;
@@ -182,8 +178,6 @@ public class Constants {

    public static final String JSON_CREATOR = JsonCreator.class.getName();
    public static final String CPS_TYPE = CPSType.class.getName();
-   public static final Set<String> ID_REF = Set.of(NsIdRef.class.getName(), MetaIdRef.class.getName());
-   public static final Set<String> ID_REF_COL = Set.of(NsIdRefCollection.class.getName(), MetaIdRefCollection.class.getName());
    public static final String JSON_IGNORE = JsonIgnore.class.getName();
    public static final String JSON_BACK_REFERENCE = JsonBackReference.class.getName();
    public static final String PATH = Path.class.getName();
[File: autodoc (path not shown)]
@@ -267,16 +267,7 @@ private void handleField(ClassInfo currentType, FieldInfo field) throws IOException
    final TypeSignature typeSignature = field.getTypeSignatureOrTypeDescriptor();
    final Ctx ctx = new Ctx().withField(field);

-   final String type;
-   if (ID_REF.stream().anyMatch(field::hasAnnotation)) {
-       type = ID_OF + printType(ctx.withIdOf(true), typeSignature);
-   }
-   else if (ID_REF_COL.stream().anyMatch(field::hasAnnotation)) {
-       type = LIST_OF + ID_OF + StringUtils.removeStart(printType(ctx.withIdOf(true), typeSignature), LIST_OF);
-   }
-   else {
-       type = printType(ctx, typeSignature);
-   }
+   final String type = printType(ctx, typeSignature);

    out.table(
        editLink(introspec),
[File: FilterTemplate.java]
@@ -6,11 +6,10 @@

import com.bakdata.conquery.apiv1.frontend.FrontendValue;
import com.bakdata.conquery.io.cps.CPSType;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRef;
import com.bakdata.conquery.models.config.IndexConfig;
-import com.bakdata.conquery.models.datasets.Dataset;
import com.bakdata.conquery.models.datasets.concepts.Searchable;
import com.bakdata.conquery.models.identifiable.IdentifiableImpl;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
import com.bakdata.conquery.models.identifiable.ids.specific.SearchIndexId;
import com.bakdata.conquery.models.index.FrontendValueIndex;
import com.bakdata.conquery.models.index.FrontendValueIndexKey;
@@ -41,9 +40,10 @@
@CPSType(id = "CSV_TEMPLATE", base = SearchIndex.class)
public class FilterTemplate extends IdentifiableImpl<SearchIndexId> implements Searchable, SearchIndex {

+   private static final long serialVersionUID = 1L;
+
    @NotNull
-   @NsIdRef
-   private Dataset dataset;
+   private DatasetId dataset;

    @NotEmpty
    private final String name;
@@ -106,6 +106,6 @@ public TrieSearch<FrontendValue> createTrieSearch(IndexConfig config) throws Ind

    @Override
    public SearchIndexId createId() {
-       return new SearchIndexId(dataset.getId(), name);
+       return new SearchIndexId(dataset, name);
    }
}
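Side effect of the field change visible in createId above: the composite id is now built from the held DatasetId directly, where it previously had to call dataset.getId() on a resolved Dataset. A toy sketch of that shape, with hypothetical record types rather than the real id classes:

```java
// Toy types only; the real SearchIndexId/DatasetId live in
// com.bakdata.conquery.models.identifiable.ids.specific.
record DatasetId(String name) {}

record SearchIndexId(DatasetId dataset, String name) {
    @Override
    public String toString() { return dataset.name() + "." + name; } // assumed format
}

class FilterTemplateIdSketch {
    private final DatasetId dataset = new DatasetId("demo"); // was: a resolved Dataset
    private final String name = "template";

    SearchIndexId createId() {
        // Previously this read dataset.getId() off the resolved object; now the
        // id is already at hand and no storage access is involved.
        return new SearchIndexId(dataset, name);
    }

    public static void main(String[] args) {
        System.out.println(new FilterTemplateIdSketch().createId()); // demo.template
    }
}
```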
[File: QueryProcessor.java]
@@ -19,6 +19,12 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
+import jakarta.inject.Inject;
+import jakarta.servlet.http.HttpServletRequest;
+import jakarta.validation.Validator;
+import jakarta.ws.rs.BadRequestException;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.UriBuilder;

import com.bakdata.conquery.apiv1.execution.ExecutionStatus;
import com.bakdata.conquery.apiv1.execution.FullExecutionStatus;
@@ -52,16 +58,16 @@
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.datasets.Dataset;
import com.bakdata.conquery.models.datasets.PreviewConfig;
-import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
-import com.bakdata.conquery.models.datasets.concepts.Connector;
import com.bakdata.conquery.models.error.ConqueryError;
import com.bakdata.conquery.models.exceptions.ValidatorHelper;
import com.bakdata.conquery.models.execution.ExecutionState;
import com.bakdata.conquery.models.execution.ManagedExecution;
import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.ConnectorId;
import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
import com.bakdata.conquery.models.identifiable.ids.specific.GroupId;
import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
import com.bakdata.conquery.models.identifiable.mapping.IdPrinter;
import com.bakdata.conquery.models.query.ExecutionManager;
import com.bakdata.conquery.models.query.ManagedQuery;
@@ -82,12 +88,6 @@
import com.bakdata.conquery.util.QueryUtils;
import com.bakdata.conquery.util.QueryUtils.NamespacedIdentifiableCollector;
import com.bakdata.conquery.util.io.IdColumnUtil;
-import jakarta.inject.Inject;
-import jakarta.servlet.http.HttpServletRequest;
-import jakarta.validation.Validator;
-import jakarta.ws.rs.BadRequestException;
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.core.UriBuilder;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -108,17 +108,17 @@ public class QueryProcessor {


    public Stream<ExecutionStatus> getAllQueries(Dataset dataset, HttpServletRequest req, Subject subject, boolean allProviders) {
-       final Collection<ManagedExecution> allQueries = storage.getAllExecutions();
+       final Stream<ManagedExecution> allQueries = storage.getAllExecutions();

        return getQueriesFiltered(dataset.getId(), RequestAwareUriBuilder.fromRequest(req), subject, allQueries, allProviders);
    }

-   public Stream<ExecutionStatus> getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Collection<ManagedExecution> allQueries, boolean allProviders) {
+   public Stream<ExecutionStatus> getQueriesFiltered(DatasetId datasetId, UriBuilder uriBuilder, Subject subject, Stream<ManagedExecution> allQueries, boolean allProviders) {

-       return allQueries.stream()
+       return allQueries
                // The following only checks the dataset, under which the query was submitted, but a query can target more than
                // one dataset.
-               .filter(q -> q.getDataset().getId().equals(datasetId))
+               .filter(q -> q.getDataset().equals(datasetId))
                // to exclude subtypes from somewhere else
                .filter(QueryProcessor::canFrontendRender)
                .filter(Predicate.not(ManagedExecution::isSystem))
@@ -200,10 +200,10 @@ public static boolean isFrontendStructure(CQElement root) {
    /**
     * Cancel a running query: Sending cancellation to shards, which will cause them to stop executing them, results are not sent back, and incoming results will be discarded.
     */
-   public void cancel(Subject subject, Dataset dataset, ManagedExecution query) {
+   public void cancel(Subject subject, ManagedExecution query) {

        // Does not make sense to cancel a query that isn't running.
-       ExecutionManager executionManager = datasetRegistry.get(dataset.getId()).getExecutionManager();
+       ExecutionManager executionManager = datasetRegistry.get(query.getDataset()).getExecutionManager();
        if (!query.getState().equals(ExecutionState.RUNNING)) {
            return;
        }
@@ -221,20 +221,19 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch
    if (patch.getGroups() != null && !patch.getGroups().isEmpty()) {


-       for (ManagedExecutionId managedExecutionId : execution.getSubmitted().collectRequiredQueries()) {
-           final ManagedExecution subQuery = storage.getExecution(managedExecutionId);
+       for (ManagedExecutionId subExecutionId : execution.getSubmitted().collectRequiredQueries()) {

-           if (!subject.isPermitted(subQuery, Ability.READ)) {
-               log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subQuery.getId(), subject);
+           if (!subject.isPermitted(subExecutionId, Ability.READ)) {
+               log.warn("Not sharing {} as User {} is not allowed to see it themselves.", subExecutionId, subject);
                continue;
            }

-           final ConqueryPermission canReadQuery = subQuery.createPermission(Set.of(Ability.READ));
+           final ConqueryPermission canReadQuery = subExecutionId.createPermission(Set.of(Ability.READ));

            final Set<GroupId> groupsToShareWith = new HashSet<>(patch.getGroups());

            // Find all groups the query is already shared with, so we do not remove them, as patch is absolute
-           for (Group group : storage.getAllGroups()) {
+           for (Group group : storage.getAllGroups().toList()) {
                if (groupsToShareWith.contains(group.getId())) {
                    continue;
                }
@@ -250,7 +249,7 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch
                    .groups(new ArrayList<>(groupsToShareWith))
                    .build();

-           patchQuery(subject, subQuery, sharePatch);
+           patchQuery(subject, subExecutionId.resolve(), sharePatch);
        }
    }

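One detail worth noting in the loop above: storage.getAllGroups() now apparently returns a Stream over live storage (matching the getAllExecutions() change earlier in this file), so the code materializes it with .toList() before iterating, because the loop body writes back through patchQuery. A sketch of the underlying rule with plain collections, assuming nothing about the actual MetaStorage implementation:

```java
import java.util.ArrayList;
import java.util.List;

class SnapshotBeforeMutateSketch {
    public static void main(String[] args) {
        List<String> groups = new ArrayList<>(List.of("a", "b"));

        // toList() drains the stream into an immutable snapshot before the loop
        // starts, so writing to `groups` inside the loop cannot disturb the
        // iteration. (Iterating `groups` directly while adding to it would
        // throw ConcurrentModificationException.)
        for (String group : groups.stream().toList()) {
            groups.add(group + "-shared");
        }
        System.out.println(groups); // [a, b, a-shared, b-shared]
    }
}
```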
@@ -261,31 +260,30 @@ public void patchQuery(Subject subject, ManagedExecution execution, MetaDataPatch
    public void reexecute(Subject subject, ManagedExecution query) {
        log.info("User[{}] reexecuted Query[{}]", subject.getId(), query);

-       ExecutionManager executionManager = datasetRegistry.get(query.getDataset().getId()).getExecutionManager();
        if (!query.getState().equals(ExecutionState.RUNNING)) {
            final Namespace namespace = query.getNamespace();

-           namespace.getExecutionManager().execute(namespace, query, config);
+           namespace.getExecutionManager().execute(query, config);
        }
    }

    public void deleteQuery(Subject subject, ManagedExecution execution) {
        log.info("User[{}] deleted Query[{}]", subject.getId(), execution.getId());

-       datasetRegistry.get(execution.getDataset().getId())
+       datasetRegistry.get(execution.getDataset())
                       .getExecutionManager() // Don't go over execution#getExecutionManager() as that's only set when query is initialized
                       .clearQueryResults(execution);

        storage.removeExecution(execution.getId());
    }

    public ExecutionState awaitDone(ManagedExecution query, int time, TimeUnit unit) {
-       final Namespace namespace = datasetRegistry.get(query.getDataset().getId());
+       final Namespace namespace = datasetRegistry.get(query.getDataset());
        return namespace.getExecutionManager().awaitDone(query, time, unit);
    }

    public FullExecutionStatus getQueryFullStatus(ManagedExecution query, Subject subject, UriBuilder url, Boolean allProviders) {
-       final Namespace namespace = datasetRegistry.get(query.getDataset().getId());
+       final Namespace namespace = datasetRegistry.get(query.getDataset());

        query.initExecutable(config);

@@ -326,7 +324,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext
        execution =
                ((ManagedQuery) namespace
                        .getExecutionManager()
-                       .createExecution(query, subject.getUser(), namespace, false));
+                       .createExecution(query, subject.getId(), namespace, false));

        execution.setLastResultCount((long) statistic.getResolved().size());

@@ -342,7 +340,7 @@ public ExternalUploadResult uploadEntities(Subject subject, Dataset dataset, Ext
    /**
     * Create and submit {@link EntityPreviewForm} for subject on to extract sources for entity, and extract some additional infos to be used as infocard.
     */
-   public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List<Connector> sources, Dataset dataset, Range<LocalDate> dateRange) {
+   public FullExecutionStatus getSingleEntityExport(Subject subject, UriBuilder uriBuilder, String idKind, String entity, List<ConnectorId> sources, Dataset dataset, Range<LocalDate> dateRange) {

        subject.authorize(dataset, Ability.ENTITY_PREVIEW);
        subject.authorize(dataset, Ability.PRESERVE_ID);
@@ -438,7 +436,7 @@ public ManagedExecution postQuery(Dataset dataset, QueryDescription query, Subject
    }

    // Execute the query
-   return executionManager.runQuery(namespace, query, subject.getUser(), config, system);
+   return executionManager.runQuery(namespace, query, subject.getId(), config, system);
}

/**
@@ -460,8 +458,8 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe

    // If SecondaryIds differ from selected and prior, we cannot reuse them.
    if (query instanceof SecondaryIdQuery) {
-       final SecondaryIdDescription selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId();
-       final SecondaryIdDescription reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId();
+       final SecondaryIdDescriptionId selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId();
+       final SecondaryIdDescriptionId reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId();

        if (!selectedSecondaryId.equals(reusedSecondaryId)) {
            return null;
@@ -472,7 +470,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe
    if (!user.isOwner(execution)) {
        final ManagedExecution
                newExecution =
-               executionManager.createExecution(execution.getSubmitted(), user, namespace, false);
+               executionManager.createExecution(execution.getSubmitted(), user.getId(), namespace, false);
        newExecution.setLabel(execution.getLabel());
        newExecution.setTags(execution.getTags().clone());
        storage.updateExecution(newExecution);
@@ -487,7 +485,7 @@ private ManagedExecution tryReuse(QueryDescription query, ManagedExecutionId exe

    log.trace("Re-executing Query {}", execution);

-   executionManager.execute(namespace, execution, config);
+   executionManager.execute(execution, config);

    return execution;

[File: FullExecutionStatus.java]
@@ -3,14 +3,13 @@
import java.util.Collection;
import java.util.List;
import java.util.Set;

import javax.annotation.Nullable;

import com.bakdata.conquery.apiv1.query.QueryDescription;
-import com.bakdata.conquery.io.jackson.serializer.NsIdRefCollection;
-import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
import com.bakdata.conquery.models.error.ConqueryErrorInfo;
import com.bakdata.conquery.models.identifiable.ids.specific.GroupId;
+import com.bakdata.conquery.models.identifiable.ids.specific.SecondaryIdDescriptionId;
import com.bakdata.conquery.models.query.ColumnDescriptor;
import lombok.Data;
import lombok.EqualsAndHashCode;
@@ -65,6 +64,5 @@ public class FullExecutionStatus extends ExecutionStatus {
    /**
     * Possible {@link SecondaryIdDescription}s available, of {@link com.bakdata.conquery.models.datasets.concepts.Concept}s used in this Query.
     */
-   @NsIdRefCollection
-   private Set<SecondaryIdDescription> availableSecondaryIds;
+   private Set<SecondaryIdDescriptionId> availableSecondaryIds;
}
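With @NsIdRefCollection gone, availableSecondaryIds is now a set of ids, which Jackson will render in their string form rather than as resolved objects, assuming the id classes expose a string value the way id types commonly do (for example via @JsonValue). A toy sketch of that serialization shape, not the real Conquery types:

```java
// Toy sketch: assumes the id class exposes its string form via @JsonValue.
import java.util.Set;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.databind.ObjectMapper;

record SecondaryId(String dataset, String name) {
    @JsonValue
    String asString() { return dataset + "." + name; } // hypothetical id format
}

class StatusSketch {
    public Set<SecondaryId> availableSecondaryIds = Set.of(new SecondaryId("ds1", "insurance"));
}

class IdSerializationSketch {
    public static void main(String[] args) throws Exception {
        // Prints {"availableSecondaryIds":["ds1.insurance"]}: plain id strings,
        // where the @NsIdRefCollection field used to need registry-backed handling.
        System.out.println(new ObjectMapper().writeValueAsString(new StatusSketch()));
    }
}
```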
[File: (path not shown)]
@@ -16,14 +16,14 @@
import com.bakdata.conquery.io.cps.CPSTypeIdResolver;
import com.bakdata.conquery.io.cps.SubTyped;
import com.bakdata.conquery.io.storage.MetaStorage;
-import com.bakdata.conquery.models.auth.entities.User;
-import com.bakdata.conquery.models.datasets.Dataset;
import com.bakdata.conquery.models.execution.ManagedExecution;
import com.bakdata.conquery.models.forms.frontendconfiguration.FormScanner;
import com.bakdata.conquery.models.forms.frontendconfiguration.FormType;
import com.bakdata.conquery.models.forms.managed.ExternalExecution;
import com.bakdata.conquery.models.i18n.I18n;
+import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
import com.bakdata.conquery.models.identifiable.ids.specific.ManagedExecutionId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
import com.bakdata.conquery.models.query.QueryResolveContext;
import com.bakdata.conquery.models.query.Visitable;
import com.bakdata.conquery.models.worker.DatasetRegistry;
@@ -133,7 +133,7 @@ public String getFormType() {
    }

    @Override
-   public ManagedExecution toManagedExecution(User user, Dataset submittedDataset, MetaStorage storage, DatasetRegistry<?> datasetRegistry) {
+   public ManagedExecution toManagedExecution(UserId user, DatasetId submittedDataset, MetaStorage storage, DatasetRegistry<?> datasetRegistry) {
        return new ExternalExecution(this, user, submittedDataset, storage, datasetRegistry);
    }

[File: FormConfigAPI.java]
@@ -5,9 +5,9 @@
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.NotNull;

-import com.bakdata.conquery.models.auth.entities.User;
import com.bakdata.conquery.models.forms.configs.FormConfig;
import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
+import com.bakdata.conquery.models.identifiable.ids.specific.UserId;
import com.bakdata.conquery.util.VariableDefaultValue;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
@@ -37,7 +37,7 @@ public class FormConfigAPI {
    @VariableDefaultValue @Builder.Default
    private LocalDateTime creationTime = LocalDateTime.now();

-   public FormConfig intern(User owner, DatasetId dataset) {
+   public FormConfig intern(UserId owner, DatasetId dataset) {
        FormConfig intern = new FormConfig();
        intern.setFormId(formId);
        intern.setFormType(formType);
[Diff truncated here: 355 files changed in total, only the first few are shown above.]