Skip to content

Commit

Permalink
Fix SqlConnectorConfig serialization (#3485)
Browse files Browse the repository at this point in the history
  • Loading branch information
jnsrnhld authored Jul 12, 2024
1 parent d145f15 commit f6dac56
Show file tree
Hide file tree
Showing 23 changed files with 223 additions and 86 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -526,9 +526,8 @@ public Stream<Map<String, String>> resolveEntities(Subject subject, List<FilterV

final List<ColumnConfig> ids = config.getIdColumns()
.getIds().stream()
// We're only interested in returning printable AND resolvable ids
// We're only interested in returning printable ids
.filter(ColumnConfig::isPrint)
.filter(ColumnConfig::isResolvable)
.collect(Collectors.toList());


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,17 +138,17 @@ public void collectRequiredQueries(Set<ManagedExecutionId> requiredQueries) {
private ExternalNode<String> createExternalNodeOnlySingle(QueryPlanContext context, ConceptQueryPlan plan, String[] extraHeaders) {
// Remove zero element Lists and substitute one element Lists by containing String
final Map<String, Map<String, String>> extraFlat = extra.entrySet().stream()
.collect(Collectors.toMap(
Map.Entry::getKey,
entityToRowMap -> entityToRowMap.getValue().entrySet().stream()
.filter(headerToValue -> !headerToValue.getValue()
.isEmpty())
.collect(Collectors.toMap(
Map.Entry::getKey,
headerToValue -> headerToValue.getValue()
.get(0)
))
));
.collect(Collectors.toMap(
Map.Entry::getKey,
entityToRowMap -> entityToRowMap.getValue().entrySet().stream()
.filter(headerToValue -> !headerToValue.getValue()
.isEmpty())
.collect(Collectors.toMap(
Map.Entry::getKey,
headerToValue -> headerToValue.getValue()
.get(0)
))
));

final Map<String, ConstantValueAggregator<String>> extraAggregators = new HashMap<>(extraHeaders.length);
for (String extraHeader : extraHeaders) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import com.bakdata.conquery.mode.NamespaceSetupData;
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.config.DatabaseConfig;
import com.bakdata.conquery.models.config.IdColumnConfig;
import com.bakdata.conquery.models.config.SqlConnectorConfig;
import com.bakdata.conquery.models.identifiable.ids.specific.DatasetId;
import com.bakdata.conquery.models.index.IndexService;
Expand All @@ -15,6 +16,7 @@
import com.bakdata.conquery.sql.DSLContextWrapper;
import com.bakdata.conquery.sql.DslContextFactory;
import com.bakdata.conquery.sql.conquery.SqlExecutionManager;
import com.bakdata.conquery.sql.conversion.NodeConversions;
import com.bakdata.conquery.sql.conversion.SqlConverter;
import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
import com.bakdata.conquery.sql.conversion.dialect.SqlDialectFactory;
Expand All @@ -38,15 +40,17 @@ public LocalNamespace createNamespace(NamespaceStorage namespaceStorage, MetaSto

NamespaceSetupData namespaceData = NamespaceHandler.createNamespaceSetup(namespaceStorage, config, mapperCreator, indexService);

IdColumnConfig idColumns = config.getIdColumns();
SqlConnectorConfig sqlConnectorConfig = config.getSqlConnectorConfig();
DatabaseConfig databaseConfig = sqlConnectorConfig.getDatabaseConfig(namespaceStorage.getDataset());

DSLContextWrapper dslContextWrapper = DslContextFactory.create(databaseConfig, sqlConnectorConfig);
DSLContext dslContext = dslContextWrapper.getDslContext();
SqlDialect sqlDialect = dialectFactory.createSqlDialect(databaseConfig.getDialect());

SqlConverter sqlConverter = new SqlConverter(sqlDialect, dslContext, databaseConfig);
SqlExecutionService sqlExecutionService = new SqlExecutionService(dslContext, ResultSetProcessorFactory.create(sqlDialect));
NodeConversions nodeConversions = new NodeConversions(idColumns, sqlDialect, dslContext, databaseConfig, sqlExecutionService);
SqlConverter sqlConverter = new SqlConverter(nodeConversions);
ExecutionManager<SqlExecutionResult> executionManager = new SqlExecutionManager(sqlConverter, sqlExecutionService, metaStorage);
SqlStorageHandler sqlStorageHandler = new SqlStorageHandler(sqlExecutionService);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
import com.bakdata.conquery.io.jackson.View;
import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap;
import com.bakdata.conquery.resources.admin.rest.AdminDatasetProcessor;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonView;
import com.google.common.base.Strings;
import jakarta.validation.constraints.NotEmpty;
Expand All @@ -20,7 +22,7 @@

/**
* Configuration class for QueryUpload and IdMapping.
*
* <p>
* Describes how rows are mapped for {@link EntityIdMap}/{@link AdminDatasetProcessor#setIdMapping(java.io.InputStream, com.bakdata.conquery.models.worker.Namespace)}.
*/
@Builder
Expand All @@ -29,13 +31,10 @@
@NoArgsConstructor
@Setter
@Getter
@JsonIgnoreProperties(value = {"resolvable"}) // for backwards compatibility
public class ColumnConfig {

public EntityIdMap.ExternalId read(String value) {
if (!isResolvable()) {
return null;
}

if (Strings.isNullOrEmpty(value)) {
return null;
}
Expand Down Expand Up @@ -87,23 +86,26 @@ public EntityIdMap.ExternalId read(String value) {
@Builder.Default
private int length = -1;

/**
 * Set to true, if the column should be resolvable in upload. This can be used to add supplemental information to an entity, for example its data-source, which would not be unique among entities.
*/
@Builder.Default
private boolean resolvable = false;

/**
* Set to true, if the Column should be printed to output. This can be used to have resolvable but not printable fields in mapping.
*/
@Builder.Default
@JsonView(View.Persistence.class)
private boolean print = true;

/*
 * Set to true, if the column should be resolvable in upload. This can be used to add supplemental information to an entity, for example its data-source, which would not be unique among entities.
*/
@Builder.Default
private boolean resolvable = false;

/**
* Used in conjunction with {@link com.bakdata.conquery.models.identifiable.mapping.AutoIncrementingPseudomizer}: One column is required to have fillAnon true, which will be filled with pseudomized data.
* Used for CQYes to select all entities. And CQExternal as primaryId for decoding. And for IdMapping for outputting additional Ids.
* <p>
* Additionally, used in conjunction with {@link com.bakdata.conquery.models.identifiable.mapping.AutoIncrementingPseudomizer}: One column is required to have fillAnon true, which will be filled with pseudomized data.
*/
@Builder.Default
@JsonView(View.Persistence.class)
private boolean fillAnon = false;
@JsonAlias("fillAnon")
private boolean primaryId = false;
}
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
package com.bakdata.conquery.models.config;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.extern.jackson.Jacksonized;

@Data
@Builder
@Jacksonized
@NoArgsConstructor
@AllArgsConstructor
public class DatabaseConfig {

private static final String DEFAULT_PRIMARY_COLUMN = "pid";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,6 @@
import java.util.Set;
import java.util.stream.Collectors;

import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;

import com.bakdata.conquery.apiv1.query.concept.specific.external.DateFormat;
import com.bakdata.conquery.models.identifiable.mapping.EntityIdMap;
import com.bakdata.conquery.models.query.resultinfo.LocalizedDefaultResultInfo;
Expand All @@ -23,6 +20,8 @@
import com.google.common.base.Functions;
import com.google.common.collect.MoreCollectors;
import io.dropwizard.validation.ValidationMethod;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Getter;
Expand All @@ -42,6 +41,11 @@
@Slf4j
public class IdColumnConfig {

/**
* Relevant in SQL-Mode, used as AllIdsTable for CQExternal and CQYes.
*/
private String table;

/**
* List of resolvable and printable ids.
*
Expand All @@ -54,8 +58,7 @@ public class IdColumnConfig {
.name("ID")
.field("result")
.label(Map.of(Locale.ROOT, "result"))
.resolvable(true)
.fillAnon(true)
.primaryId(true)
.print(true)
.build()
);
Expand All @@ -64,9 +67,15 @@ public class IdColumnConfig {
@JsonIgnore
@Setter(AccessLevel.NONE)
@Getter(lazy = true, value = AccessLevel.PUBLIC)
private final Map<String, ColumnConfig> idMappers = ids.stream().filter(ColumnConfig::isResolvable)
.collect(Collectors.toMap(ColumnConfig::getName, Functions.identity()));
private final Map<String, ColumnConfig> idMappers = ids.stream().collect(Collectors.toMap(ColumnConfig::getName, Functions.identity()));

@JsonIgnore
public ColumnConfig findPrimaryIdColumn() {
return ids.stream()
.filter(ColumnConfig::isPrimaryId)
.findFirst()
.orElseThrow(() -> new IllegalStateException("Requiring at least 1 primary key column in IdColumnConfig"));
}

@ValidationMethod(message = "Duplicate Claims for Mapping Columns.")
@JsonIgnore
Expand Down Expand Up @@ -101,7 +110,7 @@ public boolean isIdColsHaveMapping() {
public boolean isExactlyOnePseudo() {
return ids.stream()
.filter(conf -> conf.getField() != null)
.filter(ColumnConfig::isFillAnon)
.filter(ColumnConfig::isPrimaryId)
.count() == 1;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,9 @@
import com.fasterxml.jackson.annotation.JsonIgnore;
import io.dropwizard.validation.ValidationMethod;
import jakarta.validation.Valid;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.extern.jackson.Jacksonized;

Expand All @@ -21,7 +19,7 @@
@AllArgsConstructor
public class SqlConnectorConfig {

boolean enabled;
private boolean enabled;

/**
* Determines if generated SQL should be formatted.
Expand All @@ -31,7 +29,6 @@ public class SqlConnectorConfig {
/**
* Keys must match the name of existing {@link Dataset}s.
*/
@Getter(AccessLevel.PRIVATE)
private Map<String, @Valid DatabaseConfig> databaseConfigs;

public DatabaseConfig getDatabaseConfig(Dataset dataset) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@

/**
* Mapping from uploaded {@link ExternalId} for resolving in {@link com.bakdata.conquery.apiv1.query.concept.specific.external.CQExternal}, and also for printing with {@link EntityPrintId}.
*
*/
@Getter
@EqualsAndHashCode
Expand Down Expand Up @@ -67,18 +66,14 @@ public static EntityIdMap generateIdMapping(CsvParser parser, List<ColumnConfig>
final String otherId = record.getString(columnConfig.getField());

// Collect printable parts into id
if(columnConfig.isPrint()) {
if (columnConfig.isPrint()) {
idParts.add(otherId);
}

if (otherId == null) {
continue;
}

if (!columnConfig.isResolvable()) {
continue;
}

final ExternalId transformed = columnConfig.read(otherId);

mapping.addInputMapping(id, transformed);
Expand Down Expand Up @@ -141,7 +136,7 @@ public EntityPrintId toExternal(String internal) {

/**
* Resolve external ID to Entity Id.
*
* <p>
* Return -1 when not resolved.
*/
public String resolve(ExternalId key) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@

import com.bakdata.conquery.apiv1.AdditionalMediaTypes;
import com.bakdata.conquery.apiv1.frontend.FrontendConfiguration;
import com.bakdata.conquery.models.config.ColumnConfig;
import com.bakdata.conquery.models.config.ConqueryConfig;
import com.bakdata.conquery.models.config.FrontendConfig;
import com.bakdata.conquery.models.config.IdColumnConfig;
Expand All @@ -25,16 +24,10 @@ public class ConfigResource {
@GET
@Path("frontend")
public FrontendConfiguration getFrontendConfig() {
// Filter Ids that are not resolvable
final IdColumnConfig idColumns = config.getIdColumns().withIds(config.getIdColumns()
.getIds()
.stream()
.filter(ColumnConfig::isResolvable)
.toList());

final IdColumnConfig idColumns = config.getIdColumns().withIds(config.getIdColumns().getIds());
final FrontendConfig frontendConfig = config.getFrontend();


return new FrontendConfiguration(
VersionInfo.INSTANCE.getVersions(),
frontendConfig.getCurrency(),
Expand All @@ -45,4 +38,4 @@ public FrontendConfiguration getFrontendConfig() {
);
}

}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,32 +2,48 @@

import com.bakdata.conquery.apiv1.query.QueryDescription;
import com.bakdata.conquery.models.config.DatabaseConfig;
import com.bakdata.conquery.models.config.IdColumnConfig;
import com.bakdata.conquery.models.query.Visitable;
import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext;
import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
import com.bakdata.conquery.sql.conversion.model.NameGenerator;
import com.bakdata.conquery.sql.execution.SqlExecutionService;
import org.jooq.DSLContext;

/**
* Entry point for converting {@link QueryDescription} to an SQL query.
*/
public class NodeConversions extends Conversions<Visitable, ConversionContext, ConversionContext> {

private final IdColumnConfig idColumns;
private final SqlDialect dialect;
private final DatabaseConfig config;
private final NameGenerator nameGenerator;
private final SqlExecutionService executionService;

public NodeConversions(SqlDialect dialect, DSLContext dslContext, DatabaseConfig config) {
public NodeConversions(
IdColumnConfig idColumns,
SqlDialect dialect,
DSLContext dslContext,
DatabaseConfig config,
SqlExecutionService executionService
) {
super(dialect.getNodeConverters(dslContext));
this.idColumns = idColumns;
this.dialect = dialect;
this.config = config;
this.nameGenerator = new NameGenerator(config.getDialect().getNameMaxLength());
this.executionService = executionService;
}

public ConversionContext convert(QueryDescription queryDescription) {
ConversionContext initialCtx = ConversionContext.builder()
.idColumns(idColumns)
.config(config)
.nameGenerator(new NameGenerator(config.getDialect().getNameMaxLength()))
.nameGenerator(nameGenerator)
.nodeConversions(this)
.sqlDialect(this.dialect)
.sqlDialect(dialect)
.executionService(executionService)
.build();
return convert(queryDescription, initialCtx);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,18 +1,15 @@
package com.bakdata.conquery.sql.conversion;

import com.bakdata.conquery.apiv1.query.QueryDescription;
import com.bakdata.conquery.models.config.DatabaseConfig;
import com.bakdata.conquery.sql.conversion.cqelement.ConversionContext;
import com.bakdata.conquery.sql.conversion.dialect.SqlDialect;
import com.bakdata.conquery.sql.conversion.model.SqlQuery;
import org.jooq.DSLContext;

public class SqlConverter {

private final NodeConversions nodeConversions;

public SqlConverter(SqlDialect dialect, DSLContext dslContext, DatabaseConfig config) {
this.nodeConversions = new NodeConversions(dialect, dslContext, config);
public SqlConverter(NodeConversions nodeConversions) {
this.nodeConversions = nodeConversions;
}

public SqlQuery convert(QueryDescription queryDescription) {
Expand Down
Loading

0 comments on commit f6dac56

Please sign in to comment.