Skip to content

Commit

Permalink
Make CaseInsensitiveIdentifier a final class (#28982)
Browse files Browse the repository at this point in the history
* Remove useless SchemaName

* Remove TableName

* Refactor SchemaTableName

* Remove LogicTableName
  • Loading branch information
terrymanu authored Nov 7, 2023
1 parent a73e82d commit fc9c81c
Show file tree
Hide file tree
Showing 21 changed files with 109 additions and 241 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
import org.apache.shardingsphere.data.pipeline.api.PipelineDataSourceConfiguration;
import org.apache.shardingsphere.data.pipeline.common.metadata.LogicTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import org.apache.shardingsphere.data.pipeline.common.spi.algorithm.JobRateLimitAlgorithm;
import org.apache.shardingsphere.data.pipeline.core.ingest.dumper.context.mapper.TableAndSchemaNameMapper;
import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;

Expand All @@ -45,7 +45,7 @@ public final class ImporterConfiguration {
private final PipelineDataSourceConfiguration dataSourceConfig;

// TODO columnName case-insensitive?
private final Map<LogicTableName, Set<String>> shardingColumnsMap;
private final Map<CaseInsensitiveIdentifier, Set<String>> shardingColumnsMap;

private final TableAndSchemaNameMapper tableAndSchemaNameMapper;

Expand All @@ -63,7 +63,7 @@ public final class ImporterConfiguration {
* @return logic table names
*/
public Collection<String> getLogicTableNames() {
    // Keys of shardingColumnsMap are the logic tables; expose them to callers as
    // plain strings via CaseInsensitiveIdentifier.toString, in an unmodifiable list.
    return Collections.unmodifiableList(shardingColumnsMap.keySet().stream().map(CaseInsensitiveIdentifier::toString).collect(Collectors.toList()));
}

/**
Expand All @@ -73,7 +73,7 @@ public Collection<String> getLogicTableNames() {
* @return sharding columns
*/
public Set<String> getShardingColumns(final String logicTableName) {
    // Wrap the lookup key so matching ignores identifier case; unknown tables
    // yield an immutable empty set rather than null.
    return shardingColumnsMap.getOrDefault(new CaseInsensitiveIdentifier(logicTableName), Collections.emptySet());
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.shardingsphere.data.pipeline.common.metadata.ActualTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.LogicTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import org.apache.shardingsphere.infra.datanode.DataNode;

import java.util.LinkedHashMap;
Expand Down Expand Up @@ -74,11 +73,11 @@ private static Map<String, Map<String, List<DataNode>>> groupDataSourceDataNodes
* @param dataNodeLine data node line
* @return actual table and logic table map
*/
public static Map<ActualTableName, LogicTableName> buildTableNameMap(final JobDataNodeLine dataNodeLine) {
Map<ActualTableName, LogicTableName> result = new LinkedHashMap<>();
public static Map<CaseInsensitiveIdentifier, CaseInsensitiveIdentifier> buildTableNameMap(final JobDataNodeLine dataNodeLine) {
Map<CaseInsensitiveIdentifier, CaseInsensitiveIdentifier> result = new LinkedHashMap<>();
for (JobDataNodeEntry each : dataNodeLine.getEntries()) {
for (DataNode dataNode : each.getDataNodes()) {
result.put(new ActualTableName(dataNode.getTableName()), new LogicTableName(each.getLogicTableName()));
result.put(new CaseInsensitiveIdentifier(dataNode.getTableName()), new CaseInsensitiveIdentifier(each.getLogicTableName()));
}
}
return result;
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
*/
// TODO table name case-sensitive for some database
@EqualsAndHashCode(of = "lowercase")
public class CaseInsensitiveIdentifier {
public final class CaseInsensitiveIdentifier {

private final String original;

Expand Down

This file was deleted.

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.ToString;

Expand All @@ -30,17 +29,15 @@
@Getter
@EqualsAndHashCode
@ToString
public class SchemaTableName {
public final class SchemaTableName {

@NonNull
private final SchemaName schemaName;
private final CaseInsensitiveIdentifier schemaName;

@NonNull
private final TableName tableName;
private final CaseInsensitiveIdentifier tableName;

public SchemaTableName(final String schemaName, final String tableName) {
    // Wrap both parts as CaseInsensitiveIdentifier so schema/table comparison
    // is case-insensitive (identifier equality is based on its lowercase form).
    this.schemaName = new CaseInsensitiveIdentifier(schemaName);
    this.tableName = new CaseInsensitiveIdentifier(tableName);
}

/**
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,11 @@

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.shardingsphere.data.pipeline.common.metadata.TableName;
import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import org.apache.shardingsphere.data.pipeline.common.metadata.model.PipelineColumnMetaData;
import org.apache.shardingsphere.data.pipeline.common.metadata.model.PipelineIndexMetaData;
import org.apache.shardingsphere.data.pipeline.common.metadata.model.PipelineTableMetaData;
import org.apache.shardingsphere.data.pipeline.common.datasource.PipelineDataSourceWrapper;
import org.apache.shardingsphere.data.pipeline.core.exception.PipelineInternalException;
import org.apache.shardingsphere.infra.database.core.metadata.database.DialectDatabaseMetaData;
import org.apache.shardingsphere.infra.database.core.type.DatabaseTypeRegistry;
Expand Down Expand Up @@ -53,11 +53,11 @@ public final class StandardPipelineTableMetaDataLoader implements PipelineTableM
// It doesn't support ShardingSphereDataSource
private final PipelineDataSourceWrapper dataSource;

private final Map<TableName, PipelineTableMetaData> tableMetaDataMap = new ConcurrentHashMap<>();
private final Map<CaseInsensitiveIdentifier, PipelineTableMetaData> tableMetaDataMap = new ConcurrentHashMap<>();

@Override
public PipelineTableMetaData getTableMetaData(final String schemaName, final String tableName) {
PipelineTableMetaData result = tableMetaDataMap.get(new TableName(tableName));
PipelineTableMetaData result = tableMetaDataMap.get(new CaseInsensitiveIdentifier(tableName));
if (null != result) {
return result;
}
Expand All @@ -66,7 +66,7 @@ public PipelineTableMetaData getTableMetaData(final String schemaName, final Str
} catch (final SQLException ex) {
throw new PipelineInternalException(String.format("Load meta data for schema '%s' and table '%s' failed", schemaName, tableName), ex);
}
result = tableMetaDataMap.get(new TableName(tableName));
result = tableMetaDataMap.get(new CaseInsensitiveIdentifier(tableName));
if (null == result) {
log.warn("getTableMetaData, can not load meta data for table '{}'", tableName);
}
Expand All @@ -76,20 +76,20 @@ public PipelineTableMetaData getTableMetaData(final String schemaName, final Str
/**
 * Load table meta data for all tables matching the pattern into the shared cache.
 *
 * @param schemaName schema name; ignored for dialects without schema support
 * @param tableNamePattern table name pattern passed to JDBC metadata lookup
 * @throws SQLException if the connection or metadata query fails
 */
private void loadTableMetaData(final String schemaName, final String tableNamePattern) throws SQLException {
    try (Connection connection = dataSource.getConnection()) {
        DialectDatabaseMetaData dialectDatabaseMetaData = new DatabaseTypeRegistry(dataSource.getDatabaseType()).getDialectDatabaseMetaData();
        // Only pass the schema name when the dialect actually supports schemas.
        Map<CaseInsensitiveIdentifier, PipelineTableMetaData> tableMetaDataMap = loadTableMetaData0(connection, dialectDatabaseMetaData.isSchemaAvailable() ? schemaName : null, tableNamePattern);
        this.tableMetaDataMap.putAll(tableMetaDataMap);
    }
}

private Map<TableName, PipelineTableMetaData> loadTableMetaData0(final Connection connection, final String schemaName, final String tableNamePattern) throws SQLException {
private Map<CaseInsensitiveIdentifier, PipelineTableMetaData> loadTableMetaData0(final Connection connection, final String schemaName, final String tableNamePattern) throws SQLException {
Collection<String> tableNames = new LinkedList<>();
try (ResultSet resultSet = connection.getMetaData().getTables(connection.getCatalog(), schemaName, tableNamePattern, null)) {
while (resultSet.next()) {
String tableName = resultSet.getString("TABLE_NAME");
tableNames.add(tableName);
}
}
Map<TableName, PipelineTableMetaData> result = new LinkedHashMap<>();
Map<CaseInsensitiveIdentifier, PipelineTableMetaData> result = new LinkedHashMap<>();
for (String each : tableNames) {
Set<String> primaryKeys = loadPrimaryKeys(connection, schemaName, each);
Map<String, Collection<String>> uniqueKeys = loadUniqueIndexesOfTable(connection, schemaName, each);
Expand All @@ -112,7 +112,7 @@ private Map<TableName, PipelineTableMetaData> loadTableMetaData0(final Connectio
}
Collection<PipelineIndexMetaData> uniqueIndexMetaData = uniqueKeys.entrySet().stream()
.map(entry -> new PipelineIndexMetaData(entry.getKey(), entry.getValue().stream().map(columnMetaDataMap::get).collect(Collectors.toList()))).collect(Collectors.toList());
result.put(new TableName(each), new PipelineTableMetaData(each, columnMetaDataMap, uniqueIndexMetaData));
result.put(new CaseInsensitiveIdentifier(each), new PipelineTableMetaData(each, columnMetaDataMap, uniqueIndexMetaData));
}
return result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

package org.apache.shardingsphere.data.pipeline.common.util;

import org.apache.shardingsphere.data.pipeline.common.metadata.LogicTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;
import org.apache.shardingsphere.infra.yaml.config.pojo.rule.YamlRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.rule.ShardingAutoTableRuleConfiguration;
Expand Down Expand Up @@ -48,16 +48,16 @@ public final class ShardingColumnsExtractor {
* @param logicTableNames logic table names
* @return sharding columns map
*/
public Map<LogicTableName, Set<String>> getShardingColumnsMap(final Collection<YamlRuleConfiguration> yamlRuleConfigs, final Set<LogicTableName> logicTableNames) {
public Map<CaseInsensitiveIdentifier, Set<String>> getShardingColumnsMap(final Collection<YamlRuleConfiguration> yamlRuleConfigs, final Set<CaseInsensitiveIdentifier> logicTableNames) {
Optional<ShardingRuleConfiguration> shardingRuleConfig = ShardingRuleConfigurationConverter.findAndConvertShardingRuleConfiguration(yamlRuleConfigs);
if (!shardingRuleConfig.isPresent()) {
return Collections.emptyMap();
}
Set<String> defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultDatabaseShardingStrategy());
Set<String> defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultTableShardingStrategy());
Map<LogicTableName, Set<String>> result = new ConcurrentHashMap<>();
Map<CaseInsensitiveIdentifier, Set<String>> result = new ConcurrentHashMap<>();
for (ShardingTableRuleConfiguration each : shardingRuleConfig.get().getTables()) {
LogicTableName logicTableName = new LogicTableName(each.getLogicTable());
CaseInsensitiveIdentifier logicTableName = new CaseInsensitiveIdentifier(each.getLogicTable());
if (!logicTableNames.contains(logicTableName)) {
continue;
}
Expand All @@ -67,7 +67,7 @@ public Map<LogicTableName, Set<String>> getShardingColumnsMap(final Collection<Y
result.put(logicTableName, shardingColumns);
}
for (ShardingAutoTableRuleConfiguration each : shardingRuleConfig.get().getAutoTables()) {
LogicTableName logicTableName = new LogicTableName(each.getLogicTable());
CaseInsensitiveIdentifier logicTableName = new CaseInsensitiveIdentifier(each.getLogicTable());
if (!logicTableNames.contains(logicTableName)) {
continue;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.ToString;
import org.apache.shardingsphere.data.pipeline.common.metadata.ActualTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.LogicTableName;
import org.apache.shardingsphere.data.pipeline.common.metadata.CaseInsensitiveIdentifier;

import java.util.Map;

Expand All @@ -33,16 +32,16 @@
@ToString
public final class ActualAndLogicTableNameMapper {

private final Map<ActualTableName, LogicTableName> tableNameMap;
private final Map<CaseInsensitiveIdentifier, CaseInsensitiveIdentifier> tableNameMap;

/**
* Get logic table name.
*
* @param actualTableName actual table name
* @return logic table name
*/
public LogicTableName getLogicTableName(final String actualTableName) {
return tableNameMap.get(new ActualTableName(actualTableName));
public CaseInsensitiveIdentifier getLogicTableName(final String actualTableName) {
return tableNameMap.get(new CaseInsensitiveIdentifier(actualTableName));
}

/**
Expand All @@ -52,6 +51,6 @@ public LogicTableName getLogicTableName(final String actualTableName) {
* @return contains or not
*/
public boolean containsTable(final String actualTableName) {
    // Case-insensitive membership check against the actual -> logic table mapping.
    return tableNameMap.containsKey(new CaseInsensitiveIdentifier(actualTableName));
}
}
Loading

0 comments on commit fc9c81c

Please sign in to comment.