Refactor StorageNode and add StorageNodeName (#28612)
terrymanu authored Sep 27, 2023
1 parent ff79177 commit 576e75b
Showing 22 changed files with 195 additions and 148 deletions.
DataSourceGeneratedDatabaseConfiguration.java
@@ -26,7 +26,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.StorageResource;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapperUtils;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -56,10 +56,11 @@ public DataSourceGeneratedDatabaseConfiguration(final Map<String, DataSourceConf
storageResource = new StorageResource(getStorageNodeDataSourceMap(mappers), mappers);
}

-private Map<StorageNode, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
-Map<StorageNode, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
+private Map<StorageNodeName, DataSource> getStorageNodeDataSourceMap(final Map<String, StorageUnitNodeMapper> mappers) {
+Map<StorageNodeName, DataSource> result = new LinkedHashMap<>(mappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : mappers.entrySet()) {
-result.computeIfAbsent(entry.getValue().getStorageNode(), key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
+result.computeIfAbsent(entry.getValue().getStorageNode().getName(),
+key -> DataSourcePoolCreator.create(entry.getKey(), dataSourcePoolPropertiesMap.get(entry.getKey()), true, result.values()));
}
return result;
}
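The hunk above changes the key of the pooled DataSource map from StorageNode to StorageNodeName, so storage units whose nodes share a name reuse the single pool created by computeIfAbsent. A minimal sketch of that keying (not part of the commit), assuming the StorageNodeName class introduced later in this diff; the class name, node names, and the String stand-in for a DataSource are illustrative only:

import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import java.util.LinkedHashMap;
import java.util.Map;

public final class StorageNodeNameKeySketch {
    
    public static void main(final String[] args) {
        // Stand-in for Map<StorageNodeName, DataSource>: the value type does not matter for the keying.
        Map<StorageNodeName, String> pools = new LinkedHashMap<>();
        // Two storage units whose storage nodes share the name "ds_0" resolve to one pooled entry.
        pools.computeIfAbsent(new StorageNodeName("ds_0"), key -> "pool created for the first unit");
        pools.computeIfAbsent(new StorageNodeName("ds_0"), key -> "pool created for the second unit");
        System.out.println(pools.size()); // prints 1
    }
}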
ResourceMetaData.java
@@ -22,7 +22,7 @@
import org.apache.shardingsphere.infra.database.core.type.DatabaseType;
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeUtils;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnit;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitMetaData;
@@ -43,7 +43,7 @@
@Getter
public final class ResourceMetaData {

-private final Map<StorageNode, DataSource> dataSourceMap;
+private final Map<StorageNodeName, DataSource> dataSourceMap;

private final StorageUnitMetaData storageUnitMetaData;

@@ -54,7 +54,7 @@ public ResourceMetaData(final Map<String, DataSource> dataSources) {
StorageUnitNodeMapperUtils.fromDataSources(dataSources));
}

-public ResourceMetaData(final String databaseName, final Map<StorageNode, DataSource> dataSourceMap,
+public ResourceMetaData(final String databaseName, final Map<StorageNodeName, DataSource> dataSourceMap,
final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers, final Map<String, DataSourcePoolProperties> propsMap) {
this.dataSourceMap = dataSourceMap;
storageUnitMetaData = new StorageUnitMetaData(databaseName, dataSourceMap, propsMap, storageUnitNodeMappers);
StorageResource.java
@@ -20,6 +20,7 @@
import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

import javax.sql.DataSource;
@@ -33,13 +34,13 @@
@Getter
public final class StorageResource {

-private final Map<StorageNode, DataSource> dataSourceMap;
+private final Map<StorageNodeName, DataSource> dataSourceMap;

private final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers;

private final Map<String, DataSource> wrappedDataSources;

-public StorageResource(final Map<StorageNode, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
+public StorageResource(final Map<StorageNodeName, DataSource> dataSourceMap, final Map<String, StorageUnitNodeMapper> storageUnitNodeMappers) {
this.dataSourceMap = dataSourceMap;
this.storageUnitNodeMappers = storageUnitNodeMappers;
wrappedDataSources = createWrappedDataSources();
@@ -48,9 +49,10 @@ public StorageResource(final Map<StorageNode, DataSource> dataSourceMap, final M
private Map<String, DataSource> createWrappedDataSources() {
Map<String, DataSource> result = new LinkedHashMap<>(storageUnitNodeMappers.size(), 1F);
for (Entry<String, StorageUnitNodeMapper> entry : storageUnitNodeMappers.entrySet()) {
-DataSource dataSource = dataSourceMap.get(entry.getValue().getStorageNode());
+StorageNode storageNode = entry.getValue().getStorageNode();
+DataSource dataSource = dataSourceMap.get(storageNode.getName());
if (null != dataSource) {
-result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, entry.getValue().getCatalog(), entry.getValue().getUrl()));
+result.put(entry.getKey(), new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl()));
}
}
return result;
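In the hunk above, the catalog and URL that feed CatalogSwitchableDataSource now come from the StorageNode itself rather than from the StorageUnitNodeMapper, while the underlying pool is looked up by node name. A condensed sketch of that wrapping (not part of the commit; the class and method names are illustrative):

import org.apache.shardingsphere.infra.datasource.pool.CatalogSwitchableDataSource;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.Map;

public final class WrappedDataSourceSketch {
    
    // Mirrors createWrappedDataSources(): fetch the shared pool by node name,
    // then expose it to the storage unit under the node's own catalog and URL.
    public static DataSource wrap(final Map<StorageNodeName, DataSource> dataSourceMap, final StorageNode storageNode) {
        DataSource dataSource = dataSourceMap.get(storageNode.getName());
        return new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl());
    }
}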
StorageNode.java
@@ -17,31 +17,27 @@

package org.apache.shardingsphere.infra.metadata.database.resource.node;

-import com.google.common.base.Objects;
import lombok.Getter;
-import lombok.RequiredArgsConstructor;

/**
* Storage node.
*/
-@RequiredArgsConstructor
@Getter
public final class StorageNode {

-private final String name;
+private final StorageNodeName name;

-@Override
-public boolean equals(final Object obj) {
-return obj instanceof StorageNode && ((StorageNode) obj).name.equalsIgnoreCase(name);
-}
+private final String url;

+private final String catalog;

-@Override
-public int hashCode() {
-return Objects.hashCode(name.toUpperCase());
+public StorageNode(final String name, final String url) {
+this(name, url, null);
}

-@Override
-public String toString() {
-return name;
+public StorageNode(final String name, final String url, final String catalog) {
+this.name = new StorageNodeName(name);
+this.url = url;
+this.catalog = catalog;
}
}
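A behavioral consequence of the hunk above: StorageNode no longer overrides equals(), hashCode(), or toString(); the case-insensitive name comparison now lives on StorageNodeName, so callers that relied on name-based equality of StorageNode should compare through getName(). A minimal sketch (not part of the commit), with illustrative class name, node names, and URLs:

import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;

public final class StorageNodeEqualitySketch {
    
    public static void main(final String[] args) {
        StorageNode node0 = new StorageNode("ds_0", "jdbc:mysql://127.0.0.1:3306/demo_ds", "demo_ds");
        StorageNode node1 = new StorageNode("DS_0", "jdbc:mysql://127.0.0.1:3306/demo_ds", "demo_ds");
        System.out.println(node0.equals(node1)); // false: plain identity equality after this refactor
        System.out.println(node0.getName().equals(node1.getName())); // true: case-insensitive name equality
    }
}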
StorageNodeName.java (new file)
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.infra.metadata.database.resource.node;

import com.google.common.base.Objects;
import lombok.Getter;
import lombok.RequiredArgsConstructor;

/**
* Storage node name.
*/
@RequiredArgsConstructor
@Getter
public final class StorageNodeName {

private final String name;

@Override
public boolean equals(final Object obj) {
return obj instanceof StorageNodeName && ((StorageNodeName) obj).name.equalsIgnoreCase(name);
}

@Override
public int hashCode() {
return Objects.hashCode(name.toUpperCase());
}

@Override
public String toString() {
return name;
}
}
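The new value object treats names case-insensitively: equals() uses equalsIgnoreCase and hashCode() hashes the upper-cased name, so differently cased spellings of one node name collapse to a single key. A small usage sketch (not part of the commit; the class name and node names are illustrative):

import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import java.util.HashSet;
import java.util.Set;

public final class StorageNodeNameCaseSketch {
    
    public static void main(final String[] args) {
        StorageNodeName lower = new StorageNodeName("ds_0");
        StorageNodeName upper = new StorageNodeName("DS_0");
        System.out.println(lower.equals(upper)); // true
        System.out.println(lower.hashCode() == upper.hashCode()); // true
        Set<StorageNodeName> names = new HashSet<>();
        names.add(lower);
        names.add(upper);
        System.out.println(names.size()); // prints 1
    }
}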
StorageNodeUtils.java
@@ -38,8 +38,8 @@ public final class StorageNodeUtils {
* @param dataSources data sources
* @return storage node data sources
*/
-public static Map<StorageNode, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
-return dataSources.entrySet().stream()
-.collect(Collectors.toMap(entry -> new StorageNode(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
+public static Map<StorageNodeName, DataSource> getStorageNodeDataSources(final Map<String, DataSource> dataSources) {
+return dataSources.entrySet().stream().collect(
+Collectors.toMap(entry -> new StorageNodeName(entry.getKey()), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
}
}
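The rewritten method keeps the same collector shape as before: map each logical data source name to a key object, keep the latest value on key collisions, and collect into a pre-sized LinkedHashMap so iteration order follows insertion order. A JDK-only sketch of that toMap pattern (not part of the commit; the class name, keys, and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

public final class ToMapCollectorSketch {
    
    public static void main(final String[] args) {
        Map<String, String> dataSources = new LinkedHashMap<>();
        dataSources.put("ds_0", "jdbc:mysql://127.0.0.1:3306/demo_ds_0");
        dataSources.put("ds_1", "jdbc:mysql://127.0.0.1:3306/demo_ds_1");
        // Same collector shape as getStorageNodeDataSources(): derive a new key per entry,
        // keep the latest value on collisions, and collect into a pre-sized, order-preserving LinkedHashMap.
        Map<String, String> result = dataSources.entrySet().stream().collect(
                Collectors.toMap(entry -> entry.getKey().toUpperCase(), Entry::getValue, (oldValue, currentValue) -> currentValue, () -> new LinkedHashMap<>(dataSources.size(), 1F)));
        System.out.println(result);
    }
}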
StorageUnit.java
@@ -27,6 +27,7 @@
import org.apache.shardingsphere.infra.datasource.pool.props.creator.DataSourcePoolPropertiesCreator;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;
import org.apache.shardingsphere.infra.state.datasource.DataSourceStateManager;

import javax.sql.DataSource;
@@ -53,45 +54,47 @@ public final class StorageUnit {

private final ConnectionProperties connectionProperties;

-public StorageUnit(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+public StorageUnit(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources,
final DataSourcePoolProperties props, final StorageUnitNodeMapper unitNodeMapper) {
this.dataSourcePoolProperties = props;
this.unitNodeMapper = unitNodeMapper;
dataSource = getStorageUnitDataSource(storageNodeDataSources, unitNodeMapper);
-Map<StorageNode, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
+Map<StorageNodeName, DataSource> enabledStorageNodeDataSources = getEnabledStorageNodeDataSources(databaseName, storageNodeDataSources);
storageType = createStorageType(enabledStorageNodeDataSources, unitNodeMapper);
connectionProperties = createConnectionProperties(enabledStorageNodeDataSources, unitNodeMapper, storageType).orElse(null);
}

-private DataSource getStorageUnitDataSource(final Map<StorageNode, DataSource> storageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
-DataSource dataSource = storageNodeDataSources.get(unitNodeMapper.getStorageNode());
-return new CatalogSwitchableDataSource(dataSource, unitNodeMapper.getCatalog(), unitNodeMapper.getUrl());
+private DataSource getStorageUnitDataSource(final Map<StorageNodeName, DataSource> storageNodeDataSources, final StorageUnitNodeMapper mapper) {
+StorageNode storageNode = mapper.getStorageNode();
+DataSource dataSource = storageNodeDataSources.get(storageNode.getName());
+return new CatalogSwitchableDataSource(dataSource, storageNode.getCatalog(), storageNode.getUrl());
}

-private Map<StorageNode, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources) {
+private Map<StorageNodeName, DataSource> getEnabledStorageNodeDataSources(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources) {
Map<String, DataSource> toBeCheckedDataSources = new LinkedHashMap<>(storageNodeDataSources.size(), 1F);
-for (Entry<StorageNode, DataSource> entry : storageNodeDataSources.entrySet()) {
+for (Entry<StorageNodeName, DataSource> entry : storageNodeDataSources.entrySet()) {
toBeCheckedDataSources.put(entry.getKey().getName(), entry.getValue());
}
Map<String, DataSource> enabledDataSources = DataSourceStateManager.getInstance().getEnabledDataSources(databaseName, toBeCheckedDataSources);
return storageNodeDataSources.entrySet().stream()
.filter(entry -> enabledDataSources.containsKey(entry.getKey().getName())).collect(Collectors.toMap(Entry::getKey, Entry::getValue));
}

-private DatabaseType createStorageType(final Map<StorageNode, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
-return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())
-? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode()))
+private DatabaseType createStorageType(final Map<StorageNodeName, DataSource> enabledStorageNodeDataSources, final StorageUnitNodeMapper unitNodeMapper) {
+return DatabaseTypeEngine.getStorageType(enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode().getName())
+? Collections.singleton(enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode().getName()))
: Collections.emptyList());
}

-private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNode, DataSource> enabledStorageNodeDataSources,
-final StorageUnitNodeMapper unitNodeMapper, final DatabaseType storageType) {
-if (!enabledStorageNodeDataSources.containsKey(unitNodeMapper.getStorageNode())) {
+private Optional<ConnectionProperties> createConnectionProperties(final Map<StorageNodeName, DataSource> enabledStorageNodeDataSources,
+final StorageUnitNodeMapper mapper, final DatabaseType storageType) {
+StorageNode storageNode = mapper.getStorageNode();
+if (!enabledStorageNodeDataSources.containsKey(storageNode.getName())) {
return Optional.empty();
}
Map<String, Object> standardProps = DataSourcePoolPropertiesCreator.create(
-enabledStorageNodeDataSources.get(unitNodeMapper.getStorageNode())).getConnectionPropertySynonyms().getStandardProperties();
+enabledStorageNodeDataSources.get(storageNode.getName())).getConnectionPropertySynonyms().getStandardProperties();
ConnectionPropertiesParser parser = DatabaseTypedSPILoader.getService(ConnectionPropertiesParser.class, storageType);
-return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), unitNodeMapper.getCatalog()));
+return Optional.of(parser.parse(standardProps.get("url").toString(), standardProps.get("username").toString(), storageNode.getCatalog()));
}
}
StorageUnitMetaData.java
@@ -19,7 +19,7 @@

import lombok.Getter;
import org.apache.shardingsphere.infra.datasource.pool.props.domain.DataSourcePoolProperties;
-import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
+import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNodeName;

import javax.sql.DataSource;
import java.util.LinkedHashMap;
@@ -41,7 +41,7 @@ public final class StorageUnitMetaData {
// TODO zhangliang: should refactor
private final Map<String, DataSource> dataSources;

-public StorageUnitMetaData(final String databaseName, final Map<StorageNode, DataSource> storageNodeDataSources,
+public StorageUnitMetaData(final String databaseName, final Map<StorageNodeName, DataSource> storageNodeDataSources,
final Map<String, DataSourcePoolProperties> dataSourcePoolPropertiesMap, final Map<String, StorageUnitNodeMapper> unitNodeMappers) {
this.unitNodeMappers = unitNodeMappers;
storageUnits = new LinkedHashMap<>(unitNodeMappers.size(), 1F);
StorageUnitNodeMapper.java
@@ -31,12 +31,4 @@ public final class StorageUnitNodeMapper {
private final String name;

private final StorageNode storageNode;

-private final String url;
-
-private final String catalog;
-
-public StorageUnitNodeMapper(final String name, final StorageNode storageNode, final String url) {
-this(name, storageNode, url, null);
-}
}
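With the url and catalog fields removed here, StorageUnitNodeMapper is reduced to a unit name plus its StorageNode, and the node becomes the single owner of the connection coordinates. A small sketch of the adjusted access path (not part of the commit; the class and method names are illustrative):

import org.apache.shardingsphere.infra.metadata.database.resource.node.StorageNode;
import org.apache.shardingsphere.infra.metadata.database.resource.unit.StorageUnitNodeMapper;

public final class MapperAccessSketch {
    
    // Callers that previously read the URL and catalog from the mapper itself
    // now navigate through its storage node, as StorageUnit and StorageResource do above.
    public static String jdbcUrlOf(final StorageUnitNodeMapper mapper) {
        StorageNode storageNode = mapper.getStorageNode();
        return storageNode.getUrl();
    }
}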
(Diffs for the remaining changed files are not shown in this view.)
